diff --git a/.github/actions/specs-report/.eslintignore b/.github/actions/specs-report/.eslintignore
new file mode 100644
index 0000000..9ff5c1c
--- /dev/null
+++ b/.github/actions/specs-report/.eslintignore
@@ -0,0 +1,4 @@
+lib/
+dist/
+node_modules/
+coverage/
diff --git a/.github/actions/specs-report/.gitattributes b/.github/actions/specs-report/.gitattributes
new file mode 100644
index 0000000..6ba5456
--- /dev/null
+++ b/.github/actions/specs-report/.gitattributes
@@ -0,0 +1,3 @@
+* text=auto eol=lf
+
+dist/** -diff linguist-generated=true
diff --git a/.github/actions/specs-report/.gitignore b/.github/actions/specs-report/.gitignore
new file mode 100644
index 0000000..e4da79d
--- /dev/null
+++ b/.github/actions/specs-report/.gitignore
@@ -0,0 +1,105 @@
+# Dependency directory
+node_modules
+
+# Rest pulled from https://github.com/github/gitignore/blob/master/Node.gitignore
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+lerna-debug.log*
+
+# Diagnostic reports (https://nodejs.org/api/report.html)
+report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
+
+# Runtime data
+pids
+*.pid
+*.seed
+*.pid.lock
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+lib-cov
+
+# Coverage directory used by tools like istanbul
+coverage
+*.lcov
+
+# nyc test coverage
+.nyc_output
+
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
+.grunt
+
+# Bower dependency directory (https://bower.io/)
+bower_components
+
+# node-waf configuration
+.lock-wscript
+
+# Compiled binary addons (https://nodejs.org/api/addons.html)
+build/Release
+
+# Dependency directories
+jspm_packages/
+
+# TypeScript v1 declaration files
+typings/
+
+# TypeScript cache
+*.tsbuildinfo
+
+# Optional npm cache directory
+.npm
+
+# Optional eslint cache
+.eslintcache
+
+# Optional REPL history
+.node_repl_history
+
+# Output of 'npm pack'
+*.tgz
+
+# Yarn Integrity file
+.yarn-integrity
+
+# dotenv environment variables file
+.env
+.env.test
+
+# parcel-bundler cache (https://parceljs.org/)
+.cache
+
+# next.js build output
+.next
+
+# nuxt.js build output
+.nuxt
+
+# vuepress build output
+.vuepress/dist
+
+# Serverless directories
+.serverless/
+
+# FuseBox cache
+.fusebox/
+
+# DynamoDB Local files
+.dynamodb/
+
+# OS metadata
+.DS_Store
+Thumbs.db
+
+# Ignore built ts files
+__tests__/runner/*
+
+# IDE files
+.idea
+.vscode
+*.code-workspace
+
+draft
\ No newline at end of file
diff --git a/.github/actions/specs-report/.node-version b/.github/actions/specs-report/.node-version
new file mode 100644
index 0000000..1cc433a
--- /dev/null
+++ b/.github/actions/specs-report/.node-version
@@ -0,0 +1 @@
+20.6.0
diff --git a/.github/actions/specs-report/.prettierignore b/.github/actions/specs-report/.prettierignore
new file mode 100644
index 0000000..2d0c064
--- /dev/null
+++ b/.github/actions/specs-report/.prettierignore
@@ -0,0 +1,3 @@
+dist/
+node_modules/
+coverage/
diff --git a/.github/actions/specs-report/.prettierrc.json b/.github/actions/specs-report/.prettierrc.json
new file mode 100644
index 0000000..a378146
--- /dev/null
+++ b/.github/actions/specs-report/.prettierrc.json
@@ -0,0 +1,16 @@
+{
+ "printWidth": 80,
+ "tabWidth": 2,
+ "useTabs": false,
+ "semi": false,
+ "singleQuote": true,
+ "quoteProps": "as-needed",
+ "jsxSingleQuote": false,
+ "trailingComma": "none",
+ "bracketSpacing": true,
+ "bracketSameLine": true,
+ "arrowParens": "avoid",
+ "proseWrap": "always",
+ "htmlWhitespaceSensitivity": "css",
+ "endOfLine": "lf"
+}
diff --git a/.github/actions/specs-report/README.md b/.github/actions/specs-report/README.md
new file mode 100644
index 0000000..908e700
--- /dev/null
+++ b/.github/actions/specs-report/README.md
@@ -0,0 +1,108 @@
+# tbd specs-report Action
+
+[![Coverage](./badges/coverage.svg)](./badges/coverage.svg)
+
+GitHub Action that parses JUnit test results and generates TBD spec test vector reports
+
+## Usage
+
+```yaml
+# add it as a step in your CI workflow, right after your tests
+
+- name: Execute Spec Test Vector Report
+  # assuming your test step has the id `tests`,
+  # it will always run and report your test results even if they fail
+ if: always() && steps.tests.conclusion != 'skipped'
+ uses: TBD54566975/tbd-sdk-report-runner/.github/actions/specs-report@main
+ with:
+    junit-report-paths: packages/*/results.xml # glob path(s) to the JUnit test vector reports
+    spec-path: tbdex # relative path to the tbd spec submodule folder
+    fail-on-missing-vectors: true # if false, missing test vectors will not fail the build
+    fail-on-failed-test-cases: true # if false, failed test cases will not fail the build
+ comment-on-pr: true # if true, it will comment on the PR with the report
+ git-token: ${{ secrets.GITHUB_TOKEN }} # the git token to use to comment on the PR
+```
+
+_Check the available parameters in the [action.yml](./action.yml) file_
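+
+The action also exposes `summary` and `test-vector-report` outputs (see
+[action.yml](./action.yml)). The snippet below is a minimal sketch of how a
+later step could read them; the `specs-report` step id is only an illustrative
+choice.
+
+```yaml
+- name: Execute Spec Test Vector Report
+  id: specs-report # an id is only needed so later steps can read the outputs
+  uses: TBD54566975/tbd-sdk-report-runner/.github/actions/specs-report@main
+  with:
+    junit-report-paths: packages/*/results.xml
+    spec-path: tbdex
+    git-token: ${{ secrets.GITHUB_TOKEN }}
+
+- name: Print the test vector report JSON
+  run: echo '${{ steps.specs-report.outputs.test-vector-report }}'
+```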
+
+Example report:
+
+![Example Report](./example-report.png)
+
+## Initial Setup
+
+1. :hammer_and_wrench: Install the dependencies
+
+ ```bash
+ npm install
+ ```
+
+1. :building_construction: Package the TypeScript for distribution
+
+ ```bash
+ npm run bundle
+ ```
+
+1. :white_check_mark: Run the tests
+
+ ```bash
+ $ npm test
+ ...
+ ```
+
+## Maintaining the Action Code
+
+The [`src/`](./src/) directory is the heart of the action! It contains the
+source code that runs when the action is invoked.
+
+There are a few things to keep in mind when writing your action code:
+
+- Most GitHub Actions toolkit and CI/CD operations are processed asynchronously.
+ In `main.ts`, you will see that the action is run in an `async` function.
+
+ ```javascript
+ import * as core from '@actions/core'
+ //...
+
+ async function run() {
+ try {
+ //...
+ } catch (error) {
+ core.setFailed(error.message)
+ }
+ }
+ ```
+
+ For more information about the GitHub Actions toolkit, see the
+ [documentation](https://github.com/actions/toolkit/blob/master/README.md).
+
+For information about versioning your action, see
+[Versioning](https://github.com/actions/toolkit/blob/master/docs/action-versioning.md)
+in the GitHub Actions toolkit.
+
+## Publishing a New Release
+
+This project includes a helper script, [`script/release`](./script/release),
+designed to streamline the process of tagging and pushing new releases for
+GitHub Actions.
+
+GitHub Actions allows users to select a specific version of the action to use,
+based on release tags. This script simplifies this process by performing the
+following steps:
+
+1. **Retrieving the latest release tag:** The script starts by fetching the most
+ recent SemVer release tag of the current branch, by looking at the local data
+ available in your repository.
+1. **Prompting for a new release tag:** The user is then prompted to enter a new
+ release tag. To assist with this, the script displays the tag retrieved in
+   the previous step and validates the format of the entered tag (vX.X.X). The
+ user is also reminded to update the version field in package.json.
+1. **Tagging the new release:** The script then tags a new release and syncs the
+ separate major tag (e.g. v1, v2) with the new release tag (e.g. v1.0.0,
+ v2.1.2). When the user is creating a new major release, the script
+ auto-detects this and creates a `releases/v#` branch for the previous major
+ version.
+1. **Pushing changes to remote:** Finally, the script pushes the necessary
+ commits, tags and branches to the remote repository. From here, you will need
+ to create a new release in GitHub so users can easily reference the new tags
+ in their workflows.
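+
+For orientation, the sketch below shows the kind of git operations the script
+automates. The tag names (`v1.2.3`, `v1`) are illustrative assumptions only;
+the real script adds prompting, tag-format validation and the major-release
+branch handling described above.
+
+```bash
+# fetch the most recent SemVer tag known locally (step 1)
+git describe --tags --abbrev=0
+
+# tag the new release and move the major tag to it (step 3)
+git tag v1.2.3
+git tag -f v1 v1.2.3
+
+# push the release tag and the updated major tag (step 4)
+git push origin v1.2.3
+git push origin v1 --force
+```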
diff --git a/.github/actions/specs-report/__tests__/assets/tbdex-js-results-failure.xml b/.github/actions/specs-report/__tests__/assets/tbdex-js-results-failure.xml
new file mode 100644
index 0000000..9be1038
--- /dev/null
+++ b/.github/actions/specs-report/__tests__/assets/tbdex-js-results-failure.xml
@@ -0,0 +1,273 @@
+<!-- The JUnit XML markup of this test asset was lost during extraction; only a
+  few CDATA fragments survive. The original file is a tbdex-js JUnit results
+  report (failure case) whose failing test case carries the assertion
+  "AssertionError: expected true to be false" at tests/test-vectors.spec.ts:43:47
+  (+ expected - actual: -true / +false). -->
diff --git a/.github/actions/specs-report/__tests__/assets/tbdex-js-results-success.xml b/.github/actions/specs-report/__tests__/assets/tbdex-js-results-success.xml
new file mode 100644
index 0000000..9d3bbc8
--- /dev/null
+++ b/.github/actions/specs-report/__tests__/assets/tbdex-js-results-success.xml
@@ -0,0 +1,274 @@
+<!-- The JUnit XML markup of this test asset was lost during extraction. The
+  original file is a tbdex-js JUnit results report (success case); only stray
+  CDATA stack-trace fragments survive, e.g. tests/exchange.spec.ts:109:16 and
+  process.processImmediate (node:internal/timers:478:21). -->
diff --git a/.github/actions/specs-report/__tests__/index.test.ts b/.github/actions/specs-report/__tests__/index.test.ts
new file mode 100644
index 0000000..34a4dfe
--- /dev/null
+++ b/.github/actions/specs-report/__tests__/index.test.ts
@@ -0,0 +1,17 @@
+/**
+ * Unit tests for the action's entrypoint, src/index.ts
+ */
+
+import * as main from '../src/main'
+
+// Mock the action's entrypoint
+const runMock = jest.spyOn(main, 'run').mockImplementation()
+
+describe('index', () => {
+ it('calls run when imported', async () => {
+ // eslint-disable-next-line @typescript-eslint/no-require-imports
+ require('../src/index')
+
+ expect(runMock).toHaveBeenCalled()
+ })
+})
diff --git a/.github/actions/specs-report/__tests__/main.test.ts b/.github/actions/specs-report/__tests__/main.test.ts
new file mode 100644
index 0000000..f55f3e3
--- /dev/null
+++ b/.github/actions/specs-report/__tests__/main.test.ts
@@ -0,0 +1,172 @@
+/**
+ * Unit tests for the action's main functionality, src/main.ts
+ *
+ * These should be run as if the action was called from a workflow.
+ * Specifically, the inputs listed in `action.yml` should be set as environment
+ * variables following the pattern `INPUT_<INPUT_NAME>`.
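+ * (For example, the `junit-report-paths` input is read from the
+ * `INPUT_JUNIT-REPORT-PATHS` environment variable.)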
+ */
+
+import * as core from '@actions/core'
+import * as main from '../src/main'
+import * as files from '../src/files'
+import * as prComment from '../src/pr-comment'
+import * as junit2json from 'junit2json'
+import { TBDEX_TEST_VECTORS_FILES, SUCCESS_MOCK, FAILURE_MOCK } from './mocks'
+
+// Mock the action's main function
+const runMock = jest.spyOn(main, 'run')
+
+// Mock the GitHub Actions core library
+let errorMock: jest.SpiedFunction<typeof core.error>
+let setFailedMock: jest.SpiedFunction<typeof core.setFailed>
+let setOutputMock: jest.SpiedFunction<typeof core.setOutput>
+let getInputMock: jest.SpiedFunction<typeof core.getInput>
+let summaryWriteMock: jest.SpiedFunction<typeof core.summary.write>
+
+// Mock action used functions
+let getFilesMock: jest.SpiedFunction<typeof files.getFiles>
+let readJsonFileMock: jest.SpiedFunction<typeof files.readJsonFile>
+let junitParseMock: jest.SpiedFunction<typeof junit2json.parse>
+let addCommentToPrMock: jest.SpiedFunction<typeof prComment.addCommentToPr>
+
+const defaultGetInputMockImplementation = (): void => {
+ getInputMock.mockImplementation(name => {
+ switch (name) {
+ case 'junit-report-paths':
+ return '500'
+ case 'spec-path':
+ return '500'
+ case 'git-token':
+ return 'fake-token'
+ case 'comment-on-pr':
+ return 'true'
+ case 'fail-on-missing-vectors':
+ return 'false'
+ case 'fail-on-failed-test-cases':
+ return 'true'
+ default:
+ return ''
+ }
+ })
+}
+
+const VALID_SPEC_JSON_BODY = {
+ description: 'This is a valid tbdex spec',
+ input: {},
+ output: {}
+}
+
+describe('action', () => {
+  let output: Record<string, string> = {}
+
+ beforeEach(() => {
+ jest.clearAllMocks()
+ output = {}
+
+ getInputMock = jest.spyOn(core, 'getInput').mockImplementation()
+ setOutputMock = jest
+ .spyOn(core, 'setOutput')
+ .mockImplementation((k, v) => (output[k] = v))
+ setFailedMock = jest.spyOn(core, 'setFailed').mockImplementation()
+ errorMock = jest.spyOn(core, 'error').mockImplementation()
+ summaryWriteMock = jest.spyOn(core.summary, 'write').mockImplementation()
+
+ getFilesMock = jest.spyOn(files, 'getFiles').mockImplementation()
+ readJsonFileMock = jest.spyOn(files, 'readJsonFile').mockImplementation()
+
+ addCommentToPrMock = jest
+ .spyOn(prComment, 'addCommentToPr')
+ .mockImplementation()
+
+ junitParseMock = jest.spyOn(junit2json, 'parse')
+ })
+
+ it('generates a report without any failures', async () => {
+ defaultGetInputMockImplementation()
+ getFilesMock
+ // get junit report xml first
+ .mockReturnValueOnce(Promise.resolve(SUCCESS_MOCK.junitFiles))
+ // get test vector json
+ .mockReturnValueOnce(Promise.resolve(TBDEX_TEST_VECTORS_FILES))
+ readJsonFileMock.mockReturnValue(VALID_SPEC_JSON_BODY)
+
+ const rawJunitFile = files.readFile(SUCCESS_MOCK.junitFiles[0])
+
+ await main.run()
+ expect(runMock).toHaveReturned()
+
+ expect(junitParseMock).toHaveReturnedTimes(1)
+ expect(junitParseMock).toHaveBeenCalledWith(rawJunitFile)
+
+ console.info({ output })
+ expect(setOutputMock).toHaveBeenCalled()
+ expect(output.success).toBe('true')
+ expect(setFailedMock).not.toHaveBeenCalled()
+ expect(errorMock).not.toHaveBeenCalled()
+
+ expect(output.summary).toMatch('✅ All test vectors passed')
+ expect(summaryWriteMock).toHaveBeenCalledTimes(1)
+
+ expect(addCommentToPrMock).toHaveBeenCalledWith(
+ output.summary,
+ 'fake-token'
+ )
+
+ const expectedReport = {
+ totalJunitFiles: 1,
+ totalTestVectors: 10,
+ totalJunitTestCases: 99,
+ specTestCases: 10,
+ specFailedTestCases: 0,
+ specPassedTestCases: 10,
+ specSkippedTestCases: 0,
+ ...SUCCESS_MOCK.vectors
+ }
+ console.info(output['test-vector-report'])
+ const testVectorReport = JSON.parse(output['test-vector-report'])
+ expect(testVectorReport).toEqual(expectedReport)
+ })
+
+ it('generates a report with failures', async () => {
+ defaultGetInputMockImplementation()
+ getFilesMock
+ // get junit report xml first
+ .mockReturnValueOnce(Promise.resolve(FAILURE_MOCK.junitFiles))
+ // get test vector json
+ .mockReturnValueOnce(Promise.resolve(TBDEX_TEST_VECTORS_FILES))
+ readJsonFileMock.mockReturnValue(VALID_SPEC_JSON_BODY)
+
+ const rawJunitFile = files.readFile(FAILURE_MOCK.junitFiles[0])
+
+ await main.run()
+ expect(runMock).toHaveReturned()
+
+ expect(junitParseMock).toHaveReturnedTimes(1)
+ expect(junitParseMock).toHaveBeenCalledWith(rawJunitFile)
+
+ console.info({ output })
+ expect(setOutputMock).toHaveBeenCalled()
+ expect(output.success).not.toBeDefined()
+ expect(setFailedMock).toHaveBeenCalledWith('❌ Failed test vectors found')
+ expect(errorMock).not.toHaveBeenCalled()
+
+ expect(output.summary).toMatch(
+ '7 out of 10 test vectors passed successfully'
+ )
+ expect(summaryWriteMock).toHaveBeenCalledTimes(1)
+
+ const expectedReport = {
+ totalJunitFiles: 1,
+ totalTestVectors: 10,
+ totalJunitTestCases: 97,
+ specTestCases: 8,
+ specFailedTestCases: 1,
+ specPassedTestCases: 7,
+ specSkippedTestCases: 0,
+ ...FAILURE_MOCK.vectors
+ }
+ console.info(output['test-vector-report'])
+ const testVectorReport = JSON.parse(output['test-vector-report'])
+ expect(testVectorReport).toEqual(expectedReport)
+ })
+})
diff --git a/.github/actions/specs-report/__tests__/mocks.ts b/.github/actions/specs-report/__tests__/mocks.ts
new file mode 100644
index 0000000..fb3631b
--- /dev/null
+++ b/.github/actions/specs-report/__tests__/mocks.ts
@@ -0,0 +1,272 @@
+export const TBDEX_TEST_VECTORS_FILES = [
+ '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-balance.json',
+ '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-cancel.json',
+ '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-close.json',
+ '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-offering.json',
+ '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-order.json',
+ '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-orderinstructions.json',
+ '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-orderstatus.json',
+ '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-quote.json',
+ '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-rfq-omit-private-data.json',
+ '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-rfq.json'
+]
+
+export const SUCCESS_MOCK = {
+ junitFiles: [`${__dirname}/assets/tbdex-js-results-success.xml`],
+ vectors: {
+ missingVectors: [],
+ failedVectors: [],
+ skippedVectors: [],
+ successVectors: [
+ {
+ category: 'protocol',
+ name: 'parse_balance',
+ file: '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-balance.json',
+ testCases: [
+ {
+ name: 'TbdexTestVectorsProtocol parse_balance',
+ time: 0,
+ classname: 'parse_balance'
+ }
+ ]
+ },
+ {
+ category: 'protocol',
+ name: 'parse_cancel',
+ file: '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-cancel.json',
+ testCases: [
+ {
+ name: 'TbdexTestVectorsProtocol parse_cancel',
+ time: 0.001,
+ classname: 'parse_cancel'
+ }
+ ]
+ },
+ {
+ category: 'protocol',
+ name: 'parse_close',
+ file: '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-close.json',
+ testCases: [
+ {
+ name: 'TbdexTestVectorsProtocol parse_close',
+ time: 0,
+ classname: 'parse_close'
+ }
+ ]
+ },
+ {
+ category: 'protocol',
+ name: 'parse_offering',
+ file: '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-offering.json',
+ testCases: [
+ {
+ name: 'TbdexTestVectorsProtocol parse_offering',
+ time: 0.001,
+ classname: 'parse_offering'
+ }
+ ]
+ },
+ {
+ category: 'protocol',
+ name: 'parse_order',
+ file: '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-order.json',
+ testCases: [
+ {
+ name: 'TbdexTestVectorsProtocol parse_order',
+ time: 0,
+ classname: 'parse_order'
+ }
+ ]
+ },
+ {
+ category: 'protocol',
+ name: 'parse_orderinstructions',
+ file: '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-orderinstructions.json',
+ testCases: [
+ {
+ name: 'TbdexTestVectorsProtocol parse_orderinstructions',
+ time: 0.001,
+ classname: 'parse_orderinstructions'
+ }
+ ]
+ },
+ {
+ category: 'protocol',
+ name: 'parse_orderstatus',
+ file: '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-orderstatus.json',
+ testCases: [
+ {
+ name: 'TbdexTestVectorsProtocol parse_orderstatus',
+ time: 0,
+ classname: 'parse_orderstatus'
+ }
+ ]
+ },
+ {
+ category: 'protocol',
+ name: 'parse_quote',
+ file: '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-quote.json',
+ testCases: [
+ {
+ name: 'TbdexTestVectorsProtocol parse_quote',
+ time: 0,
+ classname: 'parse_quote'
+ }
+ ]
+ },
+ {
+ category: 'protocol',
+ name: 'parse_rfq_omit_private_data',
+ file: '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-rfq-omit-private-data.json',
+ testCases: [
+ {
+ name: 'TbdexTestVectorsProtocol parse_rfq_omit_private_data',
+ time: 0,
+ classname: 'parse_rfq_omit_private_data'
+ }
+ ]
+ },
+ {
+ category: 'protocol',
+ name: 'parse_rfq',
+ file: '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-rfq.json',
+ testCases: [
+ {
+ name: 'TbdexTestVectorsProtocol parse_rfq',
+ time: 0.001,
+ classname: 'parse_rfq'
+ }
+ ]
+ }
+ ]
+ }
+}
+
+export const FAILURE_MOCK = {
+ junitFiles: [`${__dirname}/assets/tbdex-js-results-failure.xml`],
+ vectors: {
+ missingVectors: [
+ {
+ category: 'protocol',
+ name: 'parse_close',
+ file: '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-close.json',
+ testCases: []
+ },
+ {
+ category: 'protocol',
+ name: 'parse_offering',
+ file: '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-offering.json',
+ testCases: []
+ }
+ ],
+ failedVectors: [
+ {
+ category: 'protocol',
+ name: 'parse_cancel',
+ file: '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-cancel.json',
+ testCases: [
+ {
+ name: 'TbdexTestVectorsProtocol parse_cancel',
+ time: 0.001,
+ classname: 'parse_cancel',
+ failure: [
+ {
+ inner:
+ 'AssertionError: expected true to be false\n at Context. (tests/test-vectors.spec.ts:43:47)\n\n + expected - actual\n\n -true\n +false\n ',
+ message: 'expected true to be false',
+ type: 'AssertionError'
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ skippedVectors: [],
+ successVectors: [
+ {
+ category: 'protocol',
+ name: 'parse_balance',
+ file: '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-balance.json',
+ testCases: [
+ {
+ name: 'TbdexTestVectorsProtocol parse_balance',
+ time: 0,
+ classname: 'parse_balance'
+ }
+ ]
+ },
+ {
+ category: 'protocol',
+ name: 'parse_order',
+ file: '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-order.json',
+ testCases: [
+ {
+ name: 'TbdexTestVectorsProtocol parse_order',
+ time: 0.001,
+ classname: 'parse_order'
+ }
+ ]
+ },
+ {
+ category: 'protocol',
+ name: 'parse_orderinstructions',
+ file: '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-orderinstructions.json',
+ testCases: [
+ {
+ name: 'TbdexTestVectorsProtocol parse_orderinstructions',
+ time: 0,
+ classname: 'parse_orderinstructions'
+ }
+ ]
+ },
+ {
+ category: 'protocol',
+ name: 'parse_orderstatus',
+ file: '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-orderstatus.json',
+ testCases: [
+ {
+ name: 'TbdexTestVectorsProtocol parse_orderstatus',
+ time: 0,
+ classname: 'parse_orderstatus'
+ }
+ ]
+ },
+ {
+ category: 'protocol',
+ name: 'parse_quote',
+ file: '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-quote.json',
+ testCases: [
+ {
+ name: 'TbdexTestVectorsProtocol parse_quote',
+ time: 0.001,
+ classname: 'parse_quote'
+ }
+ ]
+ },
+ {
+ category: 'protocol',
+ name: 'parse_rfq_omit_private_data',
+ file: '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-rfq-omit-private-data.json',
+ testCases: [
+ {
+ name: 'TbdexTestVectorsProtocol parse_rfq_omit_private_data',
+ time: 0,
+ classname: 'parse_rfq_omit_private_data'
+ }
+ ]
+ },
+ {
+ category: 'protocol',
+ name: 'parse_rfq',
+ file: '/home/runner/work/tbdex-js/tbdex-js/tbdex/hosted/test-vectors/protocol/vectors/parse-rfq.json',
+ testCases: [
+ {
+ name: 'TbdexTestVectorsProtocol parse_rfq',
+ time: 0,
+ classname: 'parse_rfq'
+ }
+ ]
+ }
+ ]
+ }
+}
diff --git a/.github/actions/specs-report/__tests__/pr-comment.test.ts b/.github/actions/specs-report/__tests__/pr-comment.test.ts
new file mode 100644
index 0000000..53f672c
--- /dev/null
+++ b/.github/actions/specs-report/__tests__/pr-comment.test.ts
@@ -0,0 +1,144 @@
+import * as core from '@actions/core'
+import * as github from '@actions/github'
+import { addCommentToPr } from '../src/pr-comment'
+
+// Mock the GitHub Actions core library
+let infoMock: jest.SpiedFunction<typeof core.info>
+let errorMock: jest.SpiedFunction<typeof core.error>
+let setFailedMock: jest.SpiedFunction<typeof core.setFailed>
+
+// Mock Octokit
+const mockOctokit = {
+ rest: {
+ issues: {
+ listComments: jest.fn(),
+ updateComment: jest.fn(),
+ createComment: jest.fn()
+ }
+ }
+}
+
+jest.mock('@actions/github', () => ({
+ getOctokit: jest.fn().mockImplementation(() => mockOctokit),
+ context: {
+ eventName: 'pull_request',
+ payload: { pull_request: { number: 123 } },
+ repo: { owner: 'test-owner', repo: 'test-repo' }
+ }
+}))
+
+describe('addCommentToPr', () => {
+ beforeEach(() => {
+ jest.clearAllMocks()
+
+ infoMock = jest.spyOn(core, 'info').mockImplementation()
+ errorMock = jest.spyOn(core, 'error').mockImplementation()
+ setFailedMock = jest.spyOn(core, 'setFailed').mockImplementation()
+
+ // Reset github.context for each test
+ Object.defineProperty(github.context, 'repo', {
+ get: jest.fn(() => ({ owner: 'test-owner', repo: 'test-repo' })),
+ configurable: true
+ })
+ })
+
+ it('skips commenting if not a PR event', async () => {
+ github.context.eventName = 'push'
+ github.context.payload = {}
+
+ await addCommentToPr('Test summary', 'fake-token')
+
+ expect(infoMock).toHaveBeenCalledWith('Not a PR event, skipping comment...')
+ expect(mockOctokit.rest.issues.listComments).not.toHaveBeenCalled()
+ })
+
+ it('fails if no git token is provided', async () => {
+ github.context.eventName = 'pull_request'
+ github.context.payload = { pull_request: { number: 123 } }
+ Object.defineProperty(github.context, 'repo', {
+ value: {},
+ writable: true
+ })
+
+ await addCommentToPr('Test summary', '')
+
+ expect(errorMock).toHaveBeenCalledWith(
+ 'No git token found, skipping comment...'
+ )
+ expect(setFailedMock).toHaveBeenCalledWith(
+ 'No git token found to add the requested PR comment'
+ )
+ expect(mockOctokit.rest.issues.listComments).not.toHaveBeenCalled()
+ })
+
+ it('updates an existing comment if found', async () => {
+ const existingComment = {
+ id: 456,
+ body: 'Header summary\nOld content',
+ user: { type: 'Bot' },
+ html_url:
+ 'https://github.com/test-owner/test-repo/pull/123#issuecomment-456'
+ }
+
+ mockOctokit.rest.issues.listComments.mockResolvedValue({
+ data: [existingComment]
+ })
+
+ const newSummary = 'Header summary\nNew content'
+ await addCommentToPr(newSummary, 'fake-token')
+
+ expect(mockOctokit.rest.issues.listComments).toHaveBeenCalledWith({
+ owner: 'test-owner',
+ repo: 'test-repo',
+ issue_number: 123
+ })
+
+ expect(mockOctokit.rest.issues.updateComment).toHaveBeenCalledWith({
+ owner: 'test-owner',
+ repo: 'test-repo',
+ comment_id: 456,
+ body: newSummary
+ })
+
+ expect(infoMock).toHaveBeenCalledWith('Existing comment found, updating...')
+ expect(infoMock).toHaveBeenCalledWith(
+ `Comment updated ${existingComment.html_url}`
+ )
+ })
+
+ it('creates a new comment if no existing comment is found', async () => {
+ const newComment = {
+ html_url:
+ 'https://github.com/test-owner/test-repo/pull/123#issuecomment-789'
+ }
+
+ mockOctokit.rest.issues.createComment.mockResolvedValue({
+ data: newComment
+ })
+
+ mockOctokit.rest.issues.listComments.mockResolvedValue({ data: [] })
+
+ const newSummary = 'New summary\nNew content'
+ await addCommentToPr(newSummary, 'fake-token')
+
+ expect(mockOctokit.rest.issues.listComments).toHaveBeenCalledWith({
+ owner: 'test-owner',
+ repo: 'test-repo',
+ issue_number: 123
+ })
+
+ expect(mockOctokit.rest.issues.createComment).toHaveBeenCalledWith({
+ owner: 'test-owner',
+ repo: 'test-repo',
+ issue_number: 123,
+ body: newSummary
+ })
+
+ expect(infoMock).toHaveBeenCalledWith(
+ 'No existing comment found, creating new one...'
+ )
+ expect(infoMock).toHaveBeenCalledWith(
+ `Comment created ${newComment.html_url}`
+ )
+ })
+})
diff --git a/.github/actions/specs-report/action.yml b/.github/actions/specs-report/action.yml
new file mode 100644
index 0000000..2ec7d48
--- /dev/null
+++ b/.github/actions/specs-report/action.yml
@@ -0,0 +1,41 @@
+name: 'TBD Specs Test Vector Report'
+description: 'Generate and compare spec test vector report'
+author: 'TBD'
+
+inputs:
+ junit-report-paths:
+ description:
+      'Glob path(s) to the JUnit test vector reports; can be a single path or
+      an array of paths'
+ required: true
+ spec-path:
+ description: 'The relative path to the tbd spec submodule folder'
+ required: true
+ test-cases-prefix:
+ description: 'The prefix of the test cases to be filtered'
+ required: false
+ comment-on-pr:
+ description: 'Whether to comment the summary report on the PR'
+ required: false
+ git-token:
+ description:
+ 'The git token to add the comment to the PR (required for PR comments)'
+ required: false
+ fail-on-missing-vectors:
+ description: 'Whether to fail the job if missing test vectors are found'
+ required: false
+ fail-on-failed-test-cases:
+ description: 'Whether to fail the job if failed test cases are found'
+ required: false
+
+# Define your outputs here.
+outputs:
+ summary:
+ description:
+ 'The summary of the test vector report, same as the GITHUB_STEP_SUMMARY'
+ test-vector-report:
+ description: 'The test vector report object'
+
+runs:
+ using: node20
+ main: dist/index.js
diff --git a/.github/actions/specs-report/badges/coverage.svg b/.github/actions/specs-report/badges/coverage.svg
new file mode 100644
index 0000000..105b452
--- /dev/null
+++ b/.github/actions/specs-report/badges/coverage.svg
@@ -0,0 +1 @@
+<!-- Coverage badge SVG markup lost during extraction -->
diff --git a/.github/actions/specs-report/dist/index.js b/.github/actions/specs-report/dist/index.js
new file mode 100644
index 0000000..33e66f4
--- /dev/null
+++ b/.github/actions/specs-report/dist/index.js
@@ -0,0 +1,40950 @@
+require('./sourcemap-register.js');/******/ (() => { // webpackBootstrap
+/******/ var __webpack_modules__ = ({
+
+/***/ 7351:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.issue = exports.issueCommand = void 0;
+const os = __importStar(__nccwpck_require__(2037));
+const utils_1 = __nccwpck_require__(5278);
+/**
+ * Commands
+ *
+ * Command Format:
+ * ::name key=value,key=value::message
+ *
+ * Examples:
+ * ::warning::This is the message
+ * ::set-env name=MY_VAR::some value
+ */
+function issueCommand(command, properties, message) {
+ const cmd = new Command(command, properties, message);
+ process.stdout.write(cmd.toString() + os.EOL);
+}
+exports.issueCommand = issueCommand;
+function issue(name, message = '') {
+ issueCommand(name, {}, message);
+}
+exports.issue = issue;
+const CMD_STRING = '::';
+class Command {
+ constructor(command, properties, message) {
+ if (!command) {
+ command = 'missing.command';
+ }
+ this.command = command;
+ this.properties = properties;
+ this.message = message;
+ }
+ toString() {
+ let cmdStr = CMD_STRING + this.command;
+ if (this.properties && Object.keys(this.properties).length > 0) {
+ cmdStr += ' ';
+ let first = true;
+ for (const key in this.properties) {
+ if (this.properties.hasOwnProperty(key)) {
+ const val = this.properties[key];
+ if (val) {
+ if (first) {
+ first = false;
+ }
+ else {
+ cmdStr += ',';
+ }
+ cmdStr += `${key}=${escapeProperty(val)}`;
+ }
+ }
+ }
+ }
+ cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
+ return cmdStr;
+ }
+}
+function escapeData(s) {
+ return utils_1.toCommandValue(s)
+ .replace(/%/g, '%25')
+ .replace(/\r/g, '%0D')
+ .replace(/\n/g, '%0A');
+}
+function escapeProperty(s) {
+ return utils_1.toCommandValue(s)
+ .replace(/%/g, '%25')
+ .replace(/\r/g, '%0D')
+ .replace(/\n/g, '%0A')
+ .replace(/:/g, '%3A')
+ .replace(/,/g, '%2C');
+}
+//# sourceMappingURL=command.js.map
+
+/***/ }),
+
+/***/ 2186:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
+const command_1 = __nccwpck_require__(7351);
+const file_command_1 = __nccwpck_require__(717);
+const utils_1 = __nccwpck_require__(5278);
+const os = __importStar(__nccwpck_require__(2037));
+const path = __importStar(__nccwpck_require__(1017));
+const oidc_utils_1 = __nccwpck_require__(8041);
+/**
+ * The code to exit an action
+ */
+var ExitCode;
+(function (ExitCode) {
+ /**
+ * A code indicating that the action was successful
+ */
+ ExitCode[ExitCode["Success"] = 0] = "Success";
+ /**
+ * A code indicating that the action was a failure
+ */
+ ExitCode[ExitCode["Failure"] = 1] = "Failure";
+})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
+//-----------------------------------------------------------------------
+// Variables
+//-----------------------------------------------------------------------
+/**
+ * Sets env variable for this action and future actions in the job
+ * @param name the name of the variable to set
+ * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
+ */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function exportVariable(name, val) {
+ const convertedVal = utils_1.toCommandValue(val);
+ process.env[name] = convertedVal;
+ const filePath = process.env['GITHUB_ENV'] || '';
+ if (filePath) {
+ return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));
+ }
+ command_1.issueCommand('set-env', { name }, convertedVal);
+}
+exports.exportVariable = exportVariable;
+/**
+ * Registers a secret which will get masked from logs
+ * @param secret value of the secret
+ */
+function setSecret(secret) {
+ command_1.issueCommand('add-mask', {}, secret);
+}
+exports.setSecret = setSecret;
+/**
+ * Prepends inputPath to the PATH (for this action and future actions)
+ * @param inputPath
+ */
+function addPath(inputPath) {
+ const filePath = process.env['GITHUB_PATH'] || '';
+ if (filePath) {
+ file_command_1.issueFileCommand('PATH', inputPath);
+ }
+ else {
+ command_1.issueCommand('add-path', {}, inputPath);
+ }
+ process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
+}
+exports.addPath = addPath;
+/**
+ * Gets the value of an input.
+ * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.
+ * Returns an empty string if the value is not defined.
+ *
+ * @param name name of the input to get
+ * @param options optional. See InputOptions.
+ * @returns string
+ */
+function getInput(name, options) {
+ const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
+ if (options && options.required && !val) {
+ throw new Error(`Input required and not supplied: ${name}`);
+ }
+ if (options && options.trimWhitespace === false) {
+ return val;
+ }
+ return val.trim();
+}
+exports.getInput = getInput;
+/**
+ * Gets the values of an multiline input. Each value is also trimmed.
+ *
+ * @param name name of the input to get
+ * @param options optional. See InputOptions.
+ * @returns string[]
+ *
+ */
+function getMultilineInput(name, options) {
+ const inputs = getInput(name, options)
+ .split('\n')
+ .filter(x => x !== '');
+ if (options && options.trimWhitespace === false) {
+ return inputs;
+ }
+ return inputs.map(input => input.trim());
+}
+exports.getMultilineInput = getMultilineInput;
+/**
+ * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification.
+ * Support boolean input list: `true | True | TRUE | false | False | FALSE` .
+ * The return value is also in boolean type.
+ * ref: https://yaml.org/spec/1.2/spec.html#id2804923
+ *
+ * @param name name of the input to get
+ * @param options optional. See InputOptions.
+ * @returns boolean
+ */
+function getBooleanInput(name, options) {
+ const trueValue = ['true', 'True', 'TRUE'];
+ const falseValue = ['false', 'False', 'FALSE'];
+ const val = getInput(name, options);
+ if (trueValue.includes(val))
+ return true;
+ if (falseValue.includes(val))
+ return false;
+ throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` +
+ `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``);
+}
+exports.getBooleanInput = getBooleanInput;
+/**
+ * Sets the value of an output.
+ *
+ * @param name name of the output to set
+ * @param value value to store. Non-string values will be converted to a string via JSON.stringify
+ */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function setOutput(name, value) {
+ const filePath = process.env['GITHUB_OUTPUT'] || '';
+ if (filePath) {
+ return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));
+ }
+ process.stdout.write(os.EOL);
+ command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));
+}
+exports.setOutput = setOutput;
+/**
+ * Enables or disables the echoing of commands into stdout for the rest of the step.
+ * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
+ *
+ */
+function setCommandEcho(enabled) {
+ command_1.issue('echo', enabled ? 'on' : 'off');
+}
+exports.setCommandEcho = setCommandEcho;
+//-----------------------------------------------------------------------
+// Results
+//-----------------------------------------------------------------------
+/**
+ * Sets the action status to failed.
+ * When the action exits it will be with an exit code of 1
+ * @param message add error issue message
+ */
+function setFailed(message) {
+ process.exitCode = ExitCode.Failure;
+ error(message);
+}
+exports.setFailed = setFailed;
+//-----------------------------------------------------------------------
+// Logging Commands
+//-----------------------------------------------------------------------
+/**
+ * Gets whether Actions Step Debug is on or not
+ */
+function isDebug() {
+ return process.env['RUNNER_DEBUG'] === '1';
+}
+exports.isDebug = isDebug;
+/**
+ * Writes debug message to user log
+ * @param message debug message
+ */
+function debug(message) {
+ command_1.issueCommand('debug', {}, message);
+}
+exports.debug = debug;
+/**
+ * Adds an error issue
+ * @param message error issue message. Errors will be converted to string via toString()
+ * @param properties optional properties to add to the annotation.
+ */
+function error(message, properties = {}) {
+ command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
+}
+exports.error = error;
+/**
+ * Adds a warning issue
+ * @param message warning issue message. Errors will be converted to string via toString()
+ * @param properties optional properties to add to the annotation.
+ */
+function warning(message, properties = {}) {
+ command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
+}
+exports.warning = warning;
+/**
+ * Adds a notice issue
+ * @param message notice issue message. Errors will be converted to string via toString()
+ * @param properties optional properties to add to the annotation.
+ */
+function notice(message, properties = {}) {
+ command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
+}
+exports.notice = notice;
+/**
+ * Writes info to log with console.log.
+ * @param message info message
+ */
+function info(message) {
+ process.stdout.write(message + os.EOL);
+}
+exports.info = info;
+/**
+ * Begin an output group.
+ *
+ * Output until the next `groupEnd` will be foldable in this group
+ *
+ * @param name The name of the output group
+ */
+function startGroup(name) {
+ command_1.issue('group', name);
+}
+exports.startGroup = startGroup;
+/**
+ * End an output group.
+ */
+function endGroup() {
+ command_1.issue('endgroup');
+}
+exports.endGroup = endGroup;
+/**
+ * Wrap an asynchronous function call in a group.
+ *
+ * Returns the same type as the function itself.
+ *
+ * @param name The name of the group
+ * @param fn The function to wrap in the group
+ */
+function group(name, fn) {
+ return __awaiter(this, void 0, void 0, function* () {
+ startGroup(name);
+ let result;
+ try {
+ result = yield fn();
+ }
+ finally {
+ endGroup();
+ }
+ return result;
+ });
+}
+exports.group = group;
+//-----------------------------------------------------------------------
+// Wrapper action state
+//-----------------------------------------------------------------------
+/**
+ * Saves state for current action, the state can only be retrieved by this action's post job execution.
+ *
+ * @param name name of the state to store
+ * @param value value to store. Non-string values will be converted to a string via JSON.stringify
+ */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function saveState(name, value) {
+ const filePath = process.env['GITHUB_STATE'] || '';
+ if (filePath) {
+ return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));
+ }
+ command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));
+}
+exports.saveState = saveState;
+/**
+ * Gets the value of an state set by this action's main execution.
+ *
+ * @param name name of the state to get
+ * @returns string
+ */
+function getState(name) {
+ return process.env[`STATE_${name}`] || '';
+}
+exports.getState = getState;
+function getIDToken(aud) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return yield oidc_utils_1.OidcClient.getIDToken(aud);
+ });
+}
+exports.getIDToken = getIDToken;
+/**
+ * Summary exports
+ */
+var summary_1 = __nccwpck_require__(1327);
+Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } }));
+/**
+ * @deprecated use core.summary
+ */
+var summary_2 = __nccwpck_require__(1327);
+Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } }));
+/**
+ * Path exports
+ */
+var path_utils_1 = __nccwpck_require__(2981);
+Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } }));
+Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } }));
+Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } }));
+//# sourceMappingURL=core.js.map
+
+/***/ }),
+
+/***/ 717:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+// For internal use, subject to change.
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
+// We use any as a valid input type
+/* eslint-disable @typescript-eslint/no-explicit-any */
+const fs = __importStar(__nccwpck_require__(7147));
+const os = __importStar(__nccwpck_require__(2037));
+const uuid_1 = __nccwpck_require__(5840);
+const utils_1 = __nccwpck_require__(5278);
+function issueFileCommand(command, message) {
+ const filePath = process.env[`GITHUB_${command}`];
+ if (!filePath) {
+ throw new Error(`Unable to find environment variable for file command ${command}`);
+ }
+ if (!fs.existsSync(filePath)) {
+ throw new Error(`Missing file at path: ${filePath}`);
+ }
+ fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
+ encoding: 'utf8'
+ });
+}
+exports.issueFileCommand = issueFileCommand;
+function prepareKeyValueMessage(key, value) {
+ const delimiter = `ghadelimiter_${uuid_1.v4()}`;
+ const convertedValue = utils_1.toCommandValue(value);
+ // These should realistically never happen, but just in case someone finds a
+ // way to exploit uuid generation let's not allow keys or values that contain
+ // the delimiter.
+ if (key.includes(delimiter)) {
+ throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
+ }
+ if (convertedValue.includes(delimiter)) {
+ throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
+ }
+ return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
+}
+exports.prepareKeyValueMessage = prepareKeyValueMessage;
+//# sourceMappingURL=file-command.js.map
+
+/***/ }),
+
+/***/ 8041:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.OidcClient = void 0;
+const http_client_1 = __nccwpck_require__(6255);
+const auth_1 = __nccwpck_require__(5526);
+const core_1 = __nccwpck_require__(2186);
+class OidcClient {
+ static createHttpClient(allowRetry = true, maxRetry = 10) {
+ const requestOptions = {
+ allowRetries: allowRetry,
+ maxRetries: maxRetry
+ };
+ return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);
+ }
+ static getRequestToken() {
+ const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];
+ if (!token) {
+ throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');
+ }
+ return token;
+ }
+ static getIDTokenUrl() {
+ const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];
+ if (!runtimeUrl) {
+ throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');
+ }
+ return runtimeUrl;
+ }
+ static getCall(id_token_url) {
+ var _a;
+ return __awaiter(this, void 0, void 0, function* () {
+ const httpclient = OidcClient.createHttpClient();
+ const res = yield httpclient
+ .getJson(id_token_url)
+ .catch(error => {
+ throw new Error(`Failed to get ID Token. \n
+ Error Code : ${error.statusCode}\n
+ Error Message: ${error.message}`);
+ });
+ const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;
+ if (!id_token) {
+ throw new Error('Response json body do not have ID Token field');
+ }
+ return id_token;
+ });
+ }
+ static getIDToken(audience) {
+ return __awaiter(this, void 0, void 0, function* () {
+ try {
+ // New ID Token is requested from action service
+ let id_token_url = OidcClient.getIDTokenUrl();
+ if (audience) {
+ const encodedAudience = encodeURIComponent(audience);
+ id_token_url = `${id_token_url}&audience=${encodedAudience}`;
+ }
+ core_1.debug(`ID token url is ${id_token_url}`);
+ const id_token = yield OidcClient.getCall(id_token_url);
+ core_1.setSecret(id_token);
+ return id_token;
+ }
+ catch (error) {
+ throw new Error(`Error message: ${error.message}`);
+ }
+ });
+ }
+}
+exports.OidcClient = OidcClient;
+//# sourceMappingURL=oidc-utils.js.map
+
+/***/ }),
+
+/***/ 2981:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;
+const path = __importStar(__nccwpck_require__(1017));
+/**
+ * toPosixPath converts the given path to the posix form. On Windows, \\ will be
+ * replaced with /.
+ *
+ * @param pth. Path to transform.
+ * @return string Posix path.
+ */
+function toPosixPath(pth) {
+ return pth.replace(/[\\]/g, '/');
+}
+exports.toPosixPath = toPosixPath;
+/**
+ * toWin32Path converts the given path to the win32 form. On Linux, / will be
+ * replaced with \\.
+ *
+ * @param pth. Path to transform.
+ * @return string Win32 path.
+ */
+function toWin32Path(pth) {
+ return pth.replace(/[/]/g, '\\');
+}
+exports.toWin32Path = toWin32Path;
+/**
+ * toPlatformPath converts the given path to a platform-specific path. It does
+ * this by replacing instances of / and \ with the platform-specific path
+ * separator.
+ *
+ * @param pth The path to platformize.
+ * @return string The platform-specific path.
+ */
+function toPlatformPath(pth) {
+ return pth.replace(/[/\\]/g, path.sep);
+}
+exports.toPlatformPath = toPlatformPath;
+//# sourceMappingURL=path-utils.js.map
+
+/***/ }),
+
+/***/ 1327:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;
+const os_1 = __nccwpck_require__(2037);
+const fs_1 = __nccwpck_require__(7147);
+const { access, appendFile, writeFile } = fs_1.promises;
+exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
+exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
+class Summary {
+ constructor() {
+ this._buffer = '';
+ }
+ /**
+ * Finds the summary file path from the environment, rejects if env var is not found or file does not exist
+ * Also checks r/w permissions.
+ *
+ * @returns step summary file path
+ */
+ filePath() {
+ return __awaiter(this, void 0, void 0, function* () {
+ if (this._filePath) {
+ return this._filePath;
+ }
+ const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];
+ if (!pathFromEnv) {
+ throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
+ }
+ try {
+ yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
+ }
+ catch (_a) {
+ throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
+ }
+ this._filePath = pathFromEnv;
+ return this._filePath;
+ });
+ }
+ /**
+ * Wraps content in an HTML tag, adding any HTML attributes
+ *
+ * @param {string} tag HTML tag to wrap
+ * @param {string | null} content content within the tag
+ * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add
+ *
+ * @returns {string} content wrapped in HTML element
+ */
+ wrap(tag, content, attrs = {}) {
+ const htmlAttrs = Object.entries(attrs)
+ .map(([key, value]) => ` ${key}="${value}"`)
+ .join('');
+ if (!content) {
+ return `<${tag}${htmlAttrs}>`;
+ }
+        return `<${tag}${htmlAttrs}>${content}</${tag}>`;
+ }
+ /**
+ * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.
+ *
+ * @param {SummaryWriteOptions} [options] (optional) options for write operation
+ *
+     * @returns {Promise<Summary>} summary instance
+ */
+ write(options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);
+ const filePath = yield this.filePath();
+ const writeFunc = overwrite ? writeFile : appendFile;
+ yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });
+ return this.emptyBuffer();
+ });
+ }
+ /**
+ * Clears the summary buffer and wipes the summary file
+ *
+ * @returns {Summary} summary instance
+ */
+ clear() {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.emptyBuffer().write({ overwrite: true });
+ });
+ }
+ /**
+ * Returns the current summary buffer as a string
+ *
+ * @returns {string} string of summary buffer
+ */
+ stringify() {
+ return this._buffer;
+ }
+ /**
+ * If the summary buffer is empty
+ *
+ * @returns {boolen} true if the buffer is empty
+ */
+ isEmptyBuffer() {
+ return this._buffer.length === 0;
+ }
+ /**
+ * Resets the summary buffer without writing to summary file
+ *
+ * @returns {Summary} summary instance
+ */
+ emptyBuffer() {
+ this._buffer = '';
+ return this;
+ }
+ /**
+ * Adds raw text to the summary buffer
+ *
+ * @param {string} text content to add
+ * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
+ *
+ * @returns {Summary} summary instance
+ */
+ addRaw(text, addEOL = false) {
+ this._buffer += text;
+ return addEOL ? this.addEOL() : this;
+ }
+ /**
+ * Adds the operating system-specific end-of-line marker to the buffer
+ *
+ * @returns {Summary} summary instance
+ */
+ addEOL() {
+ return this.addRaw(os_1.EOL);
+ }
+ /**
+ * Adds an HTML codeblock to the summary buffer
+ *
+ * @param {string} code content to render within fenced code block
+ * @param {string} lang (optional) language to syntax highlight code
+ *
+ * @returns {Summary} summary instance
+ */
+ addCodeBlock(code, lang) {
+ const attrs = Object.assign({}, (lang && { lang }));
+ const element = this.wrap('pre', this.wrap('code', code), attrs);
+ return this.addRaw(element).addEOL();
+ }
+ /**
+ * Adds an HTML list to the summary buffer
+ *
+ * @param {string[]} items list of items to render
+ * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
+ *
+ * @returns {Summary} summary instance
+ */
+ addList(items, ordered = false) {
+ const tag = ordered ? 'ol' : 'ul';
+ const listItems = items.map(item => this.wrap('li', item)).join('');
+ const element = this.wrap(tag, listItems);
+ return this.addRaw(element).addEOL();
+ }
+ /**
+ * Adds an HTML table to the summary buffer
+ *
+ * @param {SummaryTableCell[]} rows table rows
+ *
+ * @returns {Summary} summary instance
+ */
+ addTable(rows) {
+ const tableBody = rows
+ .map(row => {
+ const cells = row
+ .map(cell => {
+ if (typeof cell === 'string') {
+ return this.wrap('td', cell);
+ }
+ const { header, data, colspan, rowspan } = cell;
+ const tag = header ? 'th' : 'td';
+ const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));
+ return this.wrap(tag, data, attrs);
+ })
+ .join('');
+ return this.wrap('tr', cells);
+ })
+ .join('');
+ const element = this.wrap('table', tableBody);
+ return this.addRaw(element).addEOL();
+ }
+ /**
+ * Adds a collapsible HTML details element to the summary buffer
+ *
+ * @param {string} label text for the closed state
+ * @param {string} content collapsible content
+ *
+ * @returns {Summary} summary instance
+ */
+ addDetails(label, content) {
+ const element = this.wrap('details', this.wrap('summary', label) + content);
+ return this.addRaw(element).addEOL();
+ }
+ /**
+ * Adds an HTML image tag to the summary buffer
+ *
+ * @param {string} src path to the image you want to embed
+ * @param {string} alt text description of the image
+ * @param {SummaryImageOptions} options (optional) additional image attributes
+ *
+ * @returns {Summary} summary instance
+ */
+ addImage(src, alt, options) {
+ const { width, height } = options || {};
+ const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));
+ const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));
+ return this.addRaw(element).addEOL();
+ }
+ /**
+ * Adds an HTML section heading element
+ *
+ * @param {string} text heading text
+ * @param {number | string} [level=1] (optional) the heading level, default: 1
+ *
+ * @returns {Summary} summary instance
+ */
+ addHeading(text, level) {
+ const tag = `h${level}`;
+ const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)
+ ? tag
+ : 'h1';
+ const element = this.wrap(allowedTag, text);
+ return this.addRaw(element).addEOL();
+ }
+ /**
+ * Adds an HTML thematic break (<hr>) to the summary buffer
+ *
+ * @returns {Summary} summary instance
+ */
+ addSeparator() {
+ const element = this.wrap('hr', null);
+ return this.addRaw(element).addEOL();
+ }
+ /**
+ * Adds an HTML line break (<br>) to the summary buffer
+ *
+ * @returns {Summary} summary instance
+ */
+ addBreak() {
+ const element = this.wrap('br', null);
+ return this.addRaw(element).addEOL();
+ }
+ /**
+ * Adds an HTML blockquote to the summary buffer
+ *
+ * @param {string} text quote text
+ * @param {string} cite (optional) citation url
+ *
+ * @returns {Summary} summary instance
+ */
+ addQuote(text, cite) {
+ const attrs = Object.assign({}, (cite && { cite }));
+ const element = this.wrap('blockquote', text, attrs);
+ return this.addRaw(element).addEOL();
+ }
+ /**
+ * Adds an HTML anchor tag to the summary buffer
+ *
+ * @param {string} text link text/content
+ * @param {string} href hyperlink
+ *
+ * @returns {Summary} summary instance
+ */
+ addLink(text, href) {
+ const element = this.wrap('a', text, { href });
+ return this.addRaw(element).addEOL();
+ }
+}
+const _summary = new Summary();
+/**
+ * @deprecated use `core.summary`
+ */
+exports.markdownSummary = _summary;
+exports.summary = _summary;
+//# sourceMappingURL=summary.js.map
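+
+ // Illustrative usage sketch (not part of the bundled @actions/core output): the
+ // Summary instance above is what `core.summary` resolves to, so a caller could
+ // build a job summary roughly like this, assuming the runner has set
+ // GITHUB_STEP_SUMMARY (the headings, table data, and URL are placeholders):
+ //
+ //   const core = require('@actions/core');
+ //   await core.summary
+ //     .addHeading('Spec results', 2)
+ //     .addTable([
+ //       [{ data: 'File', header: true }, { data: 'Status', header: true }],
+ //       ['example.spec.json', 'passed']
+ //     ])
+ //     .addLink('Full report', 'https://example.com/report')
+ //     .write();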
+
+/***/ }),
+
+/***/ 5278:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+// We use any as a valid input type
+/* eslint-disable @typescript-eslint/no-explicit-any */
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.toCommandProperties = exports.toCommandValue = void 0;
+/**
+ * Sanitizes an input into a string so it can be passed into issueCommand safely
+ * @param input input to sanitize into a string
+ */
+function toCommandValue(input) {
+ if (input === null || input === undefined) {
+ return '';
+ }
+ else if (typeof input === 'string' || input instanceof String) {
+ return input;
+ }
+ return JSON.stringify(input);
+}
+exports.toCommandValue = toCommandValue;
+/**
+ *
+ * @param annotationProperties
+ * @returns The command properties to send with the actual annotation command
+ * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
+ */
+function toCommandProperties(annotationProperties) {
+ if (!Object.keys(annotationProperties).length) {
+ return {};
+ }
+ return {
+ title: annotationProperties.title,
+ file: annotationProperties.file,
+ line: annotationProperties.startLine,
+ endLine: annotationProperties.endLine,
+ col: annotationProperties.startColumn,
+ endColumn: annotationProperties.endColumn
+ };
+}
+exports.toCommandProperties = toCommandProperties;
+//# sourceMappingURL=utils.js.map
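+
+ // Illustrative sketch (not part of the bundled source): toCommandValue
+ // stringifies any input so it can be embedded in a workflow command, and
+ // toCommandProperties renames annotation fields to the keys the runner expects:
+ //
+ //   toCommandValue(undefined);        // => ''
+ //   toCommandValue({ ok: true });     // => '{"ok":true}'
+ //   toCommandProperties({ title: 'Lint', startLine: 3, endLine: 4 });
+ //   // => { title: 'Lint', file: undefined, line: 3, endLine: 4,
+ //   //      col: undefined, endColumn: undefined }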
+
+/***/ }),
+
+/***/ 4087:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.Context = void 0;
+const fs_1 = __nccwpck_require__(7147);
+const os_1 = __nccwpck_require__(2037);
+class Context {
+ /**
+ * Hydrate the context from the environment
+ */
+ constructor() {
+ var _a, _b, _c;
+ this.payload = {};
+ if (process.env.GITHUB_EVENT_PATH) {
+ if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) {
+ this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
+ }
+ else {
+ const path = process.env.GITHUB_EVENT_PATH;
+ process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
+ }
+ }
+ this.eventName = process.env.GITHUB_EVENT_NAME;
+ this.sha = process.env.GITHUB_SHA;
+ this.ref = process.env.GITHUB_REF;
+ this.workflow = process.env.GITHUB_WORKFLOW;
+ this.action = process.env.GITHUB_ACTION;
+ this.actor = process.env.GITHUB_ACTOR;
+ this.job = process.env.GITHUB_JOB;
+ this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);
+ this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);
+ this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`;
+ this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;
+ this.graphqlUrl =
+ (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;
+ }
+ get issue() {
+ const payload = this.payload;
+ return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
+ }
+ get repo() {
+ if (process.env.GITHUB_REPOSITORY) {
+ const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
+ return { owner, repo };
+ }
+ if (this.payload.repository) {
+ return {
+ owner: this.payload.repository.owner.login,
+ repo: this.payload.repository.name
+ };
+ }
+ throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
+ }
+}
+exports.Context = Context;
+//# sourceMappingURL=context.js.map
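+
+ // Illustrative sketch (not part of the bundled source): Context hydrates itself
+ // from the runner environment, so inside a workflow run a caller could read:
+ //
+ //   const github = require('@actions/github');
+ //   const { owner, repo } = github.context.repo;    // from GITHUB_REPOSITORY
+ //   const pr = github.context.payload.pull_request; // undefined outside PR events
+ //   console.log(`${owner}/${repo} run #${github.context.runNumber}`);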
+
+/***/ }),
+
+/***/ 5438:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getOctokit = exports.context = void 0;
+const Context = __importStar(__nccwpck_require__(4087));
+const utils_1 = __nccwpck_require__(3030);
+exports.context = new Context.Context();
+/**
+ * Returns a hydrated octokit ready to use for GitHub Actions
+ *
+ * @param token the repo PAT or GITHUB_TOKEN
+ * @param options other options to set
+ */
+function getOctokit(token, options, ...additionalPlugins) {
+ const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins);
+ return new GitHubWithPlugins((0, utils_1.getOctokitOptions)(token, options));
+}
+exports.getOctokit = getOctokit;
+//# sourceMappingURL=github.js.map
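+
+ // Illustrative sketch (not part of the bundled source): getOctokit wires the
+ // token into an Octokit client preloaded with the REST and pagination plugins.
+ // A typical caller (the input name is hypothetical) might do:
+ //
+ //   const core = require('@actions/core');
+ //   const github = require('@actions/github');
+ //   const octokit = github.getOctokit(core.getInput('github-token'));
+ //   const { data: pulls } = await octokit.rest.pulls.list({
+ //     ...github.context.repo,
+ //     state: 'open'
+ //   });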
+
+/***/ }),
+
+/***/ 7914:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getApiBaseUrl = exports.getProxyFetch = exports.getProxyAgentDispatcher = exports.getProxyAgent = exports.getAuthString = void 0;
+const httpClient = __importStar(__nccwpck_require__(6255));
+const undici_1 = __nccwpck_require__(1773);
+function getAuthString(token, options) {
+ if (!token && !options.auth) {
+ throw new Error('Parameter token or opts.auth is required');
+ }
+ else if (token && options.auth) {
+ throw new Error('Parameters token and opts.auth may not both be specified');
+ }
+ return typeof options.auth === 'string' ? options.auth : `token ${token}`;
+}
+exports.getAuthString = getAuthString;
+function getProxyAgent(destinationUrl) {
+ const hc = new httpClient.HttpClient();
+ return hc.getAgent(destinationUrl);
+}
+exports.getProxyAgent = getProxyAgent;
+function getProxyAgentDispatcher(destinationUrl) {
+ const hc = new httpClient.HttpClient();
+ return hc.getAgentDispatcher(destinationUrl);
+}
+exports.getProxyAgentDispatcher = getProxyAgentDispatcher;
+function getProxyFetch(destinationUrl) {
+ const httpDispatcher = getProxyAgentDispatcher(destinationUrl);
+ const proxyFetch = (url, opts) => __awaiter(this, void 0, void 0, function* () {
+ return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher }));
+ });
+ return proxyFetch;
+}
+exports.getProxyFetch = getProxyFetch;
+function getApiBaseUrl() {
+ return process.env['GITHUB_API_URL'] || 'https://api.github.com';
+}
+exports.getApiBaseUrl = getApiBaseUrl;
+//# sourceMappingURL=utils.js.map
+
+/***/ }),
+
+/***/ 3030:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0;
+const Context = __importStar(__nccwpck_require__(4087));
+const Utils = __importStar(__nccwpck_require__(7914));
+// octokit + plugins
+const core_1 = __nccwpck_require__(6762);
+const plugin_rest_endpoint_methods_1 = __nccwpck_require__(3044);
+const plugin_paginate_rest_1 = __nccwpck_require__(4193);
+exports.context = new Context.Context();
+const baseUrl = Utils.getApiBaseUrl();
+exports.defaults = {
+ baseUrl,
+ request: {
+ agent: Utils.getProxyAgent(baseUrl),
+ fetch: Utils.getProxyFetch(baseUrl)
+ }
+};
+exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults);
+/**
+ * Convenience function to correctly format Octokit Options to pass into the constructor.
+ *
+ * @param token the repo PAT or GITHUB_TOKEN
+ * @param options other options to set
+ */
+function getOctokitOptions(token, options) {
+ const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller
+ // Auth
+ const auth = Utils.getAuthString(token, opts);
+ if (auth) {
+ opts.auth = auth;
+ }
+ return opts;
+}
+exports.getOctokitOptions = getOctokitOptions;
+//# sourceMappingURL=utils.js.map
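+
+ // Illustrative sketch (not part of the bundled source): getOctokitOptions folds
+ // the token into the auth option without mutating the caller's object, and
+ // GitHub is Octokit preconfigured with the proxy-aware defaults above
+ // ('<token>' is a placeholder):
+ //
+ //   const opts = getOctokitOptions('<token>', { userAgent: 'specs-report' });
+ //   // => { userAgent: 'specs-report', auth: 'token <token>' }
+ //   const client = new GitHub(opts);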
+
+/***/ }),
+
+/***/ 8090:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.hashFiles = exports.create = void 0;
+const internal_globber_1 = __nccwpck_require__(8298);
+const internal_hash_files_1 = __nccwpck_require__(2448);
+/**
+ * Constructs a globber
+ *
+ * @param patterns Patterns separated by newlines
+ * @param options Glob options
+ */
+function create(patterns, options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return yield internal_globber_1.DefaultGlobber.create(patterns, options);
+ });
+}
+exports.create = create;
+/**
+ * Computes the sha256 hash of a glob
+ *
+ * @param patterns Patterns separated by newlines
+ * @param currentWorkspace Workspace used when matching files
+ * @param options Glob options
+ * @param verbose Enables verbose logging
+ */
+function hashFiles(patterns, currentWorkspace = '', options, verbose = false) {
+ return __awaiter(this, void 0, void 0, function* () {
+ let followSymbolicLinks = true;
+ if (options && typeof options.followSymbolicLinks === 'boolean') {
+ followSymbolicLinks = options.followSymbolicLinks;
+ }
+ const globber = yield create(patterns, { followSymbolicLinks });
+ return internal_hash_files_1.hashFiles(globber, currentWorkspace, verbose);
+ });
+}
+exports.hashFiles = hashFiles;
+//# sourceMappingURL=glob.js.map
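+
+ // Illustrative sketch (not part of the bundled source): these two helpers are
+ // the public @actions/glob surface. Patterns are newline-separated and a
+ // leading `!` negates (the patterns shown are placeholders):
+ //
+ //   const glob = require('@actions/glob');
+ //   const globber = await glob.create('**/*.json\n!node_modules/**');
+ //   const files = await globber.glob();                        // absolute matched paths
+ //   const key = await glob.hashFiles('**/package-lock.json');  // sha256 hex, or '' if no match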
+
+/***/ }),
+
+/***/ 1026:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getOptions = void 0;
+const core = __importStar(__nccwpck_require__(2186));
+/**
+ * Returns a copy with defaults filled in.
+ */
+function getOptions(copy) {
+ const result = {
+ followSymbolicLinks: true,
+ implicitDescendants: true,
+ matchDirectories: true,
+ omitBrokenSymbolicLinks: true
+ };
+ if (copy) {
+ if (typeof copy.followSymbolicLinks === 'boolean') {
+ result.followSymbolicLinks = copy.followSymbolicLinks;
+ core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`);
+ }
+ if (typeof copy.implicitDescendants === 'boolean') {
+ result.implicitDescendants = copy.implicitDescendants;
+ core.debug(`implicitDescendants '${result.implicitDescendants}'`);
+ }
+ if (typeof copy.matchDirectories === 'boolean') {
+ result.matchDirectories = copy.matchDirectories;
+ core.debug(`matchDirectories '${result.matchDirectories}'`);
+ }
+ if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {
+ result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;
+ core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);
+ }
+ }
+ return result;
+}
+exports.getOptions = getOptions;
+//# sourceMappingURL=internal-glob-options-helper.js.map
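+
+ // Illustrative sketch (not part of the bundled source): getOptions only
+ // overrides a default when the caller passes an explicit boolean:
+ //
+ //   getOptions({ followSymbolicLinks: false });
+ //   // => { followSymbolicLinks: false, implicitDescendants: true,
+ //   //      matchDirectories: true, omitBrokenSymbolicLinks: true }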
+
+/***/ }),
+
+/***/ 8298:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __asyncValues = (this && this.__asyncValues) || function (o) {
+ if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+ var m = o[Symbol.asyncIterator], i;
+ return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+ function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+ function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+};
+var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }
+var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {
+ if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+ var g = generator.apply(thisArg, _arguments || []), i, q = [];
+ return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
+ function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
+ function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
+ function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
+ function fulfill(value) { resume("next", value); }
+ function reject(value) { resume("throw", value); }
+ function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.DefaultGlobber = void 0;
+const core = __importStar(__nccwpck_require__(2186));
+const fs = __importStar(__nccwpck_require__(7147));
+const globOptionsHelper = __importStar(__nccwpck_require__(1026));
+const path = __importStar(__nccwpck_require__(1017));
+const patternHelper = __importStar(__nccwpck_require__(9005));
+const internal_match_kind_1 = __nccwpck_require__(1063);
+const internal_pattern_1 = __nccwpck_require__(4536);
+const internal_search_state_1 = __nccwpck_require__(9117);
+const IS_WINDOWS = process.platform === 'win32';
+class DefaultGlobber {
+ constructor(options) {
+ this.patterns = [];
+ this.searchPaths = [];
+ this.options = globOptionsHelper.getOptions(options);
+ }
+ getSearchPaths() {
+ // Return a copy
+ return this.searchPaths.slice();
+ }
+ glob() {
+ var e_1, _a;
+ return __awaiter(this, void 0, void 0, function* () {
+ const result = [];
+ try {
+ for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {
+ const itemPath = _c.value;
+ result.push(itemPath);
+ }
+ }
+ catch (e_1_1) { e_1 = { error: e_1_1 }; }
+ finally {
+ try {
+ if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
+ }
+ finally { if (e_1) throw e_1.error; }
+ }
+ return result;
+ });
+ }
+ globGenerator() {
+ return __asyncGenerator(this, arguments, function* globGenerator_1() {
+ // Fill in default options
+ const options = globOptionsHelper.getOptions(this.options);
+ // Implicit descendants?
+ const patterns = [];
+ for (const pattern of this.patterns) {
+ patterns.push(pattern);
+ if (options.implicitDescendants &&
+ (pattern.trailingSeparator ||
+ pattern.segments[pattern.segments.length - 1] !== '**')) {
+ patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**')));
+ }
+ }
+ // Push the search paths
+ const stack = [];
+ for (const searchPath of patternHelper.getSearchPaths(patterns)) {
+ core.debug(`Search path '${searchPath}'`);
+ // Exists?
+ try {
+ // Intentionally using lstat. Detection for broken symlink
+ // will be performed later (if following symlinks).
+ yield __await(fs.promises.lstat(searchPath));
+ }
+ catch (err) {
+ if (err.code === 'ENOENT') {
+ continue;
+ }
+ throw err;
+ }
+ stack.unshift(new internal_search_state_1.SearchState(searchPath, 1));
+ }
+ // Search
+ const traversalChain = []; // used to detect cycles
+ while (stack.length) {
+ // Pop
+ const item = stack.pop();
+ // Match?
+ const match = patternHelper.match(patterns, item.path);
+ const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path);
+ if (!match && !partialMatch) {
+ continue;
+ }
+ // Stat
+ const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain)
+ // Broken symlink, or symlink cycle detected, or no longer exists
+ );
+ // Broken symlink, or symlink cycle detected, or no longer exists
+ if (!stats) {
+ continue;
+ }
+ // Directory
+ if (stats.isDirectory()) {
+ // Matched
+ if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) {
+ yield yield __await(item.path);
+ }
+ // Descend?
+ else if (!partialMatch) {
+ continue;
+ }
+ // Push the child items in reverse
+ const childLevel = item.level + 1;
+ const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel));
+ stack.push(...childItems.reverse());
+ }
+ // File
+ else if (match & internal_match_kind_1.MatchKind.File) {
+ yield yield __await(item.path);
+ }
+ }
+ });
+ }
+ /**
+ * Constructs a DefaultGlobber
+ */
+ static create(patterns, options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const result = new DefaultGlobber(options);
+ if (IS_WINDOWS) {
+ patterns = patterns.replace(/\r\n/g, '\n');
+ patterns = patterns.replace(/\r/g, '\n');
+ }
+ const lines = patterns.split('\n').map(x => x.trim());
+ for (const line of lines) {
+ // Empty or comment
+ if (!line || line.startsWith('#')) {
+ continue;
+ }
+ // Pattern
+ else {
+ result.patterns.push(new internal_pattern_1.Pattern(line));
+ }
+ }
+ result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns));
+ return result;
+ });
+ }
+ static stat(item, options, traversalChain) {
+ return __awaiter(this, void 0, void 0, function* () {
+ // Note:
+ // `stat` returns info about the target of a symlink (or symlink chain)
+ // `lstat` returns info about a symlink itself
+ let stats;
+ if (options.followSymbolicLinks) {
+ try {
+ // Use `stat` (following symlinks)
+ stats = yield fs.promises.stat(item.path);
+ }
+ catch (err) {
+ if (err.code === 'ENOENT') {
+ if (options.omitBrokenSymbolicLinks) {
+ core.debug(`Broken symlink '${item.path}'`);
+ return undefined;
+ }
+ throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`);
+ }
+ throw err;
+ }
+ }
+ else {
+ // Use `lstat` (not following symlinks)
+ stats = yield fs.promises.lstat(item.path);
+ }
+ // Note, isDirectory() returns false for the lstat of a symlink
+ if (stats.isDirectory() && options.followSymbolicLinks) {
+ // Get the realpath
+ const realPath = yield fs.promises.realpath(item.path);
+ // Fixup the traversal chain to match the item level
+ while (traversalChain.length >= item.level) {
+ traversalChain.pop();
+ }
+ // Test for a cycle
+ if (traversalChain.some((x) => x === realPath)) {
+ core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);
+ return undefined;
+ }
+ // Update the traversal chain
+ traversalChain.push(realPath);
+ }
+ return stats;
+ });
+ }
+}
+exports.DefaultGlobber = DefaultGlobber;
+//# sourceMappingURL=internal-globber.js.map
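+
+ // Illustrative sketch (not part of the bundled source): DefaultGlobber resolves
+ // the search paths, then walks them with an explicit stack; matches can be
+ // streamed from the async generator instead of buffered via glob()
+ // (the pattern is a placeholder):
+ //
+ //   const globber = await DefaultGlobber.create('reports/**/*.json');
+ //   for await (const file of globber.globGenerator()) {
+ //     console.log(file);
+ //   }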
+
+/***/ }),
+
+/***/ 2448:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __asyncValues = (this && this.__asyncValues) || function (o) {
+ if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+ var m = o[Symbol.asyncIterator], i;
+ return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+ function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+ function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.hashFiles = void 0;
+const crypto = __importStar(__nccwpck_require__(6113));
+const core = __importStar(__nccwpck_require__(2186));
+const fs = __importStar(__nccwpck_require__(7147));
+const stream = __importStar(__nccwpck_require__(2781));
+const util = __importStar(__nccwpck_require__(3837));
+const path = __importStar(__nccwpck_require__(1017));
+function hashFiles(globber, currentWorkspace, verbose = false) {
+ var e_1, _a;
+ var _b;
+ return __awaiter(this, void 0, void 0, function* () {
+ const writeDelegate = verbose ? core.info : core.debug;
+ let hasMatch = false;
+ const githubWorkspace = currentWorkspace
+ ? currentWorkspace
+ : (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
+ const result = crypto.createHash('sha256');
+ let count = 0;
+ try {
+ for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
+ const file = _d.value;
+ writeDelegate(file);
+ if (!file.startsWith(`${githubWorkspace}${path.sep}`)) {
+ writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
+ continue;
+ }
+ if (fs.statSync(file).isDirectory()) {
+ writeDelegate(`Skip directory '${file}'.`);
+ continue;
+ }
+ const hash = crypto.createHash('sha256');
+ const pipeline = util.promisify(stream.pipeline);
+ yield pipeline(fs.createReadStream(file), hash);
+ result.write(hash.digest());
+ count++;
+ if (!hasMatch) {
+ hasMatch = true;
+ }
+ }
+ }
+ catch (e_1_1) { e_1 = { error: e_1_1 }; }
+ finally {
+ try {
+ if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
+ }
+ finally { if (e_1) throw e_1.error; }
+ }
+ result.end();
+ if (hasMatch) {
+ writeDelegate(`Found ${count} files to hash.`);
+ return result.digest('hex');
+ }
+ else {
+ writeDelegate(`No matches found for glob`);
+ return '';
+ }
+ });
+}
+exports.hashFiles = hashFiles;
+//# sourceMappingURL=internal-hash-files.js.map
+
+/***/ }),
+
+/***/ 1063:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.MatchKind = void 0;
+/**
+ * Indicates whether a pattern matches a path
+ */
+var MatchKind;
+(function (MatchKind) {
+ /** Not matched */
+ MatchKind[MatchKind["None"] = 0] = "None";
+ /** Matched if the path is a directory */
+ MatchKind[MatchKind["Directory"] = 1] = "Directory";
+ /** Matched if the path is a regular file */
+ MatchKind[MatchKind["File"] = 2] = "File";
+ /** Matched */
+ MatchKind[MatchKind["All"] = 3] = "All";
+})(MatchKind = exports.MatchKind || (exports.MatchKind = {}));
+//# sourceMappingURL=internal-match-kind.js.map
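+
+ // Illustrative sketch (not part of the bundled source): MatchKind is a bit
+ // mask, so the globber can test directory and file matches independently:
+ //
+ //   (MatchKind.Directory | MatchKind.File) === MatchKind.All; // true (1 | 2 === 3)
+ //   Boolean(MatchKind.All & MatchKind.File);                  // true
+ //   Boolean(MatchKind.Directory & MatchKind.File);            // false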
+
+/***/ }),
+
+/***/ 1849:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0;
+const path = __importStar(__nccwpck_require__(1017));
+const assert_1 = __importDefault(__nccwpck_require__(9491));
+const IS_WINDOWS = process.platform === 'win32';
+/**
+ * Similar to path.dirname, except it normalizes the path separators and handles Windows UNC paths slightly better.
+ *
+ * For example, on Linux/macOS:
+ * - `/ => /`
+ * - `/hello => /`
+ *
+ * For example, on Windows:
+ * - `C:\ => C:\`
+ * - `C:\hello => C:\`
+ * - `C: => C:`
+ * - `C:hello => C:`
+ * - `\ => \`
+ * - `\hello => \`
+ * - `\\hello => \\hello`
+ * - `\\hello\world => \\hello\world`
+ */
+function dirname(p) {
+ // Normalize slashes and trim unnecessary trailing slash
+ p = safeTrimTrailingSeparator(p);
+ // Windows UNC root, e.g. \\hello or \\hello\world
+ if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) {
+ return p;
+ }
+ // Get dirname
+ let result = path.dirname(p);
+ // Trim trailing slash for Windows UNC root, e.g. \\hello\world\
+ if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) {
+ result = safeTrimTrailingSeparator(result);
+ }
+ return result;
+}
+exports.dirname = dirname;
+/**
+ * Roots the path if not already rooted. On Windows, relative roots like `\`
+ * or `C:` are expanded based on the current working directory.
+ */
+function ensureAbsoluteRoot(root, itemPath) {
+ assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);
+ assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);
+ // Already rooted
+ if (hasAbsoluteRoot(itemPath)) {
+ return itemPath;
+ }
+ // Windows
+ if (IS_WINDOWS) {
+ // Check for itemPath like C: or C:foo
+ if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) {
+ let cwd = process.cwd();
+ assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
+ // Drive letter matches cwd? Expand to cwd
+ if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) {
+ // Drive only, e.g. C:
+ if (itemPath.length === 2) {
+ // Preserve specified drive letter case (upper or lower)
+ return `${itemPath[0]}:\\${cwd.substr(3)}`;
+ }
+ // Drive + path, e.g. C:foo
+ else {
+ if (!cwd.endsWith('\\')) {
+ cwd += '\\';
+ }
+ // Preserve specified drive letter case (upper or lower)
+ return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`;
+ }
+ }
+ // Different drive
+ else {
+ return `${itemPath[0]}:\\${itemPath.substr(2)}`;
+ }
+ }
+ // Check for itemPath like \ or \foo
+ else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) {
+ const cwd = process.cwd();
+ assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
+ return `${cwd[0]}:\\${itemPath.substr(1)}`;
+ }
+ }
+ assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);
+ // Otherwise ensure root ends with a separator
+ if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) {
+ // Intentionally empty
+ }
+ else {
+ // Append separator
+ root += path.sep;
+ }
+ return root + itemPath;
+}
+exports.ensureAbsoluteRoot = ensureAbsoluteRoot;
+/**
+ * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
+ * `\\hello\share` and `C:\hello` (and using alternate separator).
+ */
+function hasAbsoluteRoot(itemPath) {
+ assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);
+ // Normalize separators
+ itemPath = normalizeSeparators(itemPath);
+ // Windows
+ if (IS_WINDOWS) {
+ // E.g. \\hello\share or C:\hello
+ return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath);
+ }
+ // E.g. /hello
+ return itemPath.startsWith('/');
+}
+exports.hasAbsoluteRoot = hasAbsoluteRoot;
+/**
+ * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
+ * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator).
+ */
+function hasRoot(itemPath) {
+ assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`);
+ // Normalize separators
+ itemPath = normalizeSeparators(itemPath);
+ // Windows
+ if (IS_WINDOWS) {
+ // E.g. \ or \hello or \\hello
+ // E.g. C: or C:\hello
+ return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath);
+ }
+ // E.g. /hello
+ return itemPath.startsWith('/');
+}
+exports.hasRoot = hasRoot;
+/**
+ * Removes redundant slashes and converts `/` to `\` on Windows
+ */
+function normalizeSeparators(p) {
+ p = p || '';
+ // Windows
+ if (IS_WINDOWS) {
+ // Convert slashes on Windows
+ p = p.replace(/\//g, '\\');
+ // Remove redundant slashes
+ const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello
+ return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC
+ }
+ // Remove redundant slashes
+ return p.replace(/\/\/+/g, '/');
+}
+exports.normalizeSeparators = normalizeSeparators;
+/**
+ * Normalizes the path separators and trims the trailing separator (when safe).
+ * For example, `/foo/ => /foo` but `/ => /`
+ */
+function safeTrimTrailingSeparator(p) {
+ // Short-circuit if empty
+ if (!p) {
+ return '';
+ }
+ // Normalize separators
+ p = normalizeSeparators(p);
+ // No trailing slash
+ if (!p.endsWith(path.sep)) {
+ return p;
+ }
+ // Check '/' on Linux/macOS and '\' on Windows
+ if (p === path.sep) {
+ return p;
+ }
+ // On Windows check if drive root. E.g. C:\
+ if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) {
+ return p;
+ }
+ // Otherwise trim trailing slash
+ return p.substr(0, p.length - 1);
+}
+exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;
+//# sourceMappingURL=internal-path-helper.js.map
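+
+ // Illustrative sketch (not part of the bundled source), using Linux-style paths:
+ //
+ //   normalizeSeparators('//foo//bar/');      // => '/foo/bar/'
+ //   safeTrimTrailingSeparator('/foo/bar/');  // => '/foo/bar'
+ //   safeTrimTrailingSeparator('/');          // => '/'  (root is preserved)
+ //   dirname('/foo/bar/');                    // => '/foo'
+ //   ensureAbsoluteRoot('/work', 'src/app');  // => '/work/src/app'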
+
+/***/ }),
+
+/***/ 6836:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.Path = void 0;
+const path = __importStar(__nccwpck_require__(1017));
+const pathHelper = __importStar(__nccwpck_require__(1849));
+const assert_1 = __importDefault(__nccwpck_require__(9491));
+const IS_WINDOWS = process.platform === 'win32';
+/**
+ * Helper class for parsing paths into segments
+ */
+class Path {
+ /**
+ * Constructs a Path
+ * @param itemPath Path or array of segments
+ */
+ constructor(itemPath) {
+ this.segments = [];
+ // String
+ if (typeof itemPath === 'string') {
+ assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`);
+ // Normalize slashes and trim unnecessary trailing slash
+ itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
+ // Not rooted
+ if (!pathHelper.hasRoot(itemPath)) {
+ this.segments = itemPath.split(path.sep);
+ }
+ // Rooted
+ else {
+ // Add all segments, while not at the root
+ let remaining = itemPath;
+ let dir = pathHelper.dirname(remaining);
+ while (dir !== remaining) {
+ // Add the segment
+ const basename = path.basename(remaining);
+ this.segments.unshift(basename);
+ // Truncate the last segment
+ remaining = dir;
+ dir = pathHelper.dirname(remaining);
+ }
+ // Remainder is the root
+ this.segments.unshift(remaining);
+ }
+ }
+ // Array
+ else {
+ // Must not be empty
+ assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
+ // Each segment
+ for (let i = 0; i < itemPath.length; i++) {
+ let segment = itemPath[i];
+ // Must not be empty
+ assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`);
+ // Normalize slashes
+ segment = pathHelper.normalizeSeparators(itemPath[i]);
+ // Root segment
+ if (i === 0 && pathHelper.hasRoot(segment)) {
+ segment = pathHelper.safeTrimTrailingSeparator(segment);
+ assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
+ this.segments.push(segment);
+ }
+ // All other segments
+ else {
+ // Must not contain slash
+ assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
+ this.segments.push(segment);
+ }
+ }
+ }
+ }
+ /**
+ * Converts the path to its string representation
+ */
+ toString() {
+ // First segment
+ let result = this.segments[0];
+ // All others
+ let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result));
+ for (let i = 1; i < this.segments.length; i++) {
+ if (skipSlash) {
+ skipSlash = false;
+ }
+ else {
+ result += path.sep;
+ }
+ result += this.segments[i];
+ }
+ return result;
+ }
+}
+exports.Path = Path;
+//# sourceMappingURL=internal-path.js.map
+
+/***/ }),
+
+/***/ 9005:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.partialMatch = exports.match = exports.getSearchPaths = void 0;
+const pathHelper = __importStar(__nccwpck_require__(1849));
+const internal_match_kind_1 = __nccwpck_require__(1063);
+const IS_WINDOWS = process.platform === 'win32';
+/**
+ * Given an array of patterns, returns an array of paths to search.
+ * Duplicates and paths under other included paths are filtered out.
+ */
+function getSearchPaths(patterns) {
+ // Ignore negate patterns
+ patterns = patterns.filter(x => !x.negate);
+ // Create a map of all search paths
+ const searchPathMap = {};
+ for (const pattern of patterns) {
+ const key = IS_WINDOWS
+ ? pattern.searchPath.toUpperCase()
+ : pattern.searchPath;
+ searchPathMap[key] = 'candidate';
+ }
+ const result = [];
+ for (const pattern of patterns) {
+ // Check if already included
+ const key = IS_WINDOWS
+ ? pattern.searchPath.toUpperCase()
+ : pattern.searchPath;
+ if (searchPathMap[key] === 'included') {
+ continue;
+ }
+ // Check for an ancestor search path
+ let foundAncestor = false;
+ let tempKey = key;
+ let parent = pathHelper.dirname(tempKey);
+ while (parent !== tempKey) {
+ if (searchPathMap[parent]) {
+ foundAncestor = true;
+ break;
+ }
+ tempKey = parent;
+ parent = pathHelper.dirname(tempKey);
+ }
+ // Include the search pattern in the result
+ if (!foundAncestor) {
+ result.push(pattern.searchPath);
+ searchPathMap[key] = 'included';
+ }
+ }
+ return result;
+}
+exports.getSearchPaths = getSearchPaths;
+/**
+ * Matches the patterns against the path
+ */
+function match(patterns, itemPath) {
+ let result = internal_match_kind_1.MatchKind.None;
+ for (const pattern of patterns) {
+ if (pattern.negate) {
+ result &= ~pattern.match(itemPath);
+ }
+ else {
+ result |= pattern.match(itemPath);
+ }
+ }
+ return result;
+}
+exports.match = match;
+/**
+ * Checks whether to descend further into the directory
+ */
+function partialMatch(patterns, itemPath) {
+ return patterns.some(x => !x.negate && x.partialMatch(itemPath));
+}
+exports.partialMatch = partialMatch;
+//# sourceMappingURL=internal-pattern-helper.js.map
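+
+ // Illustrative sketch (not part of the bundled source), assuming a Linux runner
+ // and placeholder paths: include patterns OR their bits in, negate patterns
+ // mask them back out.
+ //
+ //   const patterns = [new Pattern('/work/**'), new Pattern('!/work/skip/**')];
+ //   match(patterns, '/work/a.txt');      // => MatchKind.All
+ //   match(patterns, '/work/skip/b.txt'); // => MatchKind.None
+ //   partialMatch(patterns, '/work');     // => true (worth descending into)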
+
+/***/ }),
+
+/***/ 4536:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.Pattern = void 0;
+const os = __importStar(__nccwpck_require__(2037));
+const path = __importStar(__nccwpck_require__(1017));
+const pathHelper = __importStar(__nccwpck_require__(1849));
+const assert_1 = __importDefault(__nccwpck_require__(9491));
+const minimatch_1 = __nccwpck_require__(2680);
+const internal_match_kind_1 = __nccwpck_require__(1063);
+const internal_path_1 = __nccwpck_require__(6836);
+const IS_WINDOWS = process.platform === 'win32';
+class Pattern {
+ constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) {
+ /**
+ * Indicates whether matches should be excluded from the result set
+ */
+ this.negate = false;
+ // Pattern overload
+ let pattern;
+ if (typeof patternOrNegate === 'string') {
+ pattern = patternOrNegate.trim();
+ }
+ // Segments overload
+ else {
+ // Convert to pattern
+ segments = segments || [];
+ assert_1.default(segments.length, `Parameter 'segments' must not be empty`);
+ const root = Pattern.getLiteral(segments[0]);
+ assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);
+ pattern = new internal_path_1.Path(segments).toString().trim();
+ if (patternOrNegate) {
+ pattern = `!${pattern}`;
+ }
+ }
+ // Negate
+ while (pattern.startsWith('!')) {
+ this.negate = !this.negate;
+ pattern = pattern.substr(1).trim();
+ }
+ // Normalize slashes and ensures absolute root
+ pattern = Pattern.fixupPattern(pattern, homedir);
+ // Segments
+ this.segments = new internal_path_1.Path(pattern).segments;
+ // Trailing slash indicates the pattern should only match directories, not regular files
+ this.trailingSeparator = pathHelper
+ .normalizeSeparators(pattern)
+ .endsWith(path.sep);
+ pattern = pathHelper.safeTrimTrailingSeparator(pattern);
+ // Search path (literal path prior to the first glob segment)
+ let foundGlob = false;
+ const searchSegments = this.segments
+ .map(x => Pattern.getLiteral(x))
+ .filter(x => !foundGlob && !(foundGlob = x === ''));
+ this.searchPath = new internal_path_1.Path(searchSegments).toString();
+ // Root RegExp (required when determining partial match)
+ this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : '');
+ this.isImplicitPattern = isImplicitPattern;
+ // Create minimatch
+ const minimatchOptions = {
+ dot: true,
+ nobrace: true,
+ nocase: IS_WINDOWS,
+ nocomment: true,
+ noext: true,
+ nonegate: true
+ };
+ pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern;
+ this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions);
+ }
+ /**
+ * Matches the pattern against the specified path
+ */
+ match(itemPath) {
+ // Last segment is globstar?
+ if (this.segments[this.segments.length - 1] === '**') {
+ // Normalize slashes
+ itemPath = pathHelper.normalizeSeparators(itemPath);
+ // Append a trailing slash. Otherwise Minimatch will not match the directory immediately
+ // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns
+ // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk.
+ if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) {
+ // Note, this is safe because the constructor ensures the pattern has an absolute root.
+ // For example, formats like C: and C:foo on Windows are resolved to an absolute root.
+ itemPath = `${itemPath}${path.sep}`;
+ }
+ }
+ else {
+ // Normalize slashes and trim unnecessary trailing slash
+ itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
+ }
+ // Match
+ if (this.minimatch.match(itemPath)) {
+ return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All;
+ }
+ return internal_match_kind_1.MatchKind.None;
+ }
+ /**
+ * Indicates whether the pattern may match descendants of the specified path
+ */
+ partialMatch(itemPath) {
+ // Normalize slashes and trim unnecessary trailing slash
+ itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
+ // matchOne does not handle root path correctly
+ if (pathHelper.dirname(itemPath) === itemPath) {
+ return this.rootRegExp.test(itemPath);
+ }
+ return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true);
+ }
+ /**
+ * Escapes glob patterns within a path
+ */
+ static globEscape(s) {
+ return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS
+ .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment
+ .replace(/\?/g, '[?]') // escape '?'
+ .replace(/\*/g, '[*]'); // escape '*'
+ }
+ /**
+ * Normalizes slashes and ensures absolute root
+ */
+ static fixupPattern(pattern, homedir) {
+ // Empty
+ assert_1.default(pattern, 'pattern cannot be empty');
+ // Must not contain `.` segment, unless first segment
+ // Must not contain `..` segment
+ const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x));
+ assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);
+ // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r
+ assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);
+ // Normalize slashes
+ pattern = pathHelper.normalizeSeparators(pattern);
+ // Replace leading `.` segment
+ if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) {
+ pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1);
+ }
+ // Replace leading `~` segment
+ else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) {
+ homedir = homedir || os.homedir();
+ assert_1.default(homedir, 'Unable to determine HOME directory');
+ assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);
+ pattern = Pattern.globEscape(homedir) + pattern.substr(1);
+ }
+ // Replace relative drive root, e.g. pattern is C: or C:foo
+ else if (IS_WINDOWS &&
+ (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) {
+ let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2));
+ if (pattern.length > 2 && !root.endsWith('\\')) {
+ root += '\\';
+ }
+ pattern = Pattern.globEscape(root) + pattern.substr(2);
+ }
+ // Replace relative root, e.g. pattern is \ or \foo
+ else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) {
+ let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\');
+ if (!root.endsWith('\\')) {
+ root += '\\';
+ }
+ pattern = Pattern.globEscape(root) + pattern.substr(1);
+ }
+ // Otherwise ensure absolute root
+ else {
+ pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern);
+ }
+ return pathHelper.normalizeSeparators(pattern);
+ }
+ /**
+ * Attempts to unescape a pattern segment to create a literal path segment.
+ * Otherwise returns empty string.
+ */
+ static getLiteral(segment) {
+ let literal = '';
+ for (let i = 0; i < segment.length; i++) {
+ const c = segment[i];
+ // Escape
+ if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) {
+ literal += segment[++i];
+ continue;
+ }
+ // Wildcard
+ else if (c === '*' || c === '?') {
+ return '';
+ }
+ // Character set
+ else if (c === '[' && i + 1 < segment.length) {
+ let set = '';
+ let closed = -1;
+ for (let i2 = i + 1; i2 < segment.length; i2++) {
+ const c2 = segment[i2];
+ // Escape
+ if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) {
+ set += segment[++i2];
+ continue;
+ }
+ // Closed
+ else if (c2 === ']') {
+ closed = i2;
+ break;
+ }
+ // Otherwise
+ else {
+ set += c2;
+ }
+ }
+ // Closed?
+ if (closed >= 0) {
+ // Cannot convert
+ if (set.length > 1) {
+ return '';
+ }
+ // Convert to literal
+ if (set) {
+ literal += set;
+ i = closed;
+ continue;
+ }
+ }
+ // Otherwise fall thru
+ }
+ // Append
+ literal += c;
+ }
+ return literal;
+ }
+ /**
+ * Escapes regexp special characters
+ * https://javascript.info/regexp-escaping
+ */
+ static regExpEscape(s) {
+ return s.replace(/[[\\^$.|?*+()]/g, '\\$&');
+ }
+}
+exports.Pattern = Pattern;
+//# sourceMappingURL=internal-pattern.js.map
+
+/***/ }),
+
+/***/ 9117:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.SearchState = void 0;
+class SearchState {
+ constructor(path, level) {
+ this.path = path;
+ this.level = level;
+ }
+}
+exports.SearchState = SearchState;
+//# sourceMappingURL=internal-search-state.js.map
+
+/***/ }),
+
+/***/ 9144:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var concatMap = __nccwpck_require__(6891);
+var balanced = __nccwpck_require__(9417);
+
+module.exports = expandTop;
+
+var escSlash = '\0SLASH'+Math.random()+'\0';
+var escOpen = '\0OPEN'+Math.random()+'\0';
+var escClose = '\0CLOSE'+Math.random()+'\0';
+var escComma = '\0COMMA'+Math.random()+'\0';
+var escPeriod = '\0PERIOD'+Math.random()+'\0';
+
+function numeric(str) {
+ return parseInt(str, 10) == str
+ ? parseInt(str, 10)
+ : str.charCodeAt(0);
+}
+
+function escapeBraces(str) {
+ return str.split('\\\\').join(escSlash)
+ .split('\\{').join(escOpen)
+ .split('\\}').join(escClose)
+ .split('\\,').join(escComma)
+ .split('\\.').join(escPeriod);
+}
+
+function unescapeBraces(str) {
+ return str.split(escSlash).join('\\')
+ .split(escOpen).join('{')
+ .split(escClose).join('}')
+ .split(escComma).join(',')
+ .split(escPeriod).join('.');
+}
+
+
+// Basically just str.split(","), but handling cases
+// where we have nested braced sections, which should be
+// treated as individual members, like {a,{b,c},d}
+function parseCommaParts(str) {
+ if (!str)
+ return [''];
+
+ var parts = [];
+ var m = balanced('{', '}', str);
+
+ if (!m)
+ return str.split(',');
+
+ var pre = m.pre;
+ var body = m.body;
+ var post = m.post;
+ var p = pre.split(',');
+
+ p[p.length-1] += '{' + body + '}';
+ var postParts = parseCommaParts(post);
+ if (post.length) {
+ p[p.length-1] += postParts.shift();
+ p.push.apply(p, postParts);
+ }
+
+ parts.push.apply(parts, p);
+
+ return parts;
+}
+
+function expandTop(str) {
+ if (!str)
+ return [];
+
+ // I don't know why Bash 4.3 does this, but it does.
+ // Anything starting with {} will have the first two bytes preserved
+ // but *only* at the top level, so {},a}b will not expand to anything,
+ // but a{},b}c will be expanded to [a}c,abc].
+ // One could argue that this is a bug in Bash, but since the goal of
+ // this module is to match Bash's rules, we escape a leading {}
+ if (str.substr(0, 2) === '{}') {
+ str = '\\{\\}' + str.substr(2);
+ }
+
+ return expand(escapeBraces(str), true).map(unescapeBraces);
+}
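+// Editor's illustration (not part of the upstream brace-expansion sources);
+// expandTop mirrors Bash brace expansion, for example:
+//   expandTop('a{b,c}d')   // => ['abd', 'acd']
+//   expandTop('a{1..3}b')  // => ['a1b', 'a2b', 'a3b']
+//   expandTop('a{2..}b')   // => ['a{2..}b']  (invalid sets are not expanded)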
+
+function identity(e) {
+ return e;
+}
+
+function embrace(str) {
+ return '{' + str + '}';
+}
+function isPadded(el) {
+ return /^-?0\d/.test(el);
+}
+
+function lte(i, y) {
+ return i <= y;
+}
+function gte(i, y) {
+ return i >= y;
+}
+
+function expand(str, isTop) {
+ var expansions = [];
+
+ var m = balanced('{', '}', str);
+ if (!m || /\$$/.test(m.pre)) return [str];
+
+ var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
+ var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
+ var isSequence = isNumericSequence || isAlphaSequence;
+ var isOptions = m.body.indexOf(',') >= 0;
+ if (!isSequence && !isOptions) {
+ // {a},b}
+ if (m.post.match(/,.*\}/)) {
+ str = m.pre + '{' + m.body + escClose + m.post;
+ return expand(str);
+ }
+ return [str];
+ }
+
+ var n;
+ if (isSequence) {
+ n = m.body.split(/\.\./);
+ } else {
+ n = parseCommaParts(m.body);
+ if (n.length === 1) {
+ // x{{a,b}}y ==> x{a}y x{b}y
+ n = expand(n[0], false).map(embrace);
+ if (n.length === 1) {
+ var post = m.post.length
+ ? expand(m.post, false)
+ : [''];
+ return post.map(function(p) {
+ return m.pre + n[0] + p;
+ });
+ }
+ }
+ }
+
+ // at this point, n is the parts, and we know it's not a comma set
+ // with a single entry.
+
+ // no need to expand pre, since it is guaranteed to be free of brace-sets
+ var pre = m.pre;
+ var post = m.post.length
+ ? expand(m.post, false)
+ : [''];
+
+ var N;
+
+ if (isSequence) {
+ var x = numeric(n[0]);
+ var y = numeric(n[1]);
+ var width = Math.max(n[0].length, n[1].length)
+ var incr = n.length == 3
+ ? Math.abs(numeric(n[2]))
+ : 1;
+ var test = lte;
+ var reverse = y < x;
+ if (reverse) {
+ incr *= -1;
+ test = gte;
+ }
+ var pad = n.some(isPadded);
+
+ N = [];
+
+ for (var i = x; test(i, y); i += incr) {
+ var c;
+ if (isAlphaSequence) {
+ c = String.fromCharCode(i);
+ if (c === '\\')
+ c = '';
+ } else {
+ c = String(i);
+ if (pad) {
+ var need = width - c.length;
+ if (need > 0) {
+ var z = new Array(need + 1).join('0');
+ if (i < 0)
+ c = '-' + z + c.slice(1);
+ else
+ c = z + c;
+ }
+ }
+ }
+ N.push(c);
+ }
+ } else {
+ N = concatMap(n, function(el) { return expand(el, false) });
+ }
+
+ for (var j = 0; j < N.length; j++) {
+ for (var k = 0; k < post.length; k++) {
+ var expansion = pre + N[j] + post[k];
+ if (!isTop || isSequence || expansion)
+ expansions.push(expansion);
+ }
+ }
+
+ return expansions;
+}
+
+
+
+/***/ }),
+
+/***/ 2680:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+module.exports = minimatch
+minimatch.Minimatch = Minimatch
+
+var path = (function () { try { return __nccwpck_require__(1017) } catch (e) {}}()) || {
+ sep: '/'
+}
+minimatch.sep = path.sep
+
+var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
+var expand = __nccwpck_require__(9144)
+
+var plTypes = {
+ '!': { open: '(?:(?!(?:', close: '))[^/]*?)'},
+ '?': { open: '(?:', close: ')?' },
+ '+': { open: '(?:', close: ')+' },
+ '*': { open: '(?:', close: ')*' },
+ '@': { open: '(?:', close: ')' }
+}
+
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+var qmark = '[^/]'
+
+// * => any number of characters
+var star = qmark + '*?'
+
+// ** when dots are allowed. Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'
+
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'
+
+// characters that need to be escaped in RegExp.
+var reSpecials = charSet('().*{}+?[]^$\\!')
+
+// "abc" -> { a:true, b:true, c:true }
+function charSet (s) {
+ return s.split('').reduce(function (set, c) {
+ set[c] = true
+ return set
+ }, {})
+}
+
+// normalizes slashes.
+var slashSplit = /\/+/
+
+minimatch.filter = filter
+function filter (pattern, options) {
+ options = options || {}
+ return function (p, i, list) {
+ return minimatch(p, pattern, options)
+ }
+}
+
+function ext (a, b) {
+ b = b || {}
+ var t = {}
+ Object.keys(a).forEach(function (k) {
+ t[k] = a[k]
+ })
+ Object.keys(b).forEach(function (k) {
+ t[k] = b[k]
+ })
+ return t
+}
+
+minimatch.defaults = function (def) {
+ if (!def || typeof def !== 'object' || !Object.keys(def).length) {
+ return minimatch
+ }
+
+ var orig = minimatch
+
+ var m = function minimatch (p, pattern, options) {
+ return orig(p, pattern, ext(def, options))
+ }
+
+ m.Minimatch = function Minimatch (pattern, options) {
+ return new orig.Minimatch(pattern, ext(def, options))
+ }
+ m.Minimatch.defaults = function defaults (options) {
+ return orig.defaults(ext(def, options)).Minimatch
+ }
+
+ m.filter = function filter (pattern, options) {
+ return orig.filter(pattern, ext(def, options))
+ }
+
+ m.defaults = function defaults (options) {
+ return orig.defaults(ext(def, options))
+ }
+
+ m.makeRe = function makeRe (pattern, options) {
+ return orig.makeRe(pattern, ext(def, options))
+ }
+
+ m.braceExpand = function braceExpand (pattern, options) {
+ return orig.braceExpand(pattern, ext(def, options))
+ }
+
+ m.match = function (list, pattern, options) {
+ return orig.match(list, pattern, ext(def, options))
+ }
+
+ return m
+}
+
+Minimatch.defaults = function (def) {
+ return minimatch.defaults(def).Minimatch
+}
+
+function minimatch (p, pattern, options) {
+ assertValidPattern(pattern)
+
+ if (!options) options = {}
+
+ // shortcut: comments match nothing.
+ if (!options.nocomment && pattern.charAt(0) === '#') {
+ return false
+ }
+
+ return new Minimatch(pattern, options).match(p)
+}
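+// Editor's illustration (not part of the upstream minimatch sources); the
+// matching rules implemented below give, for example:
+//   minimatch('bar.foo', '*.foo')            // => true
+//   minimatch('bar.foo', '*.bar')            // => false
+//   minimatch('lib/a/b/c.js', 'lib/**/*.js') // => true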
+
+function Minimatch (pattern, options) {
+ if (!(this instanceof Minimatch)) {
+ return new Minimatch(pattern, options)
+ }
+
+ assertValidPattern(pattern)
+
+ if (!options) options = {}
+
+ pattern = pattern.trim()
+
+ // windows support: need to use /, not \
+ if (!options.allowWindowsEscape && path.sep !== '/') {
+ pattern = pattern.split(path.sep).join('/')
+ }
+
+ this.options = options
+ this.set = []
+ this.pattern = pattern
+ this.regexp = null
+ this.negate = false
+ this.comment = false
+ this.empty = false
+ this.partial = !!options.partial
+
+ // make the set of regexps etc.
+ this.make()
+}
+
+Minimatch.prototype.debug = function () {}
+
+Minimatch.prototype.make = make
+function make () {
+ var pattern = this.pattern
+ var options = this.options
+
+ // empty patterns and comments match nothing.
+ if (!options.nocomment && pattern.charAt(0) === '#') {
+ this.comment = true
+ return
+ }
+ if (!pattern) {
+ this.empty = true
+ return
+ }
+
+ // step 1: figure out negation, etc.
+ this.parseNegate()
+
+ // step 2: expand braces
+ var set = this.globSet = this.braceExpand()
+
+ if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) }
+
+ this.debug(this.pattern, set)
+
+ // step 3: now we have a set, so turn each one into a series of path-portion
+ // matching patterns.
+ // These will be regexps, except in the case of "**", which is
+ // set to the GLOBSTAR object for globstar behavior,
+ // and will not contain any / characters
+ set = this.globParts = set.map(function (s) {
+ return s.split(slashSplit)
+ })
+
+ this.debug(this.pattern, set)
+
+ // glob --> regexps
+ set = set.map(function (s, si, set) {
+ return s.map(this.parse, this)
+ }, this)
+
+ this.debug(this.pattern, set)
+
+ // filter out everything that didn't compile properly.
+ set = set.filter(function (s) {
+ return s.indexOf(false) === -1
+ })
+
+ this.debug(this.pattern, set)
+
+ this.set = set
+}
+
+Minimatch.prototype.parseNegate = parseNegate
+function parseNegate () {
+ var pattern = this.pattern
+ var negate = false
+ var options = this.options
+ var negateOffset = 0
+
+ if (options.nonegate) return
+
+ for (var i = 0, l = pattern.length
+ ; i < l && pattern.charAt(i) === '!'
+ ; i++) {
+ negate = !negate
+ negateOffset++
+ }
+
+ if (negateOffset) this.pattern = pattern.substr(negateOffset)
+ this.negate = negate
+}
+
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg abdfg acdeg acdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+minimatch.braceExpand = function (pattern, options) {
+ return braceExpand(pattern, options)
+}
+
+Minimatch.prototype.braceExpand = braceExpand
+
+function braceExpand (pattern, options) {
+ if (!options) {
+ if (this instanceof Minimatch) {
+ options = this.options
+ } else {
+ options = {}
+ }
+ }
+
+ pattern = typeof pattern === 'undefined'
+ ? this.pattern : pattern
+
+ assertValidPattern(pattern)
+
+ // Thanks to Yeting Li for
+ // improving this regexp to avoid a ReDOS vulnerability.
+ if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
+ // shortcut. no need to expand.
+ return [pattern]
+ }
+
+ return expand(pattern)
+}
+
+var MAX_PATTERN_LENGTH = 1024 * 64
+var assertValidPattern = function (pattern) {
+ if (typeof pattern !== 'string') {
+ throw new TypeError('invalid pattern')
+ }
+
+ if (pattern.length > MAX_PATTERN_LENGTH) {
+ throw new TypeError('pattern is too long')
+ }
+}
+
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion. Otherwise, any series
+// of * is equivalent to a single *. Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+Minimatch.prototype.parse = parse
+var SUBPARSE = {}
+function parse (pattern, isSub) {
+ assertValidPattern(pattern)
+
+ var options = this.options
+
+ // shortcuts
+ if (pattern === '**') {
+ if (!options.noglobstar)
+ return GLOBSTAR
+ else
+ pattern = '*'
+ }
+ if (pattern === '') return ''
+
+ var re = ''
+ var hasMagic = !!options.nocase
+ var escaping = false
+ // ? => one single character
+ var patternListStack = []
+ var negativeLists = []
+ var stateChar
+ var inClass = false
+ var reClassStart = -1
+ var classStart = -1
+ // . and .. never match anything that doesn't start with .,
+ // even when options.dot is set.
+ var patternStart = pattern.charAt(0) === '.' ? '' // anything
+ // not (start or / followed by . or .. followed by / or end)
+ : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
+ : '(?!\\.)'
+ var self = this
+
+ function clearStateChar () {
+ if (stateChar) {
+ // we had some state-tracking character
+ // that wasn't consumed by this pass.
+ switch (stateChar) {
+ case '*':
+ re += star
+ hasMagic = true
+ break
+ case '?':
+ re += qmark
+ hasMagic = true
+ break
+ default:
+ re += '\\' + stateChar
+ break
+ }
+ self.debug('clearStateChar %j %j', stateChar, re)
+ stateChar = false
+ }
+ }
+
+ for (var i = 0, len = pattern.length, c
+ ; (i < len) && (c = pattern.charAt(i))
+ ; i++) {
+ this.debug('%s\t%s %s %j', pattern, i, re, c)
+
+ // skip over any that are escaped.
+ if (escaping && reSpecials[c]) {
+ re += '\\' + c
+ escaping = false
+ continue
+ }
+
+ switch (c) {
+ /* istanbul ignore next */
+ case '/': {
+ // completely not allowed, even escaped.
+ // Should already be path-split by now.
+ return false
+ }
+
+ case '\\':
+ clearStateChar()
+ escaping = true
+ continue
+
+ // the various stateChar values
+ // for the "extglob" stuff.
+ case '?':
+ case '*':
+ case '+':
+ case '@':
+ case '!':
+ this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c)
+
+ // all of those are literals inside a class, except that
+ // the glob [!a] means [^a] in regexp
+ if (inClass) {
+ this.debug(' in class')
+ if (c === '!' && i === classStart + 1) c = '^'
+ re += c
+ continue
+ }
+
+ // if we already have a stateChar, then it means
+ // that there was something like ** or +? in there.
+ // Handle the stateChar, then proceed with this one.
+ self.debug('call clearStateChar %j', stateChar)
+ clearStateChar()
+ stateChar = c
+ // if extglob is disabled, then +(asdf|foo) isn't a thing.
+ // just clear the statechar *now*, rather than even diving into
+ // the patternList stuff.
+ if (options.noext) clearStateChar()
+ continue
+
+ case '(':
+ if (inClass) {
+ re += '('
+ continue
+ }
+
+ if (!stateChar) {
+ re += '\\('
+ continue
+ }
+
+ patternListStack.push({
+ type: stateChar,
+ start: i - 1,
+ reStart: re.length,
+ open: plTypes[stateChar].open,
+ close: plTypes[stateChar].close
+ })
+ // negation is (?:(?!js)[^/]*)
+ re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
+ this.debug('plType %j %j', stateChar, re)
+ stateChar = false
+ continue
+
+ case ')':
+ if (inClass || !patternListStack.length) {
+ re += '\\)'
+ continue
+ }
+
+ clearStateChar()
+ hasMagic = true
+ var pl = patternListStack.pop()
+ // negation is (?:(?!js)[^/]*)
+ // The others are (?:)
+ re += pl.close
+ if (pl.type === '!') {
+ negativeLists.push(pl)
+ }
+ pl.reEnd = re.length
+ continue
+
+ case '|':
+ if (inClass || !patternListStack.length || escaping) {
+ re += '\\|'
+ escaping = false
+ continue
+ }
+
+ clearStateChar()
+ re += '|'
+ continue
+
+ // these are mostly the same in regexp and glob
+ case '[':
+ // swallow any state-tracking char before the [
+ clearStateChar()
+
+ if (inClass) {
+ re += '\\' + c
+ continue
+ }
+
+ inClass = true
+ classStart = i
+ reClassStart = re.length
+ re += c
+ continue
+
+ case ']':
+ // a right bracket shall lose its special
+ // meaning and represent itself in
+ // a bracket expression if it occurs
+ // first in the list. -- POSIX.2 2.8.3.2
+ if (i === classStart + 1 || !inClass) {
+ re += '\\' + c
+ escaping = false
+ continue
+ }
+
+ // handle the case where we left a class open.
+ // "[z-a]" is valid, equivalent to "\[z-a\]"
+ // split where the last [ was, make sure we don't have
+ // an invalid re. if so, re-walk the contents of the
+ // would-be class to re-translate any characters that
+ // were passed through as-is
+ // TODO: It would probably be faster to determine this
+ // without a try/catch and a new RegExp, but it's tricky
+ // to do safely. For now, this is safe and works.
+ var cs = pattern.substring(classStart + 1, i)
+ try {
+ RegExp('[' + cs + ']')
+ } catch (er) {
+ // not a valid class!
+ var sp = this.parse(cs, SUBPARSE)
+ re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
+ hasMagic = hasMagic || sp[1]
+ inClass = false
+ continue
+ }
+
+ // finish up the class.
+ hasMagic = true
+ inClass = false
+ re += c
+ continue
+
+ default:
+ // swallow any state char that wasn't consumed
+ clearStateChar()
+
+ if (escaping) {
+ // no need
+ escaping = false
+ } else if (reSpecials[c]
+ && !(c === '^' && inClass)) {
+ re += '\\'
+ }
+
+ re += c
+
+ } // switch
+ } // for
+
+ // handle the case where we left a class open.
+ // "[abc" is valid, equivalent to "\[abc"
+ if (inClass) {
+ // split where the last [ was, and escape it
+ // this is a huge pita. We now have to re-walk
+ // the contents of the would-be class to re-translate
+ // any characters that were passed through as-is
+ cs = pattern.substr(classStart + 1)
+ sp = this.parse(cs, SUBPARSE)
+ re = re.substr(0, reClassStart) + '\\[' + sp[0]
+ hasMagic = hasMagic || sp[1]
+ }
+
+ // handle the case where we had a +( thing at the *end*
+ // of the pattern.
+ // each pattern list stack adds 3 chars, and we need to go through
+ // and escape any | chars that were passed through as-is for the regexp.
+ // Go through and escape them, taking care not to double-escape any
+ // | chars that were already escaped.
+ for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
+ var tail = re.slice(pl.reStart + pl.open.length)
+ this.debug('setting tail', re, pl)
+ // maybe some even number of \, then maybe 1 \, followed by a |
+ tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) {
+ if (!$2) {
+ // the | isn't already escaped, so escape it.
+ $2 = '\\'
+ }
+
+ // need to escape all those slashes *again*, without escaping the
+ // one that we need for escaping the | character. As it works out,
+ // escaping an even number of slashes can be done by simply repeating
+ // it exactly after itself. That's why this trick works.
+ //
+ // I am sorry that you have to see this.
+ return $1 + $1 + $2 + '|'
+ })
+
+ this.debug('tail=%j\n %s', tail, tail, pl, re)
+ var t = pl.type === '*' ? star
+ : pl.type === '?' ? qmark
+ : '\\' + pl.type
+
+ hasMagic = true
+ re = re.slice(0, pl.reStart) + t + '\\(' + tail
+ }
+
+ // handle trailing things that only matter at the very end.
+ clearStateChar()
+ if (escaping) {
+ // trailing \\
+ re += '\\\\'
+ }
+
+ // only need to apply the nodot start if the re starts with
+ // something that could conceivably capture a dot
+ var addPatternStart = false
+ switch (re.charAt(0)) {
+ case '[': case '.': case '(': addPatternStart = true
+ }
+
+ // Hack to work around lack of negative lookbehind in JS
+ // A pattern like: *.!(x).!(y|z) needs to ensure that a name
+ // like 'a.xyz.yz' doesn't match. So, the first negative
+ // lookahead, has to look ALL the way ahead, to the end of
+ // the pattern.
+ for (var n = negativeLists.length - 1; n > -1; n--) {
+ var nl = negativeLists[n]
+
+ var nlBefore = re.slice(0, nl.reStart)
+ var nlFirst = re.slice(nl.reStart, nl.reEnd - 8)
+ var nlLast = re.slice(nl.reEnd - 8, nl.reEnd)
+ var nlAfter = re.slice(nl.reEnd)
+
+ nlLast += nlAfter
+
+ // Handle nested stuff like *(*.js|!(*.json)), where open parens
+ // mean that we should *not* include the ) in the bit that is considered
+ // "after" the negated section.
+ var openParensBefore = nlBefore.split('(').length - 1
+ var cleanAfter = nlAfter
+ for (i = 0; i < openParensBefore; i++) {
+ cleanAfter = cleanAfter.replace(/\)[+*?]?/, '')
+ }
+ nlAfter = cleanAfter
+
+ var dollar = ''
+ if (nlAfter === '' && isSub !== SUBPARSE) {
+ dollar = '$'
+ }
+ var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast
+ re = newRe
+ }
+
+ // if the re is not "" at this point, then we need to make sure
+ // it doesn't match against an empty path part.
+ // Otherwise a/* will match a/, which it should not.
+ if (re !== '' && hasMagic) {
+ re = '(?=.)' + re
+ }
+
+ if (addPatternStart) {
+ re = patternStart + re
+ }
+
+ // parsing just a piece of a larger pattern.
+ if (isSub === SUBPARSE) {
+ return [re, hasMagic]
+ }
+
+ // skip the regexp for non-magical patterns
+ // unescape anything in it, though, so that it'll be
+ // an exact match against a file etc.
+ if (!hasMagic) {
+ return globUnescape(pattern)
+ }
+
+ var flags = options.nocase ? 'i' : ''
+ try {
+ var regExp = new RegExp('^' + re + '$', flags)
+ } catch (er) /* istanbul ignore next - should be impossible */ {
+ // If it was an invalid regular expression, then it can't match
+ // anything. This trick looks for a character after the end of
+ // the string, which is of course impossible, except in multi-line
+ // mode, but it's not a /m regex.
+ return new RegExp('$.')
+ }
+
+ regExp._glob = pattern
+ regExp._src = re
+
+ return regExp
+}
+
+minimatch.makeRe = function (pattern, options) {
+ return new Minimatch(pattern, options || {}).makeRe()
+}
+
+Minimatch.prototype.makeRe = makeRe
+function makeRe () {
+ if (this.regexp || this.regexp === false) return this.regexp
+
+ // at this point, this.set is a 2d array of partial
+ // pattern strings, or "**".
+ //
+ // It's better to use .match(). This function shouldn't
+ // be used, really, but it's pretty convenient sometimes,
+ // when you just want to work with a regex.
+ var set = this.set
+
+ if (!set.length) {
+ this.regexp = false
+ return this.regexp
+ }
+ var options = this.options
+
+ var twoStar = options.noglobstar ? star
+ : options.dot ? twoStarDot
+ : twoStarNoDot
+ var flags = options.nocase ? 'i' : ''
+
+ var re = set.map(function (pattern) {
+ return pattern.map(function (p) {
+ return (p === GLOBSTAR) ? twoStar
+ : (typeof p === 'string') ? regExpEscape(p)
+ : p._src
+ }).join('\\\/')
+ }).join('|')
+
+ // must match entire pattern
+ // ending in a * or ** will make it less strict.
+ re = '^(?:' + re + ')$'
+
+ // can match anything, as long as it's not this.
+ if (this.negate) re = '^(?!' + re + ').*$'
+
+ try {
+ this.regexp = new RegExp(re, flags)
+ } catch (ex) /* istanbul ignore next - should be impossible */ {
+ this.regexp = false
+ }
+ return this.regexp
+}
+
+minimatch.match = function (list, pattern, options) {
+ options = options || {}
+ var mm = new Minimatch(pattern, options)
+ list = list.filter(function (f) {
+ return mm.match(f)
+ })
+ if (mm.options.nonull && !list.length) {
+ list.push(pattern)
+ }
+ return list
+}
+
+Minimatch.prototype.match = function match (f, partial) {
+ if (typeof partial === 'undefined') partial = this.partial
+ this.debug('match', f, this.pattern)
+ // short-circuit in the case of busted things.
+ // comments, etc.
+ if (this.comment) return false
+ if (this.empty) return f === ''
+
+ if (f === '/' && partial) return true
+
+ var options = this.options
+
+ // windows: need to use /, not \
+ if (path.sep !== '/') {
+ f = f.split(path.sep).join('/')
+ }
+
+ // treat the test path as a set of pathparts.
+ f = f.split(slashSplit)
+ this.debug(this.pattern, 'split', f)
+
+ // just ONE of the pattern sets in this.set needs to match
+ // in order for it to be valid. If negating, then just one
+ // match means that we have failed.
+ // Either way, return on the first hit.
+
+ var set = this.set
+ this.debug(this.pattern, 'set', set)
+
+ // Find the basename of the path by looking for the last non-empty segment
+ var filename
+ var i
+ for (i = f.length - 1; i >= 0; i--) {
+ filename = f[i]
+ if (filename) break
+ }
+
+ for (i = 0; i < set.length; i++) {
+ var pattern = set[i]
+ var file = f
+ if (options.matchBase && pattern.length === 1) {
+ file = [filename]
+ }
+ var hit = this.matchOne(file, pattern, partial)
+ if (hit) {
+ if (options.flipNegate) return true
+ return !this.negate
+ }
+ }
+
+ // didn't get any hits. this is success if it's a negative
+ // pattern, failure otherwise.
+ if (options.flipNegate) return false
+ return this.negate
+}
+
+// set partial to true to test if, for example,
+// "/a/b" matches the start of "/*/b/*/d"
+// Partial means, if you run out of file before you run
+// out of pattern, then that's fine, as long as all
+// the parts match.
+Minimatch.prototype.matchOne = function (file, pattern, partial) {
+ var options = this.options
+
+ this.debug('matchOne',
+ { 'this': this, file: file, pattern: pattern })
+
+ this.debug('matchOne', file.length, pattern.length)
+
+ for (var fi = 0,
+ pi = 0,
+ fl = file.length,
+ pl = pattern.length
+ ; (fi < fl) && (pi < pl)
+ ; fi++, pi++) {
+ this.debug('matchOne loop')
+ var p = pattern[pi]
+ var f = file[fi]
+
+ this.debug(pattern, p, f)
+
+ // should be impossible.
+ // some invalid regexp stuff in the set.
+ /* istanbul ignore if */
+ if (p === false) return false
+
+ if (p === GLOBSTAR) {
+ this.debug('GLOBSTAR', [pattern, p, f])
+
+ // "**"
+ // a/**/b/**/c would match the following:
+ // a/b/x/y/z/c
+ // a/x/y/z/b/c
+ // a/b/x/b/x/c
+ // a/b/c
+ // To do this, take the rest of the pattern after
+ // the **, and see if it would match the file remainder.
+ // If so, return success.
+ // If not, the ** "swallows" a segment, and try again.
+ // This is recursively awful.
+ //
+ // a/**/b/**/c matching a/b/x/y/z/c
+ // - a matches a
+ // - doublestar
+ // - matchOne(b/x/y/z/c, b/**/c)
+ // - b matches b
+ // - doublestar
+ // - matchOne(x/y/z/c, c) -> no
+ // - matchOne(y/z/c, c) -> no
+ // - matchOne(z/c, c) -> no
+ // - matchOne(c, c) yes, hit
+ var fr = fi
+ var pr = pi + 1
+ if (pr === pl) {
+ this.debug('** at the end')
+ // a ** at the end will just swallow the rest.
+ // We have found a match.
+ // however, it will not swallow /.x, unless
+ // options.dot is set.
+ // . and .. are *never* matched by **, for explosively
+ // exponential reasons.
+ for (; fi < fl; fi++) {
+ if (file[fi] === '.' || file[fi] === '..' ||
+ (!options.dot && file[fi].charAt(0) === '.')) return false
+ }
+ return true
+ }
+
+ // ok, let's see if we can swallow whatever we can.
+ while (fr < fl) {
+ var swallowee = file[fr]
+
+ this.debug('\nglobstar while', file, fr, pattern, pr, swallowee)
+
+ // XXX remove this slice. Just pass the start index.
+ if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+ this.debug('globstar found match!', fr, fl, swallowee)
+ // found a match.
+ return true
+ } else {
+ // can't swallow "." or ".." ever.
+ // can only swallow ".foo" when explicitly asked.
+ if (swallowee === '.' || swallowee === '..' ||
+ (!options.dot && swallowee.charAt(0) === '.')) {
+ this.debug('dot detected!', file, fr, pattern, pr)
+ break
+ }
+
+ // ** swallows a segment, and continue.
+ this.debug('globstar swallow a segment, and continue')
+ fr++
+ }
+ }
+
+ // no match was found.
+ // However, in partial mode, we can't say this is necessarily over.
+ // If there's more *pattern* left, a longer path could still match,
+ // so report a partial hit once we've run out of file.
+ /* istanbul ignore if */
+ if (partial) {
+ // ran out of file
+ this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
+ if (fr === fl) return true
+ }
+ return false
+ }
+
+ // something other than **
+ // non-magic patterns just have to match exactly
+ // patterns with magic have been turned into regexps.
+ var hit
+ if (typeof p === 'string') {
+ hit = f === p
+ this.debug('string match', p, f, hit)
+ } else {
+ hit = f.match(p)
+ this.debug('pattern match', p, f, hit)
+ }
+
+ if (!hit) return false
+ }
+
+ // Note: ending in / means that we'll get a final ""
+ // at the end of the pattern. This can only match a
+ // corresponding "" at the end of the file.
+ // If the file ends in /, then it can only match a
+ // a pattern that ends in /, unless the pattern just
+ // doesn't have any more for it. But, a/b/ should *not*
+ // match "a/b/*", even though "" matches against the
+ // [^/]*? pattern, except in partial mode, where it might
+ // simply not be reached yet.
+ // However, a/b/ should still satisfy a/*
+
+ // now either we fell off the end of the pattern, or we're done.
+ if (fi === fl && pi === pl) {
+ // ran out of pattern and filename at the same time.
+ // an exact hit!
+ return true
+ } else if (fi === fl) {
+ // ran out of file, but still had pattern left.
+ // this is ok if we're doing the match as part of
+ // a glob fs traversal.
+ return partial
+ } else /* istanbul ignore else */ if (pi === pl) {
+ // ran out of pattern, still have file left.
+ // this is only acceptable if we're on the very last
+ // empty segment of a file with a trailing slash.
+ // a/* should match a/b/
+ return (fi === fl - 1) && (file[fi] === '')
+ }
+
+ // should be unreachable.
+ /* istanbul ignore next */
+ throw new Error('wtf?')
+}
+
+// replace stuff like \* with *
+function globUnescape (s) {
+ return s.replace(/\\(.)/g, '$1')
+}
+
+function regExpEscape (s) {
+ return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&')
+}
+
+
+/***/ }),
+
+/***/ 5526:
+/***/ (function(__unused_webpack_module, exports) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;
+class BasicCredentialHandler {
+ constructor(username, password) {
+ this.username = username;
+ this.password = password;
+ }
+ prepareRequest(options) {
+ if (!options.headers) {
+ throw Error('The request has no headers');
+ }
+ options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;
+ }
+ // This handler cannot handle 401
+ canHandleAuthentication() {
+ return false;
+ }
+ handleAuthentication() {
+ return __awaiter(this, void 0, void 0, function* () {
+ throw new Error('not implemented');
+ });
+ }
+}
+exports.BasicCredentialHandler = BasicCredentialHandler;
+class BearerCredentialHandler {
+ constructor(token) {
+ this.token = token;
+ }
+ // currently implements pre-authorization
+ // TODO: support preAuth = false where it hooks on 401
+ prepareRequest(options) {
+ if (!options.headers) {
+ throw Error('The request has no headers');
+ }
+ options.headers['Authorization'] = `Bearer ${this.token}`;
+ }
+ // This handler cannot handle 401
+ canHandleAuthentication() {
+ return false;
+ }
+ handleAuthentication() {
+ return __awaiter(this, void 0, void 0, function* () {
+ throw new Error('not implemented');
+ });
+ }
+}
+exports.BearerCredentialHandler = BearerCredentialHandler;
+class PersonalAccessTokenCredentialHandler {
+ constructor(token) {
+ this.token = token;
+ }
+ // currently implements pre-authorization
+ // TODO: support preAuth = false where it hooks on 401
+ prepareRequest(options) {
+ if (!options.headers) {
+ throw Error('The request has no headers');
+ }
+ options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;
+ }
+ // This handler cannot handle 401
+ canHandleAuthentication() {
+ return false;
+ }
+ handleAuthentication() {
+ return __awaiter(this, void 0, void 0, function* () {
+ throw new Error('not implemented');
+ });
+ }
+}
+exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
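+// Editor's note (illustrative, assumed usage rather than upstream code): a
+// handler instance is passed to HttpClient so prepareRequest() can stamp the
+// Authorization header on every outgoing request, e.g.
+//   new HttpClient('my-agent', [new BearerCredentialHandler(token)])
+// sends 'Authorization: Bearer <token>' with each request ('my-agent' is a
+// hypothetical user-agent string).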
+//# sourceMappingURL=auth.js.map
+
+/***/ }),
+
+/***/ 6255:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+/* eslint-disable @typescript-eslint/no-explicit-any */
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
+const http = __importStar(__nccwpck_require__(3685));
+const https = __importStar(__nccwpck_require__(5687));
+const pm = __importStar(__nccwpck_require__(9835));
+const tunnel = __importStar(__nccwpck_require__(4294));
+const undici_1 = __nccwpck_require__(1773);
+var HttpCodes;
+(function (HttpCodes) {
+ HttpCodes[HttpCodes["OK"] = 200] = "OK";
+ HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
+ HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
+ HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
+ HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
+ HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
+ HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
+ HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
+ HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
+ HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
+ HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
+ HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
+ HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
+ HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
+ HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
+ HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
+ HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
+ HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
+ HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
+ HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
+ HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
+ HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
+ HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
+ HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
+ HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
+ HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
+ HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
+})(HttpCodes || (exports.HttpCodes = HttpCodes = {}));
+var Headers;
+(function (Headers) {
+ Headers["Accept"] = "accept";
+ Headers["ContentType"] = "content-type";
+})(Headers || (exports.Headers = Headers = {}));
+var MediaTypes;
+(function (MediaTypes) {
+ MediaTypes["ApplicationJson"] = "application/json";
+})(MediaTypes || (exports.MediaTypes = MediaTypes = {}));
+/**
+ * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
+ * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
+ */
+function getProxyUrl(serverUrl) {
+ const proxyUrl = pm.getProxyUrl(new URL(serverUrl));
+ return proxyUrl ? proxyUrl.href : '';
+}
+exports.getProxyUrl = getProxyUrl;
+const HttpRedirectCodes = [
+ HttpCodes.MovedPermanently,
+ HttpCodes.ResourceMoved,
+ HttpCodes.SeeOther,
+ HttpCodes.TemporaryRedirect,
+ HttpCodes.PermanentRedirect
+];
+const HttpResponseRetryCodes = [
+ HttpCodes.BadGateway,
+ HttpCodes.ServiceUnavailable,
+ HttpCodes.GatewayTimeout
+];
+const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
+const ExponentialBackoffCeiling = 10;
+const ExponentialBackoffTimeSlice = 5;
+class HttpClientError extends Error {
+ constructor(message, statusCode) {
+ super(message);
+ this.name = 'HttpClientError';
+ this.statusCode = statusCode;
+ Object.setPrototypeOf(this, HttpClientError.prototype);
+ }
+}
+exports.HttpClientError = HttpClientError;
+class HttpClientResponse {
+ constructor(message) {
+ this.message = message;
+ }
+ readBody() {
+ return __awaiter(this, void 0, void 0, function* () {
+ return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
+ let output = Buffer.alloc(0);
+ this.message.on('data', (chunk) => {
+ output = Buffer.concat([output, chunk]);
+ });
+ this.message.on('end', () => {
+ resolve(output.toString());
+ });
+ }));
+ });
+ }
+ readBodyBuffer() {
+ return __awaiter(this, void 0, void 0, function* () {
+ return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
+ const chunks = [];
+ this.message.on('data', (chunk) => {
+ chunks.push(chunk);
+ });
+ this.message.on('end', () => {
+ resolve(Buffer.concat(chunks));
+ });
+ }));
+ });
+ }
+}
+exports.HttpClientResponse = HttpClientResponse;
+function isHttps(requestUrl) {
+ const parsedUrl = new URL(requestUrl);
+ return parsedUrl.protocol === 'https:';
+}
+exports.isHttps = isHttps;
+class HttpClient {
+ constructor(userAgent, handlers, requestOptions) {
+ this._ignoreSslError = false;
+ this._allowRedirects = true;
+ this._allowRedirectDowngrade = false;
+ this._maxRedirects = 50;
+ this._allowRetries = false;
+ this._maxRetries = 1;
+ this._keepAlive = false;
+ this._disposed = false;
+ this.userAgent = userAgent;
+ this.handlers = handlers || [];
+ this.requestOptions = requestOptions;
+ if (requestOptions) {
+ if (requestOptions.ignoreSslError != null) {
+ this._ignoreSslError = requestOptions.ignoreSslError;
+ }
+ this._socketTimeout = requestOptions.socketTimeout;
+ if (requestOptions.allowRedirects != null) {
+ this._allowRedirects = requestOptions.allowRedirects;
+ }
+ if (requestOptions.allowRedirectDowngrade != null) {
+ this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
+ }
+ if (requestOptions.maxRedirects != null) {
+ this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
+ }
+ if (requestOptions.keepAlive != null) {
+ this._keepAlive = requestOptions.keepAlive;
+ }
+ if (requestOptions.allowRetries != null) {
+ this._allowRetries = requestOptions.allowRetries;
+ }
+ if (requestOptions.maxRetries != null) {
+ this._maxRetries = requestOptions.maxRetries;
+ }
+ }
+ }
+ options(requestUrl, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
+ });
+ }
+ get(requestUrl, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('GET', requestUrl, null, additionalHeaders || {});
+ });
+ }
+ del(requestUrl, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('DELETE', requestUrl, null, additionalHeaders || {});
+ });
+ }
+ post(requestUrl, data, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('POST', requestUrl, data, additionalHeaders || {});
+ });
+ }
+ patch(requestUrl, data, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('PATCH', requestUrl, data, additionalHeaders || {});
+ });
+ }
+ put(requestUrl, data, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('PUT', requestUrl, data, additionalHeaders || {});
+ });
+ }
+ head(requestUrl, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('HEAD', requestUrl, null, additionalHeaders || {});
+ });
+ }
+ sendStream(verb, requestUrl, stream, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request(verb, requestUrl, stream, additionalHeaders);
+ });
+ }
+ /**
+ * Gets a typed object from an endpoint
+ * Be aware that a 404 (not found) resolves with a null result; other errors (4xx, 5xx) reject the promise
+ */
+ getJson(requestUrl, additionalHeaders = {}) {
+ return __awaiter(this, void 0, void 0, function* () {
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ const res = yield this.get(requestUrl, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ });
+ }
+ postJson(requestUrl, obj, additionalHeaders = {}) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const data = JSON.stringify(obj, null, 2);
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
+ const res = yield this.post(requestUrl, data, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ });
+ }
+ putJson(requestUrl, obj, additionalHeaders = {}) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const data = JSON.stringify(obj, null, 2);
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
+ const res = yield this.put(requestUrl, data, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ });
+ }
+ patchJson(requestUrl, obj, additionalHeaders = {}) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const data = JSON.stringify(obj, null, 2);
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
+ const res = yield this.patch(requestUrl, data, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ });
+ }
+ /**
+ * Makes a raw http request.
+ * All other methods such as get, post, patch, and request ultimately call this.
+ * Prefer get, del, post and patch
+ */
+ request(verb, requestUrl, data, headers) {
+ return __awaiter(this, void 0, void 0, function* () {
+ if (this._disposed) {
+ throw new Error('Client has already been disposed.');
+ }
+ const parsedUrl = new URL(requestUrl);
+ let info = this._prepareRequest(verb, parsedUrl, headers);
+ // Only perform retries on reads since writes may not be idempotent.
+ const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)
+ ? this._maxRetries + 1
+ : 1;
+ let numTries = 0;
+ let response;
+ do {
+ response = yield this.requestRaw(info, data);
+ // Check if it's an authentication challenge
+ if (response &&
+ response.message &&
+ response.message.statusCode === HttpCodes.Unauthorized) {
+ let authenticationHandler;
+ for (const handler of this.handlers) {
+ if (handler.canHandleAuthentication(response)) {
+ authenticationHandler = handler;
+ break;
+ }
+ }
+ if (authenticationHandler) {
+ return authenticationHandler.handleAuthentication(this, info, data);
+ }
+ else {
+ // We have received an unauthorized response but have no handlers to handle it.
+ // Let the response return to the caller.
+ return response;
+ }
+ }
+ let redirectsRemaining = this._maxRedirects;
+ while (response.message.statusCode &&
+ HttpRedirectCodes.includes(response.message.statusCode) &&
+ this._allowRedirects &&
+ redirectsRemaining > 0) {
+ const redirectUrl = response.message.headers['location'];
+ if (!redirectUrl) {
+ // if there's no location to redirect to, we won't
+ break;
+ }
+ const parsedRedirectUrl = new URL(redirectUrl);
+ if (parsedUrl.protocol === 'https:' &&
+ parsedUrl.protocol !== parsedRedirectUrl.protocol &&
+ !this._allowRedirectDowngrade) {
+ throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
+ }
+ // we need to finish reading the response before reassigning it,
+ // otherwise we will leak the open socket.
+ yield response.readBody();
+ // strip authorization header if redirected to a different hostname
+ if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
+ for (const header in headers) {
+ // header names are case insensitive
+ if (header.toLowerCase() === 'authorization') {
+ delete headers[header];
+ }
+ }
+ }
+ // let's make the request with the new redirectUrl
+ info = this._prepareRequest(verb, parsedRedirectUrl, headers);
+ response = yield this.requestRaw(info, data);
+ redirectsRemaining--;
+ }
+ if (!response.message.statusCode ||
+ !HttpResponseRetryCodes.includes(response.message.statusCode)) {
+ // If not a retry code, return immediately instead of retrying
+ return response;
+ }
+ numTries += 1;
+ if (numTries < maxTries) {
+ yield response.readBody();
+ yield this._performExponentialBackoff(numTries);
+ }
+ } while (numTries < maxTries);
+ return response;
+ });
+ }
+ /**
+ * Needs to be called if keepAlive is set to true in request options.
+ */
+ dispose() {
+ if (this._agent) {
+ this._agent.destroy();
+ }
+ this._disposed = true;
+ }
+ /**
+ * Raw request.
+ * @param info
+ * @param data
+ */
+ requestRaw(info, data) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return new Promise((resolve, reject) => {
+ function callbackForResult(err, res) {
+ if (err) {
+ reject(err);
+ }
+ else if (!res) {
+ // If `err` is not passed, then `res` must be passed.
+ reject(new Error('Unknown error'));
+ }
+ else {
+ resolve(res);
+ }
+ }
+ this.requestRawWithCallback(info, data, callbackForResult);
+ });
+ });
+ }
+ /**
+ * Raw request with callback.
+ * @param info
+ * @param data
+ * @param onResult
+ */
+ requestRawWithCallback(info, data, onResult) {
+ if (typeof data === 'string') {
+ if (!info.options.headers) {
+ info.options.headers = {};
+ }
+ info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
+ }
+ let callbackCalled = false;
+ function handleResult(err, res) {
+ if (!callbackCalled) {
+ callbackCalled = true;
+ onResult(err, res);
+ }
+ }
+ const req = info.httpModule.request(info.options, (msg) => {
+ const res = new HttpClientResponse(msg);
+ handleResult(undefined, res);
+ });
+ let socket;
+ req.on('socket', sock => {
+ socket = sock;
+ });
+ // If we ever get disconnected, we want the socket to timeout eventually
+ req.setTimeout(this._socketTimeout || 3 * 60000, () => {
+ if (socket) {
+ socket.end();
+ }
+ handleResult(new Error(`Request timeout: ${info.options.path}`));
+ });
+ req.on('error', function (err) {
+ // err has statusCode property
+ // res should have headers
+ handleResult(err);
+ });
+ if (data && typeof data === 'string') {
+ req.write(data, 'utf8');
+ }
+ if (data && typeof data !== 'string') {
+ data.on('close', function () {
+ req.end();
+ });
+ data.pipe(req);
+ }
+ else {
+ req.end();
+ }
+ }
+ /**
+ * Gets an http agent. This function is useful when you need an http agent that handles
+ * routing through a proxy server - depending upon the url and proxy environment variables.
+ * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
+ */
+ getAgent(serverUrl) {
+ const parsedUrl = new URL(serverUrl);
+ return this._getAgent(parsedUrl);
+ }
+ getAgentDispatcher(serverUrl) {
+ const parsedUrl = new URL(serverUrl);
+ const proxyUrl = pm.getProxyUrl(parsedUrl);
+ const useProxy = proxyUrl && proxyUrl.hostname;
+ if (!useProxy) {
+ return;
+ }
+ return this._getProxyAgentDispatcher(parsedUrl, proxyUrl);
+ }
+ _prepareRequest(method, requestUrl, headers) {
+ const info = {};
+ info.parsedUrl = requestUrl;
+ const usingSsl = info.parsedUrl.protocol === 'https:';
+ info.httpModule = usingSsl ? https : http;
+ const defaultPort = usingSsl ? 443 : 80;
+ info.options = {};
+ info.options.host = info.parsedUrl.hostname;
+ info.options.port = info.parsedUrl.port
+ ? parseInt(info.parsedUrl.port)
+ : defaultPort;
+ info.options.path =
+ (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
+ info.options.method = method;
+ info.options.headers = this._mergeHeaders(headers);
+ if (this.userAgent != null) {
+ info.options.headers['user-agent'] = this.userAgent;
+ }
+ info.options.agent = this._getAgent(info.parsedUrl);
+ // gives handlers an opportunity to participate
+ if (this.handlers) {
+ for (const handler of this.handlers) {
+ handler.prepareRequest(info.options);
+ }
+ }
+ return info;
+ }
+ _mergeHeaders(headers) {
+ if (this.requestOptions && this.requestOptions.headers) {
+ return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));
+ }
+ return lowercaseKeys(headers || {});
+ }
+ _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
+ let clientHeader;
+ if (this.requestOptions && this.requestOptions.headers) {
+ clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
+ }
+ return additionalHeaders[header] || clientHeader || _default;
+ }
+ _getAgent(parsedUrl) {
+ let agent;
+ const proxyUrl = pm.getProxyUrl(parsedUrl);
+ const useProxy = proxyUrl && proxyUrl.hostname;
+ if (this._keepAlive && useProxy) {
+ agent = this._proxyAgent;
+ }
+ if (this._keepAlive && !useProxy) {
+ agent = this._agent;
+ }
+ // if agent is already assigned use that agent.
+ if (agent) {
+ return agent;
+ }
+ const usingSsl = parsedUrl.protocol === 'https:';
+ let maxSockets = 100;
+ if (this.requestOptions) {
+ maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
+ }
+ // This is `useProxy` again, but we need to check `proxyUrl` directly for TypeScript's flow analysis.
+ if (proxyUrl && proxyUrl.hostname) {
+ const agentOptions = {
+ maxSockets,
+ keepAlive: this._keepAlive,
+ proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {
+ proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
+ })), { host: proxyUrl.hostname, port: proxyUrl.port })
+ };
+ let tunnelAgent;
+ const overHttps = proxyUrl.protocol === 'https:';
+ if (usingSsl) {
+ tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
+ }
+ else {
+ tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
+ }
+ agent = tunnelAgent(agentOptions);
+ this._proxyAgent = agent;
+ }
+ // if reusing the agent across requests and a tunneling agent isn't assigned, create a new agent
+ if (this._keepAlive && !agent) {
+ const options = { keepAlive: this._keepAlive, maxSockets };
+ agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
+ this._agent = agent;
+ }
+ // if not using a private agent and a tunnel agent isn't set up, use the global agent
+ if (!agent) {
+ agent = usingSsl ? https.globalAgent : http.globalAgent;
+ }
+ if (usingSsl && this._ignoreSslError) {
+ // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that would affect requests for the entire process
+ // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
+ // we have to cast it to any and change it directly
+ agent.options = Object.assign(agent.options || {}, {
+ rejectUnauthorized: false
+ });
+ }
+ return agent;
+ }
+ _getProxyAgentDispatcher(parsedUrl, proxyUrl) {
+ let proxyAgent;
+ if (this._keepAlive) {
+ proxyAgent = this._proxyAgentDispatcher;
+ }
+ // if agent is already assigned use that agent.
+ if (proxyAgent) {
+ return proxyAgent;
+ }
+ const usingSsl = parsedUrl.protocol === 'https:';
+ proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && {
+ token: `${proxyUrl.username}:${proxyUrl.password}`
+ })));
+ this._proxyAgentDispatcher = proxyAgent;
+ if (usingSsl && this._ignoreSslError) {
+ // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that would affect requests for the entire process
+ // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
+ // we have to cast it to any and change it directly
+ proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, {
+ rejectUnauthorized: false
+ });
+ }
+ return proxyAgent;
+ }
+ _performExponentialBackoff(retryNumber) {
+ return __awaiter(this, void 0, void 0, function* () {
+ retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
+ const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
+ return new Promise(resolve => setTimeout(() => resolve(), ms));
+ });
+ }
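+ // Editor's note (illustration, not upstream): with ExponentialBackoffTimeSlice = 5
+ // and ExponentialBackoffCeiling = 10, the backoff delay is 5 * 2^retryNumber ms,
+ // i.e. 10 ms, 20 ms, 40 ms, ... capped at 5 * 2^10 = 5120 ms.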
+ _processResponse(res, options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
+ const statusCode = res.message.statusCode || 0;
+ const response = {
+ statusCode,
+ result: null,
+ headers: {}
+ };
+ // not found leads to null obj returned
+ if (statusCode === HttpCodes.NotFound) {
+ resolve(response);
+ }
+ // get the result from the body
+ function dateTimeDeserializer(key, value) {
+ if (typeof value === 'string') {
+ const a = new Date(value);
+ if (!isNaN(a.valueOf())) {
+ return a;
+ }
+ }
+ return value;
+ }
+ let obj;
+ let contents;
+ try {
+ contents = yield res.readBody();
+ if (contents && contents.length > 0) {
+ if (options && options.deserializeDates) {
+ obj = JSON.parse(contents, dateTimeDeserializer);
+ }
+ else {
+ obj = JSON.parse(contents);
+ }
+ response.result = obj;
+ }
+ response.headers = res.message.headers;
+ }
+ catch (err) {
+ // Invalid resource (contents not json); leaving result obj null
+ }
+ // note that 3xx redirects are handled by the http layer.
+ if (statusCode > 299) {
+ let msg;
+ // if exception/error in body, attempt to get better error
+ if (obj && obj.message) {
+ msg = obj.message;
+ }
+ else if (contents && contents.length > 0) {
+ // it may be the case that the exception is in the body message as string
+ msg = contents;
+ }
+ else {
+ msg = `Failed request: (${statusCode})`;
+ }
+ const err = new HttpClientError(msg, statusCode);
+ err.result = response.result;
+ reject(err);
+ }
+ else {
+ resolve(response);
+ }
+ }));
+ });
+ }
+}
+exports.HttpClient = HttpClient;
+const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
+//# sourceMappingURL=index.js.map
+
+/***/ }),
+
+/***/ 9835:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.checkBypass = exports.getProxyUrl = void 0;
+function getProxyUrl(reqUrl) {
+ const usingSsl = reqUrl.protocol === 'https:';
+ if (checkBypass(reqUrl)) {
+ return undefined;
+ }
+ const proxyVar = (() => {
+ if (usingSsl) {
+ return process.env['https_proxy'] || process.env['HTTPS_PROXY'];
+ }
+ else {
+ return process.env['http_proxy'] || process.env['HTTP_PROXY'];
+ }
+ })();
+ if (proxyVar) {
+ try {
+ return new URL(proxyVar);
+ }
+ catch (_a) {
+ if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
+ return new URL(`http://${proxyVar}`);
+ }
+ }
+ else {
+ return undefined;
+ }
+}
+exports.getProxyUrl = getProxyUrl;
+function checkBypass(reqUrl) {
+ if (!reqUrl.hostname) {
+ return false;
+ }
+ const reqHost = reqUrl.hostname;
+ if (isLoopbackAddress(reqHost)) {
+ return true;
+ }
+ const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
+ if (!noProxy) {
+ return false;
+ }
+ // Determine the request port
+ let reqPort;
+ if (reqUrl.port) {
+ reqPort = Number(reqUrl.port);
+ }
+ else if (reqUrl.protocol === 'http:') {
+ reqPort = 80;
+ }
+ else if (reqUrl.protocol === 'https:') {
+ reqPort = 443;
+ }
+ // Format the request hostname and hostname with port
+ const upperReqHosts = [reqUrl.hostname.toUpperCase()];
+ if (typeof reqPort === 'number') {
+ upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
+ }
+ // Compare request host against noproxy
+ for (const upperNoProxyItem of noProxy
+ .split(',')
+ .map(x => x.trim().toUpperCase())
+ .filter(x => x)) {
+ if (upperNoProxyItem === '*' ||
+ upperReqHosts.some(x => x === upperNoProxyItem ||
+ x.endsWith(`.${upperNoProxyItem}`) ||
+ (upperNoProxyItem.startsWith('.') &&
+ x.endsWith(`${upperNoProxyItem}`)))) {
+ return true;
+ }
+ }
+ return false;
+}
+exports.checkBypass = checkBypass;
+function isLoopbackAddress(host) {
+ const hostLower = host.toLowerCase();
+ return (hostLower === 'localhost' ||
+ hostLower.startsWith('127.') ||
+ hostLower.startsWith('[::1]') ||
+ hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
+}
+//# sourceMappingURL=proxy.js.map
+
+/***/ }),
+
+/***/ 334:
+/***/ ((module) => {
+
+"use strict";
+
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ createTokenAuth: () => createTokenAuth
+});
+module.exports = __toCommonJS(dist_src_exports);
+
+// pkg/dist-src/auth.js
+var REGEX_IS_INSTALLATION_LEGACY = /^v1\./;
+var REGEX_IS_INSTALLATION = /^ghs_/;
+var REGEX_IS_USER_TO_SERVER = /^ghu_/;
+async function auth(token) {
+ const isApp = token.split(/\./).length === 3;
+ const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);
+ const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);
+ const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth";
+ return {
+ type: "token",
+ token,
+ tokenType
+ };
+}
+
+// pkg/dist-src/with-authorization-prefix.js
+function withAuthorizationPrefix(token) {
+ if (token.split(/\./).length === 3) {
+ return `bearer ${token}`;
+ }
+ return `token ${token}`;
+}
+
+// pkg/dist-src/hook.js
+async function hook(token, request, route, parameters) {
+ const endpoint = request.endpoint.merge(
+ route,
+ parameters
+ );
+ endpoint.headers.authorization = withAuthorizationPrefix(token);
+ return request(endpoint);
+}
+
+// pkg/dist-src/index.js
+var createTokenAuth = function createTokenAuth2(token) {
+ if (!token) {
+ throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
+ }
+ if (typeof token !== "string") {
+ throw new Error(
+ "[@octokit/auth-token] Token passed to createTokenAuth is not a string"
+ );
+ }
+ token = token.replace(/^(token|bearer) +/i, "");
+ return Object.assign(auth.bind(null, token), {
+ hook: hook.bind(null, token)
+ });
+};
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
+
+
+/***/ }),
+
+/***/ 6762:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ Octokit: () => Octokit
+});
+module.exports = __toCommonJS(dist_src_exports);
+var import_universal_user_agent = __nccwpck_require__(5030);
+var import_before_after_hook = __nccwpck_require__(3682);
+var import_request = __nccwpck_require__(6234);
+var import_graphql = __nccwpck_require__(8467);
+var import_auth_token = __nccwpck_require__(334);
+
+// pkg/dist-src/version.js
+var VERSION = "5.2.0";
+
+// pkg/dist-src/index.js
+var noop = () => {
+};
+var consoleWarn = console.warn.bind(console);
+var consoleError = console.error.bind(console);
+var userAgentTrail = `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`;
+var Octokit = class {
+ static {
+ this.VERSION = VERSION;
+ }
+ static defaults(defaults) {
+ const OctokitWithDefaults = class extends this {
+ constructor(...args) {
+ const options = args[0] || {};
+ if (typeof defaults === "function") {
+ super(defaults(options));
+ return;
+ }
+ super(
+ Object.assign(
+ {},
+ defaults,
+ options,
+ options.userAgent && defaults.userAgent ? {
+ userAgent: `${options.userAgent} ${defaults.userAgent}`
+ } : null
+ )
+ );
+ }
+ };
+ return OctokitWithDefaults;
+ }
+ static {
+ this.plugins = [];
+ }
+ /**
+ * Attach a plugin (or many) to your Octokit instance.
+ *
+ * @example
+ * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
+ */
+ static plugin(...newPlugins) {
+ const currentPlugins = this.plugins;
+ const NewOctokit = class extends this {
+ static {
+ this.plugins = currentPlugins.concat(
+ newPlugins.filter((plugin) => !currentPlugins.includes(plugin))
+ );
+ }
+ };
+ return NewOctokit;
+ }
+ constructor(options = {}) {
+ const hook = new import_before_after_hook.Collection();
+ const requestDefaults = {
+ baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl,
+ headers: {},
+ request: Object.assign({}, options.request, {
+ // @ts-ignore internal usage only, no need to type
+ hook: hook.bind(null, "request")
+ }),
+ mediaType: {
+ previews: [],
+ format: ""
+ }
+ };
+ requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail;
+ if (options.baseUrl) {
+ requestDefaults.baseUrl = options.baseUrl;
+ }
+ if (options.previews) {
+ requestDefaults.mediaType.previews = options.previews;
+ }
+ if (options.timeZone) {
+ requestDefaults.headers["time-zone"] = options.timeZone;
+ }
+ this.request = import_request.request.defaults(requestDefaults);
+ this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults);
+ this.log = Object.assign(
+ {
+ debug: noop,
+ info: noop,
+ warn: consoleWarn,
+ error: consoleError
+ },
+ options.log
+ );
+ this.hook = hook;
+ if (!options.authStrategy) {
+ if (!options.auth) {
+ this.auth = async () => ({
+ type: "unauthenticated"
+ });
+ } else {
+ const auth = (0, import_auth_token.createTokenAuth)(options.auth);
+ hook.wrap("request", auth.hook);
+ this.auth = auth;
+ }
+ } else {
+ const { authStrategy, ...otherOptions } = options;
+ const auth = authStrategy(
+ Object.assign(
+ {
+ request: this.request,
+ log: this.log,
+ // we pass the current octokit instance as well as its constructor options
+ // to allow for authentication strategies that return a new octokit instance
+ // that shares the same internal state as the current one. The original
+ // requirement for this was the "event-octokit" authentication strategy
+ // of https://github.com/probot/octokit-auth-probot.
+ octokit: this,
+ octokitOptions: otherOptions
+ },
+ options.auth
+ )
+ );
+ hook.wrap("request", auth.hook);
+ this.auth = auth;
+ }
+ const classConstructor = this.constructor;
+ for (let i = 0; i < classConstructor.plugins.length; ++i) {
+ Object.assign(this, classConstructor.plugins[i](this, options));
+ }
+ }
+};
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
+
+
+/***/ }),
+
+/***/ 9440:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ endpoint: () => endpoint
+});
+module.exports = __toCommonJS(dist_src_exports);
+
+// pkg/dist-src/defaults.js
+var import_universal_user_agent = __nccwpck_require__(5030);
+
+// pkg/dist-src/version.js
+var VERSION = "9.0.5";
+
+// pkg/dist-src/defaults.js
+var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`;
+var DEFAULTS = {
+ method: "GET",
+ baseUrl: "https://api.github.com",
+ headers: {
+ accept: "application/vnd.github.v3+json",
+ "user-agent": userAgent
+ },
+ mediaType: {
+ format: ""
+ }
+};
+
+// pkg/dist-src/util/lowercase-keys.js
+function lowercaseKeys(object) {
+ if (!object) {
+ return {};
+ }
+ return Object.keys(object).reduce((newObj, key) => {
+ newObj[key.toLowerCase()] = object[key];
+ return newObj;
+ }, {});
+}
+
+// pkg/dist-src/util/is-plain-object.js
+function isPlainObject(value) {
+ if (typeof value !== "object" || value === null)
+ return false;
+ if (Object.prototype.toString.call(value) !== "[object Object]")
+ return false;
+ const proto = Object.getPrototypeOf(value);
+ if (proto === null)
+ return true;
+ const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
+ return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
+}
+
+// pkg/dist-src/util/merge-deep.js
+function mergeDeep(defaults, options) {
+ const result = Object.assign({}, defaults);
+ Object.keys(options).forEach((key) => {
+ if (isPlainObject(options[key])) {
+ if (!(key in defaults))
+ Object.assign(result, { [key]: options[key] });
+ else
+ result[key] = mergeDeep(defaults[key], options[key]);
+ } else {
+ Object.assign(result, { [key]: options[key] });
+ }
+ });
+ return result;
+}
+
+// pkg/dist-src/util/remove-undefined-properties.js
+function removeUndefinedProperties(obj) {
+ for (const key in obj) {
+ if (obj[key] === void 0) {
+ delete obj[key];
+ }
+ }
+ return obj;
+}
+
+// pkg/dist-src/merge.js
+function merge(defaults, route, options) {
+ if (typeof route === "string") {
+ let [method, url] = route.split(" ");
+ options = Object.assign(url ? { method, url } : { url: method }, options);
+ } else {
+ options = Object.assign({}, route);
+ }
+ options.headers = lowercaseKeys(options.headers);
+ removeUndefinedProperties(options);
+ removeUndefinedProperties(options.headers);
+ const mergedOptions = mergeDeep(defaults || {}, options);
+ if (options.url === "/graphql") {
+ if (defaults && defaults.mediaType.previews?.length) {
+ mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(
+ (preview) => !mergedOptions.mediaType.previews.includes(preview)
+ ).concat(mergedOptions.mediaType.previews);
+ }
+ mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, ""));
+ }
+ return mergedOptions;
+}
+
+// pkg/dist-src/util/add-query-parameters.js
+function addQueryParameters(url, parameters) {
+ const separator = /\?/.test(url) ? "&" : "?";
+ const names = Object.keys(parameters);
+ if (names.length === 0) {
+ return url;
+ }
+ return url + separator + names.map((name) => {
+ if (name === "q") {
+ return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+");
+ }
+ return `${name}=${encodeURIComponent(parameters[name])}`;
+ }).join("&");
+}
+
+// pkg/dist-src/util/extract-url-variable-names.js
+var urlVariableRegex = /\{[^}]+\}/g;
+function removeNonChars(variableName) {
+ return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
+}
+function extractUrlVariableNames(url) {
+ const matches = url.match(urlVariableRegex);
+ if (!matches) {
+ return [];
+ }
+ return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);
+}
+
+// pkg/dist-src/util/omit.js
+function omit(object, keysToOmit) {
+ const result = { __proto__: null };
+ for (const key of Object.keys(object)) {
+ if (keysToOmit.indexOf(key) === -1) {
+ result[key] = object[key];
+ }
+ }
+ return result;
+}
+
+// pkg/dist-src/util/url-template.js
+function encodeReserved(str) {
+ return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) {
+ if (!/%[0-9A-Fa-f]/.test(part)) {
+ part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]");
+ }
+ return part;
+ }).join("");
+}
+function encodeUnreserved(str) {
+ return encodeURIComponent(str).replace(/[!'()*]/g, function(c) {
+ return "%" + c.charCodeAt(0).toString(16).toUpperCase();
+ });
+}
+function encodeValue(operator, value, key) {
+ value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value);
+ if (key) {
+ return encodeUnreserved(key) + "=" + value;
+ } else {
+ return value;
+ }
+}
+function isDefined(value) {
+ return value !== void 0 && value !== null;
+}
+function isKeyOperator(operator) {
+ return operator === ";" || operator === "&" || operator === "?";
+}
+function getValues(context, operator, key, modifier) {
+ var value = context[key], result = [];
+ if (isDefined(value) && value !== "") {
+ if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
+ value = value.toString();
+ if (modifier && modifier !== "*") {
+ value = value.substring(0, parseInt(modifier, 10));
+ }
+ result.push(
+ encodeValue(operator, value, isKeyOperator(operator) ? key : "")
+ );
+ } else {
+ if (modifier === "*") {
+ if (Array.isArray(value)) {
+ value.filter(isDefined).forEach(function(value2) {
+ result.push(
+ encodeValue(operator, value2, isKeyOperator(operator) ? key : "")
+ );
+ });
+ } else {
+ Object.keys(value).forEach(function(k) {
+ if (isDefined(value[k])) {
+ result.push(encodeValue(operator, value[k], k));
+ }
+ });
+ }
+ } else {
+ const tmp = [];
+ if (Array.isArray(value)) {
+ value.filter(isDefined).forEach(function(value2) {
+ tmp.push(encodeValue(operator, value2));
+ });
+ } else {
+ Object.keys(value).forEach(function(k) {
+ if (isDefined(value[k])) {
+ tmp.push(encodeUnreserved(k));
+ tmp.push(encodeValue(operator, value[k].toString()));
+ }
+ });
+ }
+ if (isKeyOperator(operator)) {
+ result.push(encodeUnreserved(key) + "=" + tmp.join(","));
+ } else if (tmp.length !== 0) {
+ result.push(tmp.join(","));
+ }
+ }
+ }
+ } else {
+ if (operator === ";") {
+ if (isDefined(value)) {
+ result.push(encodeUnreserved(key));
+ }
+ } else if (value === "" && (operator === "&" || operator === "?")) {
+ result.push(encodeUnreserved(key) + "=");
+ } else if (value === "") {
+ result.push("");
+ }
+ }
+ return result;
+}
+function parseUrl(template) {
+ return {
+ expand: expand.bind(null, template)
+ };
+}
+function expand(template, context) {
+ var operators = ["+", "#", ".", "/", ";", "?", "&"];
+ template = template.replace(
+ /\{([^\{\}]+)\}|([^\{\}]+)/g,
+ function(_, expression, literal) {
+ if (expression) {
+ let operator = "";
+ const values = [];
+ if (operators.indexOf(expression.charAt(0)) !== -1) {
+ operator = expression.charAt(0);
+ expression = expression.substr(1);
+ }
+ expression.split(/,/g).forEach(function(variable) {
+ var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
+ values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
+ });
+ if (operator && operator !== "+") {
+ var separator = ",";
+ if (operator === "?") {
+ separator = "&";
+ } else if (operator !== "#") {
+ separator = operator;
+ }
+ return (values.length !== 0 ? operator : "") + values.join(separator);
+ } else {
+ return values.join(",");
+ }
+ } else {
+ return encodeReserved(literal);
+ }
+ }
+ );
+ if (template === "/") {
+ return template;
+ } else {
+ return template.replace(/\/$/, "");
+ }
+}
+
+// pkg/dist-src/parse.js
+function parse(options) {
+ let method = options.method.toUpperCase();
+ let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}");
+ let headers = Object.assign({}, options.headers);
+ let body;
+ let parameters = omit(options, [
+ "method",
+ "baseUrl",
+ "url",
+ "headers",
+ "request",
+ "mediaType"
+ ]);
+ const urlVariableNames = extractUrlVariableNames(url);
+ url = parseUrl(url).expand(parameters);
+ if (!/^http/.test(url)) {
+ url = options.baseUrl + url;
+ }
+ const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl");
+ const remainingParameters = omit(parameters, omittedParameters);
+ const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);
+ if (!isBinaryRequest) {
+ if (options.mediaType.format) {
+ headers.accept = headers.accept.split(/,/).map(
+ (format) => format.replace(
+ /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/,
+ `application/vnd$1$2.${options.mediaType.format}`
+ )
+ ).join(",");
+ }
+ if (url.endsWith("/graphql")) {
+ if (options.mediaType.previews?.length) {
+ const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
+ headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => {
+ const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
+ return `application/vnd.github.${preview}-preview${format}`;
+ }).join(",");
+ }
+ }
+ }
+ if (["GET", "HEAD"].includes(method)) {
+ url = addQueryParameters(url, remainingParameters);
+ } else {
+ if ("data" in remainingParameters) {
+ body = remainingParameters.data;
+ } else {
+ if (Object.keys(remainingParameters).length) {
+ body = remainingParameters;
+ }
+ }
+ }
+ if (!headers["content-type"] && typeof body !== "undefined") {
+ headers["content-type"] = "application/json; charset=utf-8";
+ }
+ if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
+ body = "";
+ }
+ return Object.assign(
+ { method, url, headers },
+ typeof body !== "undefined" ? { body } : null,
+ options.request ? { request: options.request } : null
+ );
+}
+
+// pkg/dist-src/endpoint-with-defaults.js
+function endpointWithDefaults(defaults, route, options) {
+ return parse(merge(defaults, route, options));
+}
+
+// pkg/dist-src/with-defaults.js
+function withDefaults(oldDefaults, newDefaults) {
+ const DEFAULTS2 = merge(oldDefaults, newDefaults);
+ const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2);
+ return Object.assign(endpoint2, {
+ DEFAULTS: DEFAULTS2,
+ defaults: withDefaults.bind(null, DEFAULTS2),
+ merge: merge.bind(null, DEFAULTS2),
+ parse
+ });
+}
+
+// pkg/dist-src/index.js
+var endpoint = withDefaults(null, DEFAULTS);
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
+
+
+/***/ }),
+
+/***/ 8467:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ GraphqlResponseError: () => GraphqlResponseError,
+ graphql: () => graphql2,
+ withCustomRequest: () => withCustomRequest
+});
+module.exports = __toCommonJS(dist_src_exports);
+var import_request3 = __nccwpck_require__(6234);
+var import_universal_user_agent = __nccwpck_require__(5030);
+
+// pkg/dist-src/version.js
+var VERSION = "7.1.0";
+
+// pkg/dist-src/with-defaults.js
+var import_request2 = __nccwpck_require__(6234);
+
+// pkg/dist-src/graphql.js
+var import_request = __nccwpck_require__(6234);
+
+// pkg/dist-src/error.js
+function _buildMessageForResponseErrors(data) {
+ return `Request failed due to following response errors:
+` + data.errors.map((e) => ` - ${e.message}`).join("\n");
+}
+var GraphqlResponseError = class extends Error {
+ constructor(request2, headers, response) {
+ super(_buildMessageForResponseErrors(response));
+ this.request = request2;
+ this.headers = headers;
+ this.response = response;
+ this.name = "GraphqlResponseError";
+ this.errors = response.errors;
+ this.data = response.data;
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+ }
+};
+
+// pkg/dist-src/graphql.js
+var NON_VARIABLE_OPTIONS = [
+ "method",
+ "baseUrl",
+ "url",
+ "headers",
+ "request",
+ "query",
+ "mediaType"
+];
+var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"];
+var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/;
+function graphql(request2, query, options) {
+ if (options) {
+ if (typeof query === "string" && "query" in options) {
+ return Promise.reject(
+ new Error(`[@octokit/graphql] "query" cannot be used as variable name`)
+ );
+ }
+ for (const key in options) {
+ if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key))
+ continue;
+ return Promise.reject(
+ new Error(
+ `[@octokit/graphql] "${key}" cannot be used as variable name`
+ )
+ );
+ }
+ }
+ const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query;
+ const requestOptions = Object.keys(
+ parsedOptions
+ ).reduce((result, key) => {
+ if (NON_VARIABLE_OPTIONS.includes(key)) {
+ result[key] = parsedOptions[key];
+ return result;
+ }
+ if (!result.variables) {
+ result.variables = {};
+ }
+ result.variables[key] = parsedOptions[key];
+ return result;
+ }, {});
+ const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl;
+ if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {
+ requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql");
+ }
+ return request2(requestOptions).then((response) => {
+ if (response.data.errors) {
+ const headers = {};
+ for (const key of Object.keys(response.headers)) {
+ headers[key] = response.headers[key];
+ }
+ throw new GraphqlResponseError(
+ requestOptions,
+ headers,
+ response.data
+ );
+ }
+ return response.data.data;
+ });
+}
+
+// pkg/dist-src/with-defaults.js
+function withDefaults(request2, newDefaults) {
+ const newRequest = request2.defaults(newDefaults);
+ const newApi = (query, options) => {
+ return graphql(newRequest, query, options);
+ };
+ return Object.assign(newApi, {
+ defaults: withDefaults.bind(null, newRequest),
+ endpoint: newRequest.endpoint
+ });
+}
+
+// pkg/dist-src/index.js
+var graphql2 = withDefaults(import_request3.request, {
+ headers: {
+ "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`
+ },
+ method: "POST",
+ url: "/graphql"
+});
+function withCustomRequest(customRequest) {
+ return withDefaults(customRequest, {
+ method: "POST",
+ url: "/graphql"
+ });
+}
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
+
+
+/***/ }),
+
+/***/ 4193:
+/***/ ((module) => {
+
+"use strict";
+
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ composePaginateRest: () => composePaginateRest,
+ isPaginatingEndpoint: () => isPaginatingEndpoint,
+ paginateRest: () => paginateRest,
+ paginatingEndpoints: () => paginatingEndpoints
+});
+module.exports = __toCommonJS(dist_src_exports);
+
+// pkg/dist-src/version.js
+var VERSION = "9.2.1";
+
+// pkg/dist-src/normalize-paginated-list-response.js
+function normalizePaginatedListResponse(response) {
+ if (!response.data) {
+ return {
+ ...response,
+ data: []
+ };
+ }
+ const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
+ if (!responseNeedsNormalization)
+ return response;
+ const incompleteResults = response.data.incomplete_results;
+ const repositorySelection = response.data.repository_selection;
+ const totalCount = response.data.total_count;
+ delete response.data.incomplete_results;
+ delete response.data.repository_selection;
+ delete response.data.total_count;
+ const namespaceKey = Object.keys(response.data)[0];
+ const data = response.data[namespaceKey];
+ response.data = data;
+ if (typeof incompleteResults !== "undefined") {
+ response.data.incomplete_results = incompleteResults;
+ }
+ if (typeof repositorySelection !== "undefined") {
+ response.data.repository_selection = repositorySelection;
+ }
+ response.data.total_count = totalCount;
+ return response;
+}
+
+// pkg/dist-src/iterator.js
+function iterator(octokit, route, parameters) {
+ const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
+ const requestMethod = typeof route === "function" ? route : octokit.request;
+ const method = options.method;
+ const headers = options.headers;
+ let url = options.url;
+ return {
+ [Symbol.asyncIterator]: () => ({
+ async next() {
+ if (!url)
+ return { done: true };
+ try {
+ const response = await requestMethod({ method, url, headers });
+ const normalizedResponse = normalizePaginatedListResponse(response);
+ url = ((normalizedResponse.headers.link || "").match(
+ /<([^>]+)>;\s*rel="next"/
+ ) || [])[1];
+ return { value: normalizedResponse };
+ } catch (error) {
+ if (error.status !== 409)
+ throw error;
+ url = "";
+ return {
+ value: {
+ status: 200,
+ headers: {},
+ data: []
+ }
+ };
+ }
+ }
+ })
+ };
+}
+
+// pkg/dist-src/paginate.js
+function paginate(octokit, route, parameters, mapFn) {
+ if (typeof parameters === "function") {
+ mapFn = parameters;
+ parameters = void 0;
+ }
+ return gather(
+ octokit,
+ [],
+ iterator(octokit, route, parameters)[Symbol.asyncIterator](),
+ mapFn
+ );
+}
+function gather(octokit, results, iterator2, mapFn) {
+ return iterator2.next().then((result) => {
+ if (result.done) {
+ return results;
+ }
+ let earlyExit = false;
+ function done() {
+ earlyExit = true;
+ }
+ results = results.concat(
+ mapFn ? mapFn(result.value, done) : result.value.data
+ );
+ if (earlyExit) {
+ return results;
+ }
+ return gather(octokit, results, iterator2, mapFn);
+ });
+}
+
+// pkg/dist-src/compose-paginate.js
+var composePaginateRest = Object.assign(paginate, {
+ iterator
+});
+
+// pkg/dist-src/generated/paginating-endpoints.js
+var paginatingEndpoints = [
+ "GET /advisories",
+ "GET /app/hook/deliveries",
+ "GET /app/installation-requests",
+ "GET /app/installations",
+ "GET /assignments/{assignment_id}/accepted_assignments",
+ "GET /classrooms",
+ "GET /classrooms/{classroom_id}/assignments",
+ "GET /enterprises/{enterprise}/dependabot/alerts",
+ "GET /enterprises/{enterprise}/secret-scanning/alerts",
+ "GET /events",
+ "GET /gists",
+ "GET /gists/public",
+ "GET /gists/starred",
+ "GET /gists/{gist_id}/comments",
+ "GET /gists/{gist_id}/commits",
+ "GET /gists/{gist_id}/forks",
+ "GET /installation/repositories",
+ "GET /issues",
+ "GET /licenses",
+ "GET /marketplace_listing/plans",
+ "GET /marketplace_listing/plans/{plan_id}/accounts",
+ "GET /marketplace_listing/stubbed/plans",
+ "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts",
+ "GET /networks/{owner}/{repo}/events",
+ "GET /notifications",
+ "GET /organizations",
+ "GET /orgs/{org}/actions/cache/usage-by-repository",
+ "GET /orgs/{org}/actions/permissions/repositories",
+ "GET /orgs/{org}/actions/runners",
+ "GET /orgs/{org}/actions/secrets",
+ "GET /orgs/{org}/actions/secrets/{secret_name}/repositories",
+ "GET /orgs/{org}/actions/variables",
+ "GET /orgs/{org}/actions/variables/{name}/repositories",
+ "GET /orgs/{org}/blocks",
+ "GET /orgs/{org}/code-scanning/alerts",
+ "GET /orgs/{org}/codespaces",
+ "GET /orgs/{org}/codespaces/secrets",
+ "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories",
+ "GET /orgs/{org}/copilot/billing/seats",
+ "GET /orgs/{org}/dependabot/alerts",
+ "GET /orgs/{org}/dependabot/secrets",
+ "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories",
+ "GET /orgs/{org}/events",
+ "GET /orgs/{org}/failed_invitations",
+ "GET /orgs/{org}/hooks",
+ "GET /orgs/{org}/hooks/{hook_id}/deliveries",
+ "GET /orgs/{org}/installations",
+ "GET /orgs/{org}/invitations",
+ "GET /orgs/{org}/invitations/{invitation_id}/teams",
+ "GET /orgs/{org}/issues",
+ "GET /orgs/{org}/members",
+ "GET /orgs/{org}/members/{username}/codespaces",
+ "GET /orgs/{org}/migrations",
+ "GET /orgs/{org}/migrations/{migration_id}/repositories",
+ "GET /orgs/{org}/organization-roles/{role_id}/teams",
+ "GET /orgs/{org}/organization-roles/{role_id}/users",
+ "GET /orgs/{org}/outside_collaborators",
+ "GET /orgs/{org}/packages",
+ "GET /orgs/{org}/packages/{package_type}/{package_name}/versions",
+ "GET /orgs/{org}/personal-access-token-requests",
+ "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories",
+ "GET /orgs/{org}/personal-access-tokens",
+ "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories",
+ "GET /orgs/{org}/projects",
+ "GET /orgs/{org}/properties/values",
+ "GET /orgs/{org}/public_members",
+ "GET /orgs/{org}/repos",
+ "GET /orgs/{org}/rulesets",
+ "GET /orgs/{org}/rulesets/rule-suites",
+ "GET /orgs/{org}/secret-scanning/alerts",
+ "GET /orgs/{org}/security-advisories",
+ "GET /orgs/{org}/teams",
+ "GET /orgs/{org}/teams/{team_slug}/discussions",
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments",
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions",
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions",
+ "GET /orgs/{org}/teams/{team_slug}/invitations",
+ "GET /orgs/{org}/teams/{team_slug}/members",
+ "GET /orgs/{org}/teams/{team_slug}/projects",
+ "GET /orgs/{org}/teams/{team_slug}/repos",
+ "GET /orgs/{org}/teams/{team_slug}/teams",
+ "GET /projects/columns/{column_id}/cards",
+ "GET /projects/{project_id}/collaborators",
+ "GET /projects/{project_id}/columns",
+ "GET /repos/{owner}/{repo}/actions/artifacts",
+ "GET /repos/{owner}/{repo}/actions/caches",
+ "GET /repos/{owner}/{repo}/actions/organization-secrets",
+ "GET /repos/{owner}/{repo}/actions/organization-variables",
+ "GET /repos/{owner}/{repo}/actions/runners",
+ "GET /repos/{owner}/{repo}/actions/runs",
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts",
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs",
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs",
+ "GET /repos/{owner}/{repo}/actions/secrets",
+ "GET /repos/{owner}/{repo}/actions/variables",
+ "GET /repos/{owner}/{repo}/actions/workflows",
+ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs",
+ "GET /repos/{owner}/{repo}/activity",
+ "GET /repos/{owner}/{repo}/assignees",
+ "GET /repos/{owner}/{repo}/branches",
+ "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations",
+ "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs",
+ "GET /repos/{owner}/{repo}/code-scanning/alerts",
+ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances",
+ "GET /repos/{owner}/{repo}/code-scanning/analyses",
+ "GET /repos/{owner}/{repo}/codespaces",
+ "GET /repos/{owner}/{repo}/codespaces/devcontainers",
+ "GET /repos/{owner}/{repo}/codespaces/secrets",
+ "GET /repos/{owner}/{repo}/collaborators",
+ "GET /repos/{owner}/{repo}/comments",
+ "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions",
+ "GET /repos/{owner}/{repo}/commits",
+ "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments",
+ "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls",
+ "GET /repos/{owner}/{repo}/commits/{ref}/check-runs",
+ "GET /repos/{owner}/{repo}/commits/{ref}/check-suites",
+ "GET /repos/{owner}/{repo}/commits/{ref}/status",
+ "GET /repos/{owner}/{repo}/commits/{ref}/statuses",
+ "GET /repos/{owner}/{repo}/contributors",
+ "GET /repos/{owner}/{repo}/dependabot/alerts",
+ "GET /repos/{owner}/{repo}/dependabot/secrets",
+ "GET /repos/{owner}/{repo}/deployments",
+ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses",
+ "GET /repos/{owner}/{repo}/environments",
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies",
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps",
+ "GET /repos/{owner}/{repo}/events",
+ "GET /repos/{owner}/{repo}/forks",
+ "GET /repos/{owner}/{repo}/hooks",
+ "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries",
+ "GET /repos/{owner}/{repo}/invitations",
+ "GET /repos/{owner}/{repo}/issues",
+ "GET /repos/{owner}/{repo}/issues/comments",
+ "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions",
+ "GET /repos/{owner}/{repo}/issues/events",
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/comments",
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/events",
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/labels",
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions",
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline",
+ "GET /repos/{owner}/{repo}/keys",
+ "GET /repos/{owner}/{repo}/labels",
+ "GET /repos/{owner}/{repo}/milestones",
+ "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels",
+ "GET /repos/{owner}/{repo}/notifications",
+ "GET /repos/{owner}/{repo}/pages/builds",
+ "GET /repos/{owner}/{repo}/projects",
+ "GET /repos/{owner}/{repo}/pulls",
+ "GET /repos/{owner}/{repo}/pulls/comments",
+ "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions",
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments",
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits",
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/files",
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews",
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments",
+ "GET /repos/{owner}/{repo}/releases",
+ "GET /repos/{owner}/{repo}/releases/{release_id}/assets",
+ "GET /repos/{owner}/{repo}/releases/{release_id}/reactions",
+ "GET /repos/{owner}/{repo}/rules/branches/{branch}",
+ "GET /repos/{owner}/{repo}/rulesets",
+ "GET /repos/{owner}/{repo}/rulesets/rule-suites",
+ "GET /repos/{owner}/{repo}/secret-scanning/alerts",
+ "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations",
+ "GET /repos/{owner}/{repo}/security-advisories",
+ "GET /repos/{owner}/{repo}/stargazers",
+ "GET /repos/{owner}/{repo}/subscribers",
+ "GET /repos/{owner}/{repo}/tags",
+ "GET /repos/{owner}/{repo}/teams",
+ "GET /repos/{owner}/{repo}/topics",
+ "GET /repositories",
+ "GET /repositories/{repository_id}/environments/{environment_name}/secrets",
+ "GET /repositories/{repository_id}/environments/{environment_name}/variables",
+ "GET /search/code",
+ "GET /search/commits",
+ "GET /search/issues",
+ "GET /search/labels",
+ "GET /search/repositories",
+ "GET /search/topics",
+ "GET /search/users",
+ "GET /teams/{team_id}/discussions",
+ "GET /teams/{team_id}/discussions/{discussion_number}/comments",
+ "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions",
+ "GET /teams/{team_id}/discussions/{discussion_number}/reactions",
+ "GET /teams/{team_id}/invitations",
+ "GET /teams/{team_id}/members",
+ "GET /teams/{team_id}/projects",
+ "GET /teams/{team_id}/repos",
+ "GET /teams/{team_id}/teams",
+ "GET /user/blocks",
+ "GET /user/codespaces",
+ "GET /user/codespaces/secrets",
+ "GET /user/emails",
+ "GET /user/followers",
+ "GET /user/following",
+ "GET /user/gpg_keys",
+ "GET /user/installations",
+ "GET /user/installations/{installation_id}/repositories",
+ "GET /user/issues",
+ "GET /user/keys",
+ "GET /user/marketplace_purchases",
+ "GET /user/marketplace_purchases/stubbed",
+ "GET /user/memberships/orgs",
+ "GET /user/migrations",
+ "GET /user/migrations/{migration_id}/repositories",
+ "GET /user/orgs",
+ "GET /user/packages",
+ "GET /user/packages/{package_type}/{package_name}/versions",
+ "GET /user/public_emails",
+ "GET /user/repos",
+ "GET /user/repository_invitations",
+ "GET /user/social_accounts",
+ "GET /user/ssh_signing_keys",
+ "GET /user/starred",
+ "GET /user/subscriptions",
+ "GET /user/teams",
+ "GET /users",
+ "GET /users/{username}/events",
+ "GET /users/{username}/events/orgs/{org}",
+ "GET /users/{username}/events/public",
+ "GET /users/{username}/followers",
+ "GET /users/{username}/following",
+ "GET /users/{username}/gists",
+ "GET /users/{username}/gpg_keys",
+ "GET /users/{username}/keys",
+ "GET /users/{username}/orgs",
+ "GET /users/{username}/packages",
+ "GET /users/{username}/projects",
+ "GET /users/{username}/received_events",
+ "GET /users/{username}/received_events/public",
+ "GET /users/{username}/repos",
+ "GET /users/{username}/social_accounts",
+ "GET /users/{username}/ssh_signing_keys",
+ "GET /users/{username}/starred",
+ "GET /users/{username}/subscriptions"
+];
+
+// pkg/dist-src/paginating-endpoints.js
+function isPaginatingEndpoint(arg) {
+ if (typeof arg === "string") {
+ return paginatingEndpoints.includes(arg);
+ } else {
+ return false;
+ }
+}
+
+// pkg/dist-src/index.js
+function paginateRest(octokit) {
+ return {
+ paginate: Object.assign(paginate.bind(null, octokit), {
+ iterator: iterator.bind(null, octokit)
+ })
+ };
+}
+paginateRest.VERSION = VERSION;
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
+
+
+/***/ }),
+
+/***/ 3044:
+/***/ ((module) => {
+
+"use strict";
+
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ legacyRestEndpointMethods: () => legacyRestEndpointMethods,
+ restEndpointMethods: () => restEndpointMethods
+});
+module.exports = __toCommonJS(dist_src_exports);
+
+// pkg/dist-src/version.js
+var VERSION = "10.4.1";
+
+// pkg/dist-src/generated/endpoints.js
+var Endpoints = {
+ actions: {
+ addCustomLabelsToSelfHostedRunnerForOrg: [
+ "POST /orgs/{org}/actions/runners/{runner_id}/labels"
+ ],
+ addCustomLabelsToSelfHostedRunnerForRepo: [
+ "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
+ ],
+ addSelectedRepoToOrgSecret: [
+ "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ addSelectedRepoToOrgVariable: [
+ "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}"
+ ],
+ approveWorkflowRun: [
+ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"
+ ],
+ cancelWorkflowRun: [
+ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"
+ ],
+ createEnvironmentVariable: [
+ "POST /repositories/{repository_id}/environments/{environment_name}/variables"
+ ],
+ createOrUpdateEnvironmentSecret: [
+ "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"
+ ],
+ createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"],
+ createOrUpdateRepoSecret: [
+ "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"
+ ],
+ createOrgVariable: ["POST /orgs/{org}/actions/variables"],
+ createRegistrationTokenForOrg: [
+ "POST /orgs/{org}/actions/runners/registration-token"
+ ],
+ createRegistrationTokenForRepo: [
+ "POST /repos/{owner}/{repo}/actions/runners/registration-token"
+ ],
+ createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"],
+ createRemoveTokenForRepo: [
+ "POST /repos/{owner}/{repo}/actions/runners/remove-token"
+ ],
+ createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"],
+ createWorkflowDispatch: [
+ "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"
+ ],
+ deleteActionsCacheById: [
+ "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"
+ ],
+ deleteActionsCacheByKey: [
+ "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"
+ ],
+ deleteArtifact: [
+ "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"
+ ],
+ deleteEnvironmentSecret: [
+ "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"
+ ],
+ deleteEnvironmentVariable: [
+ "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}"
+ ],
+ deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"],
+ deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"],
+ deleteRepoSecret: [
+ "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"
+ ],
+ deleteRepoVariable: [
+ "DELETE /repos/{owner}/{repo}/actions/variables/{name}"
+ ],
+ deleteSelfHostedRunnerFromOrg: [
+ "DELETE /orgs/{org}/actions/runners/{runner_id}"
+ ],
+ deleteSelfHostedRunnerFromRepo: [
+ "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"
+ ],
+ deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"],
+ deleteWorkflowRunLogs: [
+ "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"
+ ],
+ disableSelectedRepositoryGithubActionsOrganization: [
+ "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"
+ ],
+ disableWorkflow: [
+ "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"
+ ],
+ downloadArtifact: [
+ "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"
+ ],
+ downloadJobLogsForWorkflowRun: [
+ "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"
+ ],
+ downloadWorkflowRunAttemptLogs: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"
+ ],
+ downloadWorkflowRunLogs: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"
+ ],
+ enableSelectedRepositoryGithubActionsOrganization: [
+ "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"
+ ],
+ enableWorkflow: [
+ "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"
+ ],
+ forceCancelWorkflowRun: [
+ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel"
+ ],
+ generateRunnerJitconfigForOrg: [
+ "POST /orgs/{org}/actions/runners/generate-jitconfig"
+ ],
+ generateRunnerJitconfigForRepo: [
+ "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig"
+ ],
+ getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"],
+ getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"],
+ getActionsCacheUsageByRepoForOrg: [
+ "GET /orgs/{org}/actions/cache/usage-by-repository"
+ ],
+ getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"],
+ getAllowedActionsOrganization: [
+ "GET /orgs/{org}/actions/permissions/selected-actions"
+ ],
+ getAllowedActionsRepository: [
+ "GET /repos/{owner}/{repo}/actions/permissions/selected-actions"
+ ],
+ getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
+ getCustomOidcSubClaimForRepo: [
+ "GET /repos/{owner}/{repo}/actions/oidc/customization/sub"
+ ],
+ getEnvironmentPublicKey: [
+ "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"
+ ],
+ getEnvironmentSecret: [
+ "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"
+ ],
+ getEnvironmentVariable: [
+ "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}"
+ ],
+ getGithubActionsDefaultWorkflowPermissionsOrganization: [
+ "GET /orgs/{org}/actions/permissions/workflow"
+ ],
+ getGithubActionsDefaultWorkflowPermissionsRepository: [
+ "GET /repos/{owner}/{repo}/actions/permissions/workflow"
+ ],
+ getGithubActionsPermissionsOrganization: [
+ "GET /orgs/{org}/actions/permissions"
+ ],
+ getGithubActionsPermissionsRepository: [
+ "GET /repos/{owner}/{repo}/actions/permissions"
+ ],
+ getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"],
+ getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"],
+ getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"],
+ getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"],
+ getPendingDeploymentsForRun: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"
+ ],
+ getRepoPermissions: [
+ "GET /repos/{owner}/{repo}/actions/permissions",
+ {},
+ { renamed: ["actions", "getGithubActionsPermissionsRepository"] }
+ ],
+ getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"],
+ getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
+ getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"],
+ getReviewsForRun: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"
+ ],
+ getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"],
+ getSelfHostedRunnerForRepo: [
+ "GET /repos/{owner}/{repo}/actions/runners/{runner_id}"
+ ],
+ getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"],
+ getWorkflowAccessToRepository: [
+ "GET /repos/{owner}/{repo}/actions/permissions/access"
+ ],
+ getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"],
+ getWorkflowRunAttempt: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"
+ ],
+ getWorkflowRunUsage: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"
+ ],
+ getWorkflowUsage: [
+ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"
+ ],
+ listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"],
+ listEnvironmentSecrets: [
+ "GET /repositories/{repository_id}/environments/{environment_name}/secrets"
+ ],
+ listEnvironmentVariables: [
+ "GET /repositories/{repository_id}/environments/{environment_name}/variables"
+ ],
+ listJobsForWorkflowRun: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"
+ ],
+ listJobsForWorkflowRunAttempt: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"
+ ],
+ listLabelsForSelfHostedRunnerForOrg: [
+ "GET /orgs/{org}/actions/runners/{runner_id}/labels"
+ ],
+ listLabelsForSelfHostedRunnerForRepo: [
+ "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
+ ],
+ listOrgSecrets: ["GET /orgs/{org}/actions/secrets"],
+ listOrgVariables: ["GET /orgs/{org}/actions/variables"],
+ listRepoOrganizationSecrets: [
+ "GET /repos/{owner}/{repo}/actions/organization-secrets"
+ ],
+ listRepoOrganizationVariables: [
+ "GET /repos/{owner}/{repo}/actions/organization-variables"
+ ],
+ listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"],
+ listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"],
+ listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"],
+ listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"],
+ listRunnerApplicationsForRepo: [
+ "GET /repos/{owner}/{repo}/actions/runners/downloads"
+ ],
+ listSelectedReposForOrgSecret: [
+ "GET /orgs/{org}/actions/secrets/{secret_name}/repositories"
+ ],
+ listSelectedReposForOrgVariable: [
+ "GET /orgs/{org}/actions/variables/{name}/repositories"
+ ],
+ listSelectedRepositoriesEnabledGithubActionsOrganization: [
+ "GET /orgs/{org}/actions/permissions/repositories"
+ ],
+ listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"],
+ listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"],
+ listWorkflowRunArtifacts: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"
+ ],
+ listWorkflowRuns: [
+ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"
+ ],
+ listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"],
+ reRunJobForWorkflowRun: [
+ "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"
+ ],
+ reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"],
+ reRunWorkflowFailedJobs: [
+ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"
+ ],
+ removeAllCustomLabelsFromSelfHostedRunnerForOrg: [
+ "DELETE /orgs/{org}/actions/runners/{runner_id}/labels"
+ ],
+ removeAllCustomLabelsFromSelfHostedRunnerForRepo: [
+ "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
+ ],
+ removeCustomLabelFromSelfHostedRunnerForOrg: [
+ "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"
+ ],
+ removeCustomLabelFromSelfHostedRunnerForRepo: [
+ "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"
+ ],
+ removeSelectedRepoFromOrgSecret: [
+ "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ removeSelectedRepoFromOrgVariable: [
+ "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}"
+ ],
+ reviewCustomGatesForRun: [
+ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule"
+ ],
+ reviewPendingDeploymentsForRun: [
+ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"
+ ],
+ setAllowedActionsOrganization: [
+ "PUT /orgs/{org}/actions/permissions/selected-actions"
+ ],
+ setAllowedActionsRepository: [
+ "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"
+ ],
+ setCustomLabelsForSelfHostedRunnerForOrg: [
+ "PUT /orgs/{org}/actions/runners/{runner_id}/labels"
+ ],
+ setCustomLabelsForSelfHostedRunnerForRepo: [
+ "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
+ ],
+ setCustomOidcSubClaimForRepo: [
+ "PUT /repos/{owner}/{repo}/actions/oidc/customization/sub"
+ ],
+ setGithubActionsDefaultWorkflowPermissionsOrganization: [
+ "PUT /orgs/{org}/actions/permissions/workflow"
+ ],
+ setGithubActionsDefaultWorkflowPermissionsRepository: [
+ "PUT /repos/{owner}/{repo}/actions/permissions/workflow"
+ ],
+ setGithubActionsPermissionsOrganization: [
+ "PUT /orgs/{org}/actions/permissions"
+ ],
+ setGithubActionsPermissionsRepository: [
+ "PUT /repos/{owner}/{repo}/actions/permissions"
+ ],
+ setSelectedReposForOrgSecret: [
+ "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"
+ ],
+ setSelectedReposForOrgVariable: [
+ "PUT /orgs/{org}/actions/variables/{name}/repositories"
+ ],
+ setSelectedRepositoriesEnabledGithubActionsOrganization: [
+ "PUT /orgs/{org}/actions/permissions/repositories"
+ ],
+ setWorkflowAccessToRepository: [
+ "PUT /repos/{owner}/{repo}/actions/permissions/access"
+ ],
+ updateEnvironmentVariable: [
+ "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}"
+ ],
+ updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"],
+ updateRepoVariable: [
+ "PATCH /repos/{owner}/{repo}/actions/variables/{name}"
+ ]
+ },
+ activity: {
+ checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"],
+ deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"],
+ deleteThreadSubscription: [
+ "DELETE /notifications/threads/{thread_id}/subscription"
+ ],
+ getFeeds: ["GET /feeds"],
+ getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"],
+ getThread: ["GET /notifications/threads/{thread_id}"],
+ getThreadSubscriptionForAuthenticatedUser: [
+ "GET /notifications/threads/{thread_id}/subscription"
+ ],
+ listEventsForAuthenticatedUser: ["GET /users/{username}/events"],
+ listNotificationsForAuthenticatedUser: ["GET /notifications"],
+ listOrgEventsForAuthenticatedUser: [
+ "GET /users/{username}/events/orgs/{org}"
+ ],
+ listPublicEvents: ["GET /events"],
+ listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"],
+ listPublicEventsForUser: ["GET /users/{username}/events/public"],
+ listPublicOrgEvents: ["GET /orgs/{org}/events"],
+ listReceivedEventsForUser: ["GET /users/{username}/received_events"],
+ listReceivedPublicEventsForUser: [
+ "GET /users/{username}/received_events/public"
+ ],
+ listRepoEvents: ["GET /repos/{owner}/{repo}/events"],
+ listRepoNotificationsForAuthenticatedUser: [
+ "GET /repos/{owner}/{repo}/notifications"
+ ],
+ listReposStarredByAuthenticatedUser: ["GET /user/starred"],
+ listReposStarredByUser: ["GET /users/{username}/starred"],
+ listReposWatchedByUser: ["GET /users/{username}/subscriptions"],
+ listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"],
+ listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"],
+ listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"],
+ markNotificationsAsRead: ["PUT /notifications"],
+ markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"],
+ markThreadAsDone: ["DELETE /notifications/threads/{thread_id}"],
+ markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"],
+ setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"],
+ setThreadSubscription: [
+ "PUT /notifications/threads/{thread_id}/subscription"
+ ],
+ starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"],
+ unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"]
+ },
+ apps: {
+ addRepoToInstallation: [
+ "PUT /user/installations/{installation_id}/repositories/{repository_id}",
+ {},
+ { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] }
+ ],
+ addRepoToInstallationForAuthenticatedUser: [
+ "PUT /user/installations/{installation_id}/repositories/{repository_id}"
+ ],
+ checkToken: ["POST /applications/{client_id}/token"],
+ createFromManifest: ["POST /app-manifests/{code}/conversions"],
+ createInstallationAccessToken: [
+ "POST /app/installations/{installation_id}/access_tokens"
+ ],
+ deleteAuthorization: ["DELETE /applications/{client_id}/grant"],
+ deleteInstallation: ["DELETE /app/installations/{installation_id}"],
+ deleteToken: ["DELETE /applications/{client_id}/token"],
+ getAuthenticated: ["GET /app"],
+ getBySlug: ["GET /apps/{app_slug}"],
+ getInstallation: ["GET /app/installations/{installation_id}"],
+ getOrgInstallation: ["GET /orgs/{org}/installation"],
+ getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"],
+ getSubscriptionPlanForAccount: [
+ "GET /marketplace_listing/accounts/{account_id}"
+ ],
+ getSubscriptionPlanForAccountStubbed: [
+ "GET /marketplace_listing/stubbed/accounts/{account_id}"
+ ],
+ getUserInstallation: ["GET /users/{username}/installation"],
+ getWebhookConfigForApp: ["GET /app/hook/config"],
+ getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"],
+ listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"],
+ listAccountsForPlanStubbed: [
+ "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"
+ ],
+ listInstallationReposForAuthenticatedUser: [
+ "GET /user/installations/{installation_id}/repositories"
+ ],
+ listInstallationRequestsForAuthenticatedApp: [
+ "GET /app/installation-requests"
+ ],
+ listInstallations: ["GET /app/installations"],
+ listInstallationsForAuthenticatedUser: ["GET /user/installations"],
+ listPlans: ["GET /marketplace_listing/plans"],
+ listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"],
+ listReposAccessibleToInstallation: ["GET /installation/repositories"],
+ listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"],
+ listSubscriptionsForAuthenticatedUserStubbed: [
+ "GET /user/marketplace_purchases/stubbed"
+ ],
+ listWebhookDeliveries: ["GET /app/hook/deliveries"],
+ redeliverWebhookDelivery: [
+ "POST /app/hook/deliveries/{delivery_id}/attempts"
+ ],
+ removeRepoFromInstallation: [
+ "DELETE /user/installations/{installation_id}/repositories/{repository_id}",
+ {},
+ { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] }
+ ],
+ removeRepoFromInstallationForAuthenticatedUser: [
+ "DELETE /user/installations/{installation_id}/repositories/{repository_id}"
+ ],
+ resetToken: ["PATCH /applications/{client_id}/token"],
+ revokeInstallationAccessToken: ["DELETE /installation/token"],
+ scopeToken: ["POST /applications/{client_id}/token/scoped"],
+ suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"],
+ unsuspendInstallation: [
+ "DELETE /app/installations/{installation_id}/suspended"
+ ],
+ updateWebhookConfigForApp: ["PATCH /app/hook/config"]
+ },
+ billing: {
+ getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"],
+ getGithubActionsBillingUser: [
+ "GET /users/{username}/settings/billing/actions"
+ ],
+ getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"],
+ getGithubPackagesBillingUser: [
+ "GET /users/{username}/settings/billing/packages"
+ ],
+ getSharedStorageBillingOrg: [
+ "GET /orgs/{org}/settings/billing/shared-storage"
+ ],
+ getSharedStorageBillingUser: [
+ "GET /users/{username}/settings/billing/shared-storage"
+ ]
+ },
+ checks: {
+ create: ["POST /repos/{owner}/{repo}/check-runs"],
+ createSuite: ["POST /repos/{owner}/{repo}/check-suites"],
+ get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"],
+ getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"],
+ listAnnotations: [
+ "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"
+ ],
+ listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"],
+ listForSuite: [
+ "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"
+ ],
+ listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"],
+ rerequestRun: [
+ "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"
+ ],
+ rerequestSuite: [
+ "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"
+ ],
+ setSuitesPreferences: [
+ "PATCH /repos/{owner}/{repo}/check-suites/preferences"
+ ],
+ update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"]
+ },
+ codeScanning: {
+ deleteAnalysis: [
+ "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"
+ ],
+ getAlert: [
+ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}",
+ {},
+ { renamedParameters: { alert_id: "alert_number" } }
+ ],
+ getAnalysis: [
+ "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"
+ ],
+ getCodeqlDatabase: [
+ "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}"
+ ],
+ getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"],
+ getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"],
+ listAlertInstances: [
+ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"
+ ],
+ listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"],
+ listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"],
+ listAlertsInstances: [
+ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances",
+ {},
+ { renamed: ["codeScanning", "listAlertInstances"] }
+ ],
+ listCodeqlDatabases: [
+ "GET /repos/{owner}/{repo}/code-scanning/codeql/databases"
+ ],
+ listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"],
+ updateAlert: [
+ "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"
+ ],
+ updateDefaultSetup: [
+ "PATCH /repos/{owner}/{repo}/code-scanning/default-setup"
+ ],
+ uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"]
+ },
+ codesOfConduct: {
+ getAllCodesOfConduct: ["GET /codes_of_conduct"],
+ getConductCode: ["GET /codes_of_conduct/{key}"]
+ },
+ codespaces: {
+ addRepositoryForSecretForAuthenticatedUser: [
+ "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ addSelectedRepoToOrgSecret: [
+ "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ checkPermissionsForDevcontainer: [
+ "GET /repos/{owner}/{repo}/codespaces/permissions_check"
+ ],
+ codespaceMachinesForAuthenticatedUser: [
+ "GET /user/codespaces/{codespace_name}/machines"
+ ],
+ createForAuthenticatedUser: ["POST /user/codespaces"],
+ createOrUpdateOrgSecret: [
+ "PUT /orgs/{org}/codespaces/secrets/{secret_name}"
+ ],
+ createOrUpdateRepoSecret: [
+ "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
+ ],
+ createOrUpdateSecretForAuthenticatedUser: [
+ "PUT /user/codespaces/secrets/{secret_name}"
+ ],
+ createWithPrForAuthenticatedUser: [
+ "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"
+ ],
+ createWithRepoForAuthenticatedUser: [
+ "POST /repos/{owner}/{repo}/codespaces"
+ ],
+ deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"],
+ deleteFromOrganization: [
+ "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"
+ ],
+ deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"],
+ deleteRepoSecret: [
+ "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
+ ],
+ deleteSecretForAuthenticatedUser: [
+ "DELETE /user/codespaces/secrets/{secret_name}"
+ ],
+ exportForAuthenticatedUser: [
+ "POST /user/codespaces/{codespace_name}/exports"
+ ],
+ getCodespacesForUserInOrg: [
+ "GET /orgs/{org}/members/{username}/codespaces"
+ ],
+ getExportDetailsForAuthenticatedUser: [
+ "GET /user/codespaces/{codespace_name}/exports/{export_id}"
+ ],
+ getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"],
+ getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"],
+ getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"],
+ getPublicKeyForAuthenticatedUser: [
+ "GET /user/codespaces/secrets/public-key"
+ ],
+ getRepoPublicKey: [
+ "GET /repos/{owner}/{repo}/codespaces/secrets/public-key"
+ ],
+ getRepoSecret: [
+ "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
+ ],
+ getSecretForAuthenticatedUser: [
+ "GET /user/codespaces/secrets/{secret_name}"
+ ],
+ listDevcontainersInRepositoryForAuthenticatedUser: [
+ "GET /repos/{owner}/{repo}/codespaces/devcontainers"
+ ],
+ listForAuthenticatedUser: ["GET /user/codespaces"],
+ listInOrganization: [
+ "GET /orgs/{org}/codespaces",
+ {},
+ { renamedParameters: { org_id: "org" } }
+ ],
+ listInRepositoryForAuthenticatedUser: [
+ "GET /repos/{owner}/{repo}/codespaces"
+ ],
+ listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"],
+ listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"],
+ listRepositoriesForSecretForAuthenticatedUser: [
+ "GET /user/codespaces/secrets/{secret_name}/repositories"
+ ],
+ listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"],
+ listSelectedReposForOrgSecret: [
+ "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories"
+ ],
+ preFlightWithRepoForAuthenticatedUser: [
+ "GET /repos/{owner}/{repo}/codespaces/new"
+ ],
+ publishForAuthenticatedUser: [
+ "POST /user/codespaces/{codespace_name}/publish"
+ ],
+ removeRepositoryForSecretForAuthenticatedUser: [
+ "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ removeSelectedRepoFromOrgSecret: [
+ "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ repoMachinesForAuthenticatedUser: [
+ "GET /repos/{owner}/{repo}/codespaces/machines"
+ ],
+ setRepositoriesForSecretForAuthenticatedUser: [
+ "PUT /user/codespaces/secrets/{secret_name}/repositories"
+ ],
+ setSelectedReposForOrgSecret: [
+ "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories"
+ ],
+ startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"],
+ stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"],
+ stopInOrganization: [
+ "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"
+ ],
+ updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"]
+ },
+ copilot: {
+ addCopilotSeatsForTeams: [
+ "POST /orgs/{org}/copilot/billing/selected_teams"
+ ],
+ addCopilotSeatsForUsers: [
+ "POST /orgs/{org}/copilot/billing/selected_users"
+ ],
+ cancelCopilotSeatAssignmentForTeams: [
+ "DELETE /orgs/{org}/copilot/billing/selected_teams"
+ ],
+ cancelCopilotSeatAssignmentForUsers: [
+ "DELETE /orgs/{org}/copilot/billing/selected_users"
+ ],
+ getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"],
+ getCopilotSeatDetailsForUser: [
+ "GET /orgs/{org}/members/{username}/copilot"
+ ],
+ listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"]
+ },
+ dependabot: {
+ addSelectedRepoToOrgSecret: [
+ "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ createOrUpdateOrgSecret: [
+ "PUT /orgs/{org}/dependabot/secrets/{secret_name}"
+ ],
+ createOrUpdateRepoSecret: [
+ "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
+ ],
+ deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"],
+ deleteRepoSecret: [
+ "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
+ ],
+ getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"],
+ getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"],
+ getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"],
+ getRepoPublicKey: [
+ "GET /repos/{owner}/{repo}/dependabot/secrets/public-key"
+ ],
+ getRepoSecret: [
+ "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
+ ],
+ listAlertsForEnterprise: [
+ "GET /enterprises/{enterprise}/dependabot/alerts"
+ ],
+ listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"],
+ listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"],
+ listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"],
+ listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"],
+ listSelectedReposForOrgSecret: [
+ "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"
+ ],
+ removeSelectedRepoFromOrgSecret: [
+ "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ setSelectedReposForOrgSecret: [
+ "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"
+ ],
+ updateAlert: [
+ "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"
+ ]
+ },
+ dependencyGraph: {
+ createRepositorySnapshot: [
+ "POST /repos/{owner}/{repo}/dependency-graph/snapshots"
+ ],
+ diffRange: [
+ "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"
+ ],
+ exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"]
+ },
+ emojis: { get: ["GET /emojis"] },
+ gists: {
+ checkIsStarred: ["GET /gists/{gist_id}/star"],
+ create: ["POST /gists"],
+ createComment: ["POST /gists/{gist_id}/comments"],
+ delete: ["DELETE /gists/{gist_id}"],
+ deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"],
+ fork: ["POST /gists/{gist_id}/forks"],
+ get: ["GET /gists/{gist_id}"],
+ getComment: ["GET /gists/{gist_id}/comments/{comment_id}"],
+ getRevision: ["GET /gists/{gist_id}/{sha}"],
+ list: ["GET /gists"],
+ listComments: ["GET /gists/{gist_id}/comments"],
+ listCommits: ["GET /gists/{gist_id}/commits"],
+ listForUser: ["GET /users/{username}/gists"],
+ listForks: ["GET /gists/{gist_id}/forks"],
+ listPublic: ["GET /gists/public"],
+ listStarred: ["GET /gists/starred"],
+ star: ["PUT /gists/{gist_id}/star"],
+ unstar: ["DELETE /gists/{gist_id}/star"],
+ update: ["PATCH /gists/{gist_id}"],
+ updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"]
+ },
+ git: {
+ createBlob: ["POST /repos/{owner}/{repo}/git/blobs"],
+ createCommit: ["POST /repos/{owner}/{repo}/git/commits"],
+ createRef: ["POST /repos/{owner}/{repo}/git/refs"],
+ createTag: ["POST /repos/{owner}/{repo}/git/tags"],
+ createTree: ["POST /repos/{owner}/{repo}/git/trees"],
+ deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"],
+ getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"],
+ getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"],
+ getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"],
+ getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"],
+ getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"],
+ listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"],
+ updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"]
+ },
+ gitignore: {
+ getAllTemplates: ["GET /gitignore/templates"],
+ getTemplate: ["GET /gitignore/templates/{name}"]
+ },
+ interactions: {
+ getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"],
+ getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"],
+ getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"],
+ getRestrictionsForYourPublicRepos: [
+ "GET /user/interaction-limits",
+ {},
+ { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] }
+ ],
+ removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"],
+ removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"],
+ removeRestrictionsForRepo: [
+ "DELETE /repos/{owner}/{repo}/interaction-limits"
+ ],
+ removeRestrictionsForYourPublicRepos: [
+ "DELETE /user/interaction-limits",
+ {},
+ { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] }
+ ],
+ setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"],
+ setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"],
+ setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"],
+ setRestrictionsForYourPublicRepos: [
+ "PUT /user/interaction-limits",
+ {},
+ { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] }
+ ]
+ },
+ issues: {
+ addAssignees: [
+ "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"
+ ],
+ addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"],
+ checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"],
+ checkUserCanBeAssignedToIssue: [
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}"
+ ],
+ create: ["POST /repos/{owner}/{repo}/issues"],
+ createComment: [
+ "POST /repos/{owner}/{repo}/issues/{issue_number}/comments"
+ ],
+ createLabel: ["POST /repos/{owner}/{repo}/labels"],
+ createMilestone: ["POST /repos/{owner}/{repo}/milestones"],
+ deleteComment: [
+ "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"
+ ],
+ deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"],
+ deleteMilestone: [
+ "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"
+ ],
+ get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"],
+ getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"],
+ getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"],
+ getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"],
+ getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"],
+ list: ["GET /issues"],
+ listAssignees: ["GET /repos/{owner}/{repo}/assignees"],
+ listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"],
+ listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"],
+ listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"],
+ listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"],
+ listEventsForTimeline: [
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"
+ ],
+ listForAuthenticatedUser: ["GET /user/issues"],
+ listForOrg: ["GET /orgs/{org}/issues"],
+ listForRepo: ["GET /repos/{owner}/{repo}/issues"],
+ listLabelsForMilestone: [
+ "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"
+ ],
+ listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"],
+ listLabelsOnIssue: [
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/labels"
+ ],
+ listMilestones: ["GET /repos/{owner}/{repo}/milestones"],
+ lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"],
+ removeAllLabels: [
+ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"
+ ],
+ removeAssignees: [
+ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"
+ ],
+ removeLabel: [
+ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"
+ ],
+ setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"],
+ unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"],
+ update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"],
+ updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"],
+ updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"],
+ updateMilestone: [
+ "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"
+ ]
+ },
+ licenses: {
+ get: ["GET /licenses/{license}"],
+ getAllCommonlyUsed: ["GET /licenses"],
+ getForRepo: ["GET /repos/{owner}/{repo}/license"]
+ },
+ markdown: {
+ render: ["POST /markdown"],
+ renderRaw: [
+ "POST /markdown/raw",
+ { headers: { "content-type": "text/plain; charset=utf-8" } }
+ ]
+ },
+ meta: {
+ get: ["GET /meta"],
+ getAllVersions: ["GET /versions"],
+ getOctocat: ["GET /octocat"],
+ getZen: ["GET /zen"],
+ root: ["GET /"]
+ },
+ migrations: {
+ cancelImport: [
+ "DELETE /repos/{owner}/{repo}/import",
+ {},
+ {
+ deprecated: "octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import"
+ }
+ ],
+ deleteArchiveForAuthenticatedUser: [
+ "DELETE /user/migrations/{migration_id}/archive"
+ ],
+ deleteArchiveForOrg: [
+ "DELETE /orgs/{org}/migrations/{migration_id}/archive"
+ ],
+ downloadArchiveForOrg: [
+ "GET /orgs/{org}/migrations/{migration_id}/archive"
+ ],
+ getArchiveForAuthenticatedUser: [
+ "GET /user/migrations/{migration_id}/archive"
+ ],
+ getCommitAuthors: [
+ "GET /repos/{owner}/{repo}/import/authors",
+ {},
+ {
+ deprecated: "octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors"
+ }
+ ],
+ getImportStatus: [
+ "GET /repos/{owner}/{repo}/import",
+ {},
+ {
+ deprecated: "octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status"
+ }
+ ],
+ getLargeFiles: [
+ "GET /repos/{owner}/{repo}/import/large_files",
+ {},
+ {
+ deprecated: "octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files"
+ }
+ ],
+ getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"],
+ getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"],
+ listForAuthenticatedUser: ["GET /user/migrations"],
+ listForOrg: ["GET /orgs/{org}/migrations"],
+ listReposForAuthenticatedUser: [
+ "GET /user/migrations/{migration_id}/repositories"
+ ],
+ listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"],
+ listReposForUser: [
+ "GET /user/migrations/{migration_id}/repositories",
+ {},
+ { renamed: ["migrations", "listReposForAuthenticatedUser"] }
+ ],
+ mapCommitAuthor: [
+ "PATCH /repos/{owner}/{repo}/import/authors/{author_id}",
+ {},
+ {
+ deprecated: "octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author"
+ }
+ ],
+ setLfsPreference: [
+ "PATCH /repos/{owner}/{repo}/import/lfs",
+ {},
+ {
+ deprecated: "octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference"
+ }
+ ],
+ startForAuthenticatedUser: ["POST /user/migrations"],
+ startForOrg: ["POST /orgs/{org}/migrations"],
+ startImport: [
+ "PUT /repos/{owner}/{repo}/import",
+ {},
+ {
+ deprecated: "octokit.rest.migrations.startImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#start-an-import"
+ }
+ ],
+ unlockRepoForAuthenticatedUser: [
+ "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"
+ ],
+ unlockRepoForOrg: [
+ "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"
+ ],
+ updateImport: [
+ "PATCH /repos/{owner}/{repo}/import",
+ {},
+ {
+ deprecated: "octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import"
+ }
+ ]
+ },
+ oidc: {
+ getOidcCustomSubTemplateForOrg: [
+ "GET /orgs/{org}/actions/oidc/customization/sub"
+ ],
+ updateOidcCustomSubTemplateForOrg: [
+ "PUT /orgs/{org}/actions/oidc/customization/sub"
+ ]
+ },
+ orgs: {
+ addSecurityManagerTeam: [
+ "PUT /orgs/{org}/security-managers/teams/{team_slug}"
+ ],
+ assignTeamToOrgRole: [
+ "PUT /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}"
+ ],
+ assignUserToOrgRole: [
+ "PUT /orgs/{org}/organization-roles/users/{username}/{role_id}"
+ ],
+ blockUser: ["PUT /orgs/{org}/blocks/{username}"],
+ cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"],
+ checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"],
+ checkMembershipForUser: ["GET /orgs/{org}/members/{username}"],
+ checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"],
+ convertMemberToOutsideCollaborator: [
+ "PUT /orgs/{org}/outside_collaborators/{username}"
+ ],
+ createCustomOrganizationRole: ["POST /orgs/{org}/organization-roles"],
+ createInvitation: ["POST /orgs/{org}/invitations"],
+ createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"],
+ createOrUpdateCustomPropertiesValuesForRepos: [
+ "PATCH /orgs/{org}/properties/values"
+ ],
+ createOrUpdateCustomProperty: [
+ "PUT /orgs/{org}/properties/schema/{custom_property_name}"
+ ],
+ createWebhook: ["POST /orgs/{org}/hooks"],
+ delete: ["DELETE /orgs/{org}"],
+ deleteCustomOrganizationRole: [
+ "DELETE /orgs/{org}/organization-roles/{role_id}"
+ ],
+ deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"],
+ enableOrDisableSecurityProductOnAllOrgRepos: [
+ "POST /orgs/{org}/{security_product}/{enablement}"
+ ],
+ get: ["GET /orgs/{org}"],
+ getAllCustomProperties: ["GET /orgs/{org}/properties/schema"],
+ getCustomProperty: [
+ "GET /orgs/{org}/properties/schema/{custom_property_name}"
+ ],
+ getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"],
+ getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"],
+ getOrgRole: ["GET /orgs/{org}/organization-roles/{role_id}"],
+ getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"],
+ getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"],
+ getWebhookDelivery: [
+ "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"
+ ],
+ list: ["GET /organizations"],
+ listAppInstallations: ["GET /orgs/{org}/installations"],
+ listBlockedUsers: ["GET /orgs/{org}/blocks"],
+ listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"],
+ listFailedInvitations: ["GET /orgs/{org}/failed_invitations"],
+ listForAuthenticatedUser: ["GET /user/orgs"],
+ listForUser: ["GET /users/{username}/orgs"],
+ listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"],
+ listMembers: ["GET /orgs/{org}/members"],
+ listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"],
+ listOrgRoleTeams: ["GET /orgs/{org}/organization-roles/{role_id}/teams"],
+ listOrgRoleUsers: ["GET /orgs/{org}/organization-roles/{role_id}/users"],
+ listOrgRoles: ["GET /orgs/{org}/organization-roles"],
+ listOrganizationFineGrainedPermissions: [
+ "GET /orgs/{org}/organization-fine-grained-permissions"
+ ],
+ listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"],
+ listPatGrantRepositories: [
+ "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories"
+ ],
+ listPatGrantRequestRepositories: [
+ "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories"
+ ],
+ listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"],
+ listPatGrants: ["GET /orgs/{org}/personal-access-tokens"],
+ listPendingInvitations: ["GET /orgs/{org}/invitations"],
+ listPublicMembers: ["GET /orgs/{org}/public_members"],
+ listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"],
+ listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"],
+ listWebhooks: ["GET /orgs/{org}/hooks"],
+ patchCustomOrganizationRole: [
+ "PATCH /orgs/{org}/organization-roles/{role_id}"
+ ],
+ pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"],
+ redeliverWebhookDelivery: [
+ "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"
+ ],
+ removeCustomProperty: [
+ "DELETE /orgs/{org}/properties/schema/{custom_property_name}"
+ ],
+ removeMember: ["DELETE /orgs/{org}/members/{username}"],
+ removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"],
+ removeOutsideCollaborator: [
+ "DELETE /orgs/{org}/outside_collaborators/{username}"
+ ],
+ removePublicMembershipForAuthenticatedUser: [
+ "DELETE /orgs/{org}/public_members/{username}"
+ ],
+ removeSecurityManagerTeam: [
+ "DELETE /orgs/{org}/security-managers/teams/{team_slug}"
+ ],
+ reviewPatGrantRequest: [
+ "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}"
+ ],
+ reviewPatGrantRequestsInBulk: [
+ "POST /orgs/{org}/personal-access-token-requests"
+ ],
+ revokeAllOrgRolesTeam: [
+ "DELETE /orgs/{org}/organization-roles/teams/{team_slug}"
+ ],
+ revokeAllOrgRolesUser: [
+ "DELETE /orgs/{org}/organization-roles/users/{username}"
+ ],
+ revokeOrgRoleTeam: [
+ "DELETE /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}"
+ ],
+ revokeOrgRoleUser: [
+ "DELETE /orgs/{org}/organization-roles/users/{username}/{role_id}"
+ ],
+ setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"],
+ setPublicMembershipForAuthenticatedUser: [
+ "PUT /orgs/{org}/public_members/{username}"
+ ],
+ unblockUser: ["DELETE /orgs/{org}/blocks/{username}"],
+ update: ["PATCH /orgs/{org}"],
+ updateMembershipForAuthenticatedUser: [
+ "PATCH /user/memberships/orgs/{org}"
+ ],
+ updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"],
+ updatePatAccesses: ["POST /orgs/{org}/personal-access-tokens"],
+ updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"],
+ updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"]
+ },
+ packages: {
+ deletePackageForAuthenticatedUser: [
+ "DELETE /user/packages/{package_type}/{package_name}"
+ ],
+ deletePackageForOrg: [
+ "DELETE /orgs/{org}/packages/{package_type}/{package_name}"
+ ],
+ deletePackageForUser: [
+ "DELETE /users/{username}/packages/{package_type}/{package_name}"
+ ],
+ deletePackageVersionForAuthenticatedUser: [
+ "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"
+ ],
+ deletePackageVersionForOrg: [
+ "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"
+ ],
+ deletePackageVersionForUser: [
+ "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"
+ ],
+ getAllPackageVersionsForAPackageOwnedByAnOrg: [
+ "GET /orgs/{org}/packages/{package_type}/{package_name}/versions",
+ {},
+ { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] }
+ ],
+ getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [
+ "GET /user/packages/{package_type}/{package_name}/versions",
+ {},
+ {
+ renamed: [
+ "packages",
+ "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"
+ ]
+ }
+ ],
+ getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [
+ "GET /user/packages/{package_type}/{package_name}/versions"
+ ],
+ getAllPackageVersionsForPackageOwnedByOrg: [
+ "GET /orgs/{org}/packages/{package_type}/{package_name}/versions"
+ ],
+ getAllPackageVersionsForPackageOwnedByUser: [
+ "GET /users/{username}/packages/{package_type}/{package_name}/versions"
+ ],
+ getPackageForAuthenticatedUser: [
+ "GET /user/packages/{package_type}/{package_name}"
+ ],
+ getPackageForOrganization: [
+ "GET /orgs/{org}/packages/{package_type}/{package_name}"
+ ],
+ getPackageForUser: [
+ "GET /users/{username}/packages/{package_type}/{package_name}"
+ ],
+ getPackageVersionForAuthenticatedUser: [
+ "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"
+ ],
+ getPackageVersionForOrganization: [
+ "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"
+ ],
+ getPackageVersionForUser: [
+ "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"
+ ],
+ listDockerMigrationConflictingPackagesForAuthenticatedUser: [
+ "GET /user/docker/conflicts"
+ ],
+ listDockerMigrationConflictingPackagesForOrganization: [
+ "GET /orgs/{org}/docker/conflicts"
+ ],
+ listDockerMigrationConflictingPackagesForUser: [
+ "GET /users/{username}/docker/conflicts"
+ ],
+ listPackagesForAuthenticatedUser: ["GET /user/packages"],
+ listPackagesForOrganization: ["GET /orgs/{org}/packages"],
+ listPackagesForUser: ["GET /users/{username}/packages"],
+ restorePackageForAuthenticatedUser: [
+ "POST /user/packages/{package_type}/{package_name}/restore{?token}"
+ ],
+ restorePackageForOrg: [
+ "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"
+ ],
+ restorePackageForUser: [
+ "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"
+ ],
+ restorePackageVersionForAuthenticatedUser: [
+ "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
+ ],
+ restorePackageVersionForOrg: [
+ "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
+ ],
+ restorePackageVersionForUser: [
+ "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
+ ]
+ },
+ projects: {
+ addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"],
+ createCard: ["POST /projects/columns/{column_id}/cards"],
+ createColumn: ["POST /projects/{project_id}/columns"],
+ createForAuthenticatedUser: ["POST /user/projects"],
+ createForOrg: ["POST /orgs/{org}/projects"],
+ createForRepo: ["POST /repos/{owner}/{repo}/projects"],
+ delete: ["DELETE /projects/{project_id}"],
+ deleteCard: ["DELETE /projects/columns/cards/{card_id}"],
+ deleteColumn: ["DELETE /projects/columns/{column_id}"],
+ get: ["GET /projects/{project_id}"],
+ getCard: ["GET /projects/columns/cards/{card_id}"],
+ getColumn: ["GET /projects/columns/{column_id}"],
+ getPermissionForUser: [
+ "GET /projects/{project_id}/collaborators/{username}/permission"
+ ],
+ listCards: ["GET /projects/columns/{column_id}/cards"],
+ listCollaborators: ["GET /projects/{project_id}/collaborators"],
+ listColumns: ["GET /projects/{project_id}/columns"],
+ listForOrg: ["GET /orgs/{org}/projects"],
+ listForRepo: ["GET /repos/{owner}/{repo}/projects"],
+ listForUser: ["GET /users/{username}/projects"],
+ moveCard: ["POST /projects/columns/cards/{card_id}/moves"],
+ moveColumn: ["POST /projects/columns/{column_id}/moves"],
+ removeCollaborator: [
+ "DELETE /projects/{project_id}/collaborators/{username}"
+ ],
+ update: ["PATCH /projects/{project_id}"],
+ updateCard: ["PATCH /projects/columns/cards/{card_id}"],
+ updateColumn: ["PATCH /projects/columns/{column_id}"]
+ },
+ pulls: {
+ checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
+ create: ["POST /repos/{owner}/{repo}/pulls"],
+ createReplyForReviewComment: [
+ "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"
+ ],
+ createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
+ createReviewComment: [
+ "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"
+ ],
+ deletePendingReview: [
+ "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
+ ],
+ deleteReviewComment: [
+ "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"
+ ],
+ dismissReview: [
+ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"
+ ],
+ get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"],
+ getReview: [
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
+ ],
+ getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"],
+ list: ["GET /repos/{owner}/{repo}/pulls"],
+ listCommentsForReview: [
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"
+ ],
+ listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"],
+ listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"],
+ listRequestedReviewers: [
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
+ ],
+ listReviewComments: [
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"
+ ],
+ listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"],
+ listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
+ merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
+ removeRequestedReviewers: [
+ "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
+ ],
+ requestReviewers: [
+ "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
+ ],
+ submitReview: [
+ "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"
+ ],
+ update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"],
+ updateBranch: [
+ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"
+ ],
+ updateReview: [
+ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
+ ],
+ updateReviewComment: [
+ "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"
+ ]
+ },
+ rateLimit: { get: ["GET /rate_limit"] },
+ reactions: {
+ createForCommitComment: [
+ "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"
+ ],
+ createForIssue: [
+ "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"
+ ],
+ createForIssueComment: [
+ "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"
+ ],
+ createForPullRequestReviewComment: [
+ "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"
+ ],
+ createForRelease: [
+ "POST /repos/{owner}/{repo}/releases/{release_id}/reactions"
+ ],
+ createForTeamDiscussionCommentInOrg: [
+ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"
+ ],
+ createForTeamDiscussionInOrg: [
+ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"
+ ],
+ deleteForCommitComment: [
+ "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"
+ ],
+ deleteForIssue: [
+ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"
+ ],
+ deleteForIssueComment: [
+ "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"
+ ],
+ deleteForPullRequestComment: [
+ "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"
+ ],
+ deleteForRelease: [
+ "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"
+ ],
+ deleteForTeamDiscussion: [
+ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"
+ ],
+ deleteForTeamDiscussionComment: [
+ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"
+ ],
+ listForCommitComment: [
+ "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"
+ ],
+ listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"],
+ listForIssueComment: [
+ "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"
+ ],
+ listForPullRequestReviewComment: [
+ "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"
+ ],
+ listForRelease: [
+ "GET /repos/{owner}/{repo}/releases/{release_id}/reactions"
+ ],
+ listForTeamDiscussionCommentInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"
+ ],
+ listForTeamDiscussionInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"
+ ]
+ },
+ repos: {
+ acceptInvitation: [
+ "PATCH /user/repository_invitations/{invitation_id}",
+ {},
+ { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] }
+ ],
+ acceptInvitationForAuthenticatedUser: [
+ "PATCH /user/repository_invitations/{invitation_id}"
+ ],
+ addAppAccessRestrictions: [
+ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps",
+ {},
+ { mapToData: "apps" }
+ ],
+ addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"],
+ addStatusCheckContexts: [
+ "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts",
+ {},
+ { mapToData: "contexts" }
+ ],
+ addTeamAccessRestrictions: [
+ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams",
+ {},
+ { mapToData: "teams" }
+ ],
+ addUserAccessRestrictions: [
+ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users",
+ {},
+ { mapToData: "users" }
+ ],
+ cancelPagesDeployment: [
+ "POST /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}/cancel"
+ ],
+ checkAutomatedSecurityFixes: [
+ "GET /repos/{owner}/{repo}/automated-security-fixes"
+ ],
+ checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"],
+ checkVulnerabilityAlerts: [
+ "GET /repos/{owner}/{repo}/vulnerability-alerts"
+ ],
+ codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"],
+ compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"],
+ compareCommitsWithBasehead: [
+ "GET /repos/{owner}/{repo}/compare/{basehead}"
+ ],
+ createAutolink: ["POST /repos/{owner}/{repo}/autolinks"],
+ createCommitComment: [
+ "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"
+ ],
+ createCommitSignatureProtection: [
+ "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
+ ],
+ createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"],
+ createDeployKey: ["POST /repos/{owner}/{repo}/keys"],
+ createDeployment: ["POST /repos/{owner}/{repo}/deployments"],
+ createDeploymentBranchPolicy: [
+ "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies"
+ ],
+ createDeploymentProtectionRule: [
+ "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules"
+ ],
+ createDeploymentStatus: [
+ "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"
+ ],
+ createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"],
+ createForAuthenticatedUser: ["POST /user/repos"],
+ createFork: ["POST /repos/{owner}/{repo}/forks"],
+ createInOrg: ["POST /orgs/{org}/repos"],
+ createOrUpdateCustomPropertiesValues: [
+ "PATCH /repos/{owner}/{repo}/properties/values"
+ ],
+ createOrUpdateEnvironment: [
+ "PUT /repos/{owner}/{repo}/environments/{environment_name}"
+ ],
+ createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"],
+ createOrgRuleset: ["POST /orgs/{org}/rulesets"],
+ createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployments"],
+ createPagesSite: ["POST /repos/{owner}/{repo}/pages"],
+ createRelease: ["POST /repos/{owner}/{repo}/releases"],
+ createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"],
+ createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"],
+ createUsingTemplate: [
+ "POST /repos/{template_owner}/{template_repo}/generate"
+ ],
+ createWebhook: ["POST /repos/{owner}/{repo}/hooks"],
+ declineInvitation: [
+ "DELETE /user/repository_invitations/{invitation_id}",
+ {},
+ { renamed: ["repos", "declineInvitationForAuthenticatedUser"] }
+ ],
+ declineInvitationForAuthenticatedUser: [
+ "DELETE /user/repository_invitations/{invitation_id}"
+ ],
+ delete: ["DELETE /repos/{owner}/{repo}"],
+ deleteAccessRestrictions: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"
+ ],
+ deleteAdminBranchProtection: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
+ ],
+ deleteAnEnvironment: [
+ "DELETE /repos/{owner}/{repo}/environments/{environment_name}"
+ ],
+ deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"],
+ deleteBranchProtection: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection"
+ ],
+ deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"],
+ deleteCommitSignatureProtection: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
+ ],
+ deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"],
+ deleteDeployment: [
+ "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"
+ ],
+ deleteDeploymentBranchPolicy: [
+ "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
+ ],
+ deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"],
+ deleteInvitation: [
+ "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"
+ ],
+ deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"],
+ deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"],
+ deletePullRequestReviewProtection: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
+ ],
+ deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"],
+ deleteReleaseAsset: [
+ "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"
+ ],
+ deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"],
+ deleteTagProtection: [
+ "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"
+ ],
+ deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"],
+ disableAutomatedSecurityFixes: [
+ "DELETE /repos/{owner}/{repo}/automated-security-fixes"
+ ],
+ disableDeploymentProtectionRule: [
+ "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}"
+ ],
+ disablePrivateVulnerabilityReporting: [
+ "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting"
+ ],
+ disableVulnerabilityAlerts: [
+ "DELETE /repos/{owner}/{repo}/vulnerability-alerts"
+ ],
+ downloadArchive: [
+ "GET /repos/{owner}/{repo}/zipball/{ref}",
+ {},
+ { renamed: ["repos", "downloadZipballArchive"] }
+ ],
+ downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"],
+ downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"],
+ enableAutomatedSecurityFixes: [
+ "PUT /repos/{owner}/{repo}/automated-security-fixes"
+ ],
+ enablePrivateVulnerabilityReporting: [
+ "PUT /repos/{owner}/{repo}/private-vulnerability-reporting"
+ ],
+ enableVulnerabilityAlerts: [
+ "PUT /repos/{owner}/{repo}/vulnerability-alerts"
+ ],
+ generateReleaseNotes: [
+ "POST /repos/{owner}/{repo}/releases/generate-notes"
+ ],
+ get: ["GET /repos/{owner}/{repo}"],
+ getAccessRestrictions: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"
+ ],
+ getAdminBranchProtection: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
+ ],
+ getAllDeploymentProtectionRules: [
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules"
+ ],
+ getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"],
+ getAllStatusCheckContexts: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"
+ ],
+ getAllTopics: ["GET /repos/{owner}/{repo}/topics"],
+ getAppsWithAccessToProtectedBranch: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"
+ ],
+ getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"],
+ getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"],
+ getBranchProtection: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection"
+ ],
+ getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"],
+ getClones: ["GET /repos/{owner}/{repo}/traffic/clones"],
+ getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"],
+ getCollaboratorPermissionLevel: [
+ "GET /repos/{owner}/{repo}/collaborators/{username}/permission"
+ ],
+ getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"],
+ getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"],
+ getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"],
+ getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"],
+ getCommitSignatureProtection: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
+ ],
+ getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"],
+ getContent: ["GET /repos/{owner}/{repo}/contents/{path}"],
+ getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"],
+ getCustomDeploymentProtectionRule: [
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}"
+ ],
+ getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"],
+ getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"],
+ getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"],
+ getDeploymentBranchPolicy: [
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
+ ],
+ getDeploymentStatus: [
+ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"
+ ],
+ getEnvironment: [
+ "GET /repos/{owner}/{repo}/environments/{environment_name}"
+ ],
+ getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"],
+ getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"],
+ getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"],
+ getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"],
+ getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"],
+ getOrgRulesets: ["GET /orgs/{org}/rulesets"],
+ getPages: ["GET /repos/{owner}/{repo}/pages"],
+ getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"],
+ getPagesDeployment: [
+ "GET /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}"
+ ],
+ getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"],
+ getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"],
+ getPullRequestReviewProtection: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
+ ],
+ getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"],
+ getReadme: ["GET /repos/{owner}/{repo}/readme"],
+ getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"],
+ getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"],
+ getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"],
+ getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"],
+ getRepoRuleSuite: [
+ "GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}"
+ ],
+ getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"],
+ getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"],
+ getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"],
+ getStatusChecksProtection: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
+ ],
+ getTeamsWithAccessToProtectedBranch: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"
+ ],
+ getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"],
+ getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"],
+ getUsersWithAccessToProtectedBranch: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"
+ ],
+ getViews: ["GET /repos/{owner}/{repo}/traffic/views"],
+ getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"],
+ getWebhookConfigForRepo: [
+ "GET /repos/{owner}/{repo}/hooks/{hook_id}/config"
+ ],
+ getWebhookDelivery: [
+ "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"
+ ],
+ listActivities: ["GET /repos/{owner}/{repo}/activity"],
+ listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"],
+ listBranches: ["GET /repos/{owner}/{repo}/branches"],
+ listBranchesForHeadCommit: [
+ "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"
+ ],
+ listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"],
+ listCommentsForCommit: [
+ "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"
+ ],
+ listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"],
+ listCommitStatusesForRef: [
+ "GET /repos/{owner}/{repo}/commits/{ref}/statuses"
+ ],
+ listCommits: ["GET /repos/{owner}/{repo}/commits"],
+ listContributors: ["GET /repos/{owner}/{repo}/contributors"],
+ listCustomDeploymentRuleIntegrations: [
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps"
+ ],
+ listDeployKeys: ["GET /repos/{owner}/{repo}/keys"],
+ listDeploymentBranchPolicies: [
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies"
+ ],
+ listDeploymentStatuses: [
+ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"
+ ],
+ listDeployments: ["GET /repos/{owner}/{repo}/deployments"],
+ listForAuthenticatedUser: ["GET /user/repos"],
+ listForOrg: ["GET /orgs/{org}/repos"],
+ listForUser: ["GET /users/{username}/repos"],
+ listForks: ["GET /repos/{owner}/{repo}/forks"],
+ listInvitations: ["GET /repos/{owner}/{repo}/invitations"],
+ listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"],
+ listLanguages: ["GET /repos/{owner}/{repo}/languages"],
+ listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"],
+ listPublic: ["GET /repositories"],
+ listPullRequestsAssociatedWithCommit: [
+ "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"
+ ],
+ listReleaseAssets: [
+ "GET /repos/{owner}/{repo}/releases/{release_id}/assets"
+ ],
+ listReleases: ["GET /repos/{owner}/{repo}/releases"],
+ listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"],
+ listTags: ["GET /repos/{owner}/{repo}/tags"],
+ listTeams: ["GET /repos/{owner}/{repo}/teams"],
+ listWebhookDeliveries: [
+ "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"
+ ],
+ listWebhooks: ["GET /repos/{owner}/{repo}/hooks"],
+ merge: ["POST /repos/{owner}/{repo}/merges"],
+ mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"],
+ pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"],
+ redeliverWebhookDelivery: [
+ "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"
+ ],
+ removeAppAccessRestrictions: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps",
+ {},
+ { mapToData: "apps" }
+ ],
+ removeCollaborator: [
+ "DELETE /repos/{owner}/{repo}/collaborators/{username}"
+ ],
+ removeStatusCheckContexts: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts",
+ {},
+ { mapToData: "contexts" }
+ ],
+ removeStatusCheckProtection: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
+ ],
+ removeTeamAccessRestrictions: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams",
+ {},
+ { mapToData: "teams" }
+ ],
+ removeUserAccessRestrictions: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users",
+ {},
+ { mapToData: "users" }
+ ],
+ renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"],
+ replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"],
+ requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"],
+ setAdminBranchProtection: [
+ "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
+ ],
+ setAppAccessRestrictions: [
+ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps",
+ {},
+ { mapToData: "apps" }
+ ],
+ setStatusCheckContexts: [
+ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts",
+ {},
+ { mapToData: "contexts" }
+ ],
+ setTeamAccessRestrictions: [
+ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams",
+ {},
+ { mapToData: "teams" }
+ ],
+ setUserAccessRestrictions: [
+ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users",
+ {},
+ { mapToData: "users" }
+ ],
+ testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"],
+ transfer: ["POST /repos/{owner}/{repo}/transfer"],
+ update: ["PATCH /repos/{owner}/{repo}"],
+ updateBranchProtection: [
+ "PUT /repos/{owner}/{repo}/branches/{branch}/protection"
+ ],
+ updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"],
+ updateDeploymentBranchPolicy: [
+ "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
+ ],
+ updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"],
+ updateInvitation: [
+ "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"
+ ],
+ updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"],
+ updatePullRequestReviewProtection: [
+ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
+ ],
+ updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"],
+ updateReleaseAsset: [
+ "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"
+ ],
+ updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"],
+ updateStatusCheckPotection: [
+ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks",
+ {},
+ { renamed: ["repos", "updateStatusCheckProtection"] }
+ ],
+ updateStatusCheckProtection: [
+ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
+ ],
+ updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"],
+ updateWebhookConfigForRepo: [
+ "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"
+ ],
+ uploadReleaseAsset: [
+ "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}",
+ { baseUrl: "https://uploads.github.com" }
+ ]
+ },
+ search: {
+ code: ["GET /search/code"],
+ commits: ["GET /search/commits"],
+ issuesAndPullRequests: ["GET /search/issues"],
+ labels: ["GET /search/labels"],
+ repos: ["GET /search/repositories"],
+ topics: ["GET /search/topics"],
+ users: ["GET /search/users"]
+ },
+ secretScanning: {
+ getAlert: [
+ "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"
+ ],
+ listAlertsForEnterprise: [
+ "GET /enterprises/{enterprise}/secret-scanning/alerts"
+ ],
+ listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"],
+ listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"],
+ listLocationsForAlert: [
+ "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"
+ ],
+ updateAlert: [
+ "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"
+ ]
+ },
+ securityAdvisories: {
+ createFork: [
+ "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/forks"
+ ],
+ createPrivateVulnerabilityReport: [
+ "POST /repos/{owner}/{repo}/security-advisories/reports"
+ ],
+ createRepositoryAdvisory: [
+ "POST /repos/{owner}/{repo}/security-advisories"
+ ],
+ createRepositoryAdvisoryCveRequest: [
+ "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve"
+ ],
+ getGlobalAdvisory: ["GET /advisories/{ghsa_id}"],
+ getRepositoryAdvisory: [
+ "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}"
+ ],
+ listGlobalAdvisories: ["GET /advisories"],
+ listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"],
+ listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"],
+ updateRepositoryAdvisory: [
+ "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}"
+ ]
+ },
+ teams: {
+ addOrUpdateMembershipForUserInOrg: [
+ "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"
+ ],
+ addOrUpdateProjectPermissionsInOrg: [
+ "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"
+ ],
+ addOrUpdateRepoPermissionsInOrg: [
+ "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
+ ],
+ checkPermissionsForProjectInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"
+ ],
+ checkPermissionsForRepoInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
+ ],
+ create: ["POST /orgs/{org}/teams"],
+ createDiscussionCommentInOrg: [
+ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"
+ ],
+ createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"],
+ deleteDiscussionCommentInOrg: [
+ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
+ ],
+ deleteDiscussionInOrg: [
+ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
+ ],
+ deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"],
+ getByName: ["GET /orgs/{org}/teams/{team_slug}"],
+ getDiscussionCommentInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
+ ],
+ getDiscussionInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
+ ],
+ getMembershipForUserInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/memberships/{username}"
+ ],
+ list: ["GET /orgs/{org}/teams"],
+ listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"],
+ listDiscussionCommentsInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"
+ ],
+ listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"],
+ listForAuthenticatedUser: ["GET /user/teams"],
+ listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"],
+ listPendingInvitationsInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/invitations"
+ ],
+ listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"],
+ listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"],
+ removeMembershipForUserInOrg: [
+ "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"
+ ],
+ removeProjectInOrg: [
+ "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"
+ ],
+ removeRepoInOrg: [
+ "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
+ ],
+ updateDiscussionCommentInOrg: [
+ "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
+ ],
+ updateDiscussionInOrg: [
+ "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
+ ],
+ updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"]
+ },
+ users: {
+ addEmailForAuthenticated: [
+ "POST /user/emails",
+ {},
+ { renamed: ["users", "addEmailForAuthenticatedUser"] }
+ ],
+ addEmailForAuthenticatedUser: ["POST /user/emails"],
+ addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"],
+ block: ["PUT /user/blocks/{username}"],
+ checkBlocked: ["GET /user/blocks/{username}"],
+ checkFollowingForUser: ["GET /users/{username}/following/{target_user}"],
+ checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"],
+ createGpgKeyForAuthenticated: [
+ "POST /user/gpg_keys",
+ {},
+ { renamed: ["users", "createGpgKeyForAuthenticatedUser"] }
+ ],
+ createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"],
+ createPublicSshKeyForAuthenticated: [
+ "POST /user/keys",
+ {},
+ { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] }
+ ],
+ createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"],
+ createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"],
+ deleteEmailForAuthenticated: [
+ "DELETE /user/emails",
+ {},
+ { renamed: ["users", "deleteEmailForAuthenticatedUser"] }
+ ],
+ deleteEmailForAuthenticatedUser: ["DELETE /user/emails"],
+ deleteGpgKeyForAuthenticated: [
+ "DELETE /user/gpg_keys/{gpg_key_id}",
+ {},
+ { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] }
+ ],
+ deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"],
+ deletePublicSshKeyForAuthenticated: [
+ "DELETE /user/keys/{key_id}",
+ {},
+ { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] }
+ ],
+ deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"],
+ deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"],
+ deleteSshSigningKeyForAuthenticatedUser: [
+ "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}"
+ ],
+ follow: ["PUT /user/following/{username}"],
+ getAuthenticated: ["GET /user"],
+ getByUsername: ["GET /users/{username}"],
+ getContextForUser: ["GET /users/{username}/hovercard"],
+ getGpgKeyForAuthenticated: [
+ "GET /user/gpg_keys/{gpg_key_id}",
+ {},
+ { renamed: ["users", "getGpgKeyForAuthenticatedUser"] }
+ ],
+ getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"],
+ getPublicSshKeyForAuthenticated: [
+ "GET /user/keys/{key_id}",
+ {},
+ { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] }
+ ],
+ getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"],
+ getSshSigningKeyForAuthenticatedUser: [
+ "GET /user/ssh_signing_keys/{ssh_signing_key_id}"
+ ],
+ list: ["GET /users"],
+ listBlockedByAuthenticated: [
+ "GET /user/blocks",
+ {},
+ { renamed: ["users", "listBlockedByAuthenticatedUser"] }
+ ],
+ listBlockedByAuthenticatedUser: ["GET /user/blocks"],
+ listEmailsForAuthenticated: [
+ "GET /user/emails",
+ {},
+ { renamed: ["users", "listEmailsForAuthenticatedUser"] }
+ ],
+ listEmailsForAuthenticatedUser: ["GET /user/emails"],
+ listFollowedByAuthenticated: [
+ "GET /user/following",
+ {},
+ { renamed: ["users", "listFollowedByAuthenticatedUser"] }
+ ],
+ listFollowedByAuthenticatedUser: ["GET /user/following"],
+ listFollowersForAuthenticatedUser: ["GET /user/followers"],
+ listFollowersForUser: ["GET /users/{username}/followers"],
+ listFollowingForUser: ["GET /users/{username}/following"],
+ listGpgKeysForAuthenticated: [
+ "GET /user/gpg_keys",
+ {},
+ { renamed: ["users", "listGpgKeysForAuthenticatedUser"] }
+ ],
+ listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"],
+ listGpgKeysForUser: ["GET /users/{username}/gpg_keys"],
+ listPublicEmailsForAuthenticated: [
+ "GET /user/public_emails",
+ {},
+ { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] }
+ ],
+ listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"],
+ listPublicKeysForUser: ["GET /users/{username}/keys"],
+ listPublicSshKeysForAuthenticated: [
+ "GET /user/keys",
+ {},
+ { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] }
+ ],
+ listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"],
+ listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"],
+ listSocialAccountsForUser: ["GET /users/{username}/social_accounts"],
+ listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"],
+ listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"],
+ setPrimaryEmailVisibilityForAuthenticated: [
+ "PATCH /user/email/visibility",
+ {},
+ { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] }
+ ],
+ setPrimaryEmailVisibilityForAuthenticatedUser: [
+ "PATCH /user/email/visibility"
+ ],
+ unblock: ["DELETE /user/blocks/{username}"],
+ unfollow: ["DELETE /user/following/{username}"],
+ updateAuthenticated: ["PATCH /user"]
+ }
+};
+var endpoints_default = Endpoints;
+
+// pkg/dist-src/endpoints-to-methods.js
+var endpointMethodsMap = /* @__PURE__ */ new Map();
+for (const [scope, endpoints] of Object.entries(endpoints_default)) {
+ for (const [methodName, endpoint] of Object.entries(endpoints)) {
+ const [route, defaults, decorations] = endpoint;
+ const [method, url] = route.split(/ /);
+ const endpointDefaults = Object.assign(
+ {
+ method,
+ url
+ },
+ defaults
+ );
+ if (!endpointMethodsMap.has(scope)) {
+ endpointMethodsMap.set(scope, /* @__PURE__ */ new Map());
+ }
+ endpointMethodsMap.get(scope).set(methodName, {
+ scope,
+ methodName,
+ endpointDefaults,
+ decorations
+ });
+ }
+}
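+// Proxy handler backing the generated API: methods are created lazily on first
+// access in get() (via decorate() when the endpoint carries decorations, otherwise
+// octokit.request.defaults()) and memoized on the per-scope "cache" object.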
+var handler = {
+ has({ scope }, methodName) {
+ return endpointMethodsMap.get(scope).has(methodName);
+ },
+ getOwnPropertyDescriptor(target, methodName) {
+ return {
+ value: this.get(target, methodName),
+ // ensures method is in the cache
+ configurable: true,
+ writable: true,
+ enumerable: true
+ };
+ },
+ defineProperty(target, methodName, descriptor) {
+ Object.defineProperty(target.cache, methodName, descriptor);
+ return true;
+ },
+ deleteProperty(target, methodName) {
+ delete target.cache[methodName];
+ return true;
+ },
+ ownKeys({ scope }) {
+ return [...endpointMethodsMap.get(scope).keys()];
+ },
+ set(target, methodName, value) {
+ return target.cache[methodName] = value;
+ },
+ get({ octokit, scope, cache }, methodName) {
+ if (cache[methodName]) {
+ return cache[methodName];
+ }
+ const method = endpointMethodsMap.get(scope).get(methodName);
+ if (!method) {
+ return void 0;
+ }
+ const { endpointDefaults, decorations } = method;
+ if (decorations) {
+ cache[methodName] = decorate(
+ octokit,
+ scope,
+ methodName,
+ endpointDefaults,
+ decorations
+ );
+ } else {
+ cache[methodName] = octokit.request.defaults(endpointDefaults);
+ }
+ return cache[methodName];
+ }
+};
+function endpointsToMethods(octokit) {
+ const newMethods = {};
+ for (const scope of endpointMethodsMap.keys()) {
+ newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler);
+ }
+ return newMethods;
+}
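+// decorate() applies the optional third element of an endpoint definition:
+// "mapToData" moves a named parameter into the request body, "renamed" and
+// "deprecated" log warnings, and "renamedParameters" rewrites old parameter
+// names before the request is sent.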
+function decorate(octokit, scope, methodName, defaults, decorations) {
+ const requestWithDefaults = octokit.request.defaults(defaults);
+ function withDecorations(...args) {
+ let options = requestWithDefaults.endpoint.merge(...args);
+ if (decorations.mapToData) {
+ options = Object.assign({}, options, {
+ data: options[decorations.mapToData],
+ [decorations.mapToData]: void 0
+ });
+ return requestWithDefaults(options);
+ }
+ if (decorations.renamed) {
+ const [newScope, newMethodName] = decorations.renamed;
+ octokit.log.warn(
+ `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`
+ );
+ }
+ if (decorations.deprecated) {
+ octokit.log.warn(decorations.deprecated);
+ }
+ if (decorations.renamedParameters) {
+ const options2 = requestWithDefaults.endpoint.merge(...args);
+ for (const [name, alias] of Object.entries(
+ decorations.renamedParameters
+ )) {
+ if (name in options2) {
+ octokit.log.warn(
+ `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`
+ );
+ if (!(alias in options2)) {
+ options2[alias] = options2[name];
+ }
+ delete options2[name];
+ }
+ }
+ return requestWithDefaults(options2);
+ }
+ return requestWithDefaults(...args);
+ }
+ return Object.assign(withDecorations, requestWithDefaults);
+}
+
+// pkg/dist-src/index.js
+function restEndpointMethods(octokit) {
+ const api = endpointsToMethods(octokit);
+ return {
+ rest: api
+ };
+}
+restEndpointMethods.VERSION = VERSION;
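+// legacyRestEndpointMethods() exposes the same generated methods both at the
+// plugin's top level and under the "rest" namespace.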
+function legacyRestEndpointMethods(octokit) {
+ const api = endpointsToMethods(octokit);
+ return {
+ ...api,
+ rest: api
+ };
+}
+legacyRestEndpointMethods.VERSION = VERSION;
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
+
+
+/***/ }),
+
+/***/ 537:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+ mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ RequestError: () => RequestError
+});
+module.exports = __toCommonJS(dist_src_exports);
+var import_deprecation = __nccwpck_require__(8932);
+var import_once = __toESM(__nccwpck_require__(1223));
+var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
+var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
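+// RequestError stores a redacted copy of the originating request (the
+// authorization header and client_secret/access_token query values are masked)
+// and exposes deprecated "code" and "headers" getters that warn once.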
+var RequestError = class extends Error {
+ constructor(message, statusCode, options) {
+ super(message);
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+ this.name = "HttpError";
+ this.status = statusCode;
+ let headers;
+ if ("headers" in options && typeof options.headers !== "undefined") {
+ headers = options.headers;
+ }
+ if ("response" in options) {
+ this.response = options.response;
+ headers = options.response.headers;
+ }
+ const requestCopy = Object.assign({}, options.request);
+ if (options.request.headers.authorization) {
+ requestCopy.headers = Object.assign({}, options.request.headers, {
+ authorization: options.request.headers.authorization.replace(
+ / .*$/,
+ " [REDACTED]"
+ )
+ });
+ }
+ requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
+ this.request = requestCopy;
+ Object.defineProperty(this, "code", {
+ get() {
+ logOnceCode(
+ new import_deprecation.Deprecation(
+ "[@octokit/request-error] `error.code` is deprecated, use `error.status`."
+ )
+ );
+ return statusCode;
+ }
+ });
+ Object.defineProperty(this, "headers", {
+ get() {
+ logOnceHeaders(
+ new import_deprecation.Deprecation(
+ "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`."
+ )
+ );
+ return headers || {};
+ }
+ });
+ }
+};
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
+
+
+/***/ }),
+
+/***/ 6234:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ request: () => request
+});
+module.exports = __toCommonJS(dist_src_exports);
+var import_endpoint = __nccwpck_require__(9440);
+var import_universal_user_agent = __nccwpck_require__(5030);
+
+// pkg/dist-src/version.js
+var VERSION = "8.4.0";
+
+// pkg/dist-src/is-plain-object.js
+function isPlainObject(value) {
+ if (typeof value !== "object" || value === null)
+ return false;
+ if (Object.prototype.toString.call(value) !== "[object Object]")
+ return false;
+ const proto = Object.getPrototypeOf(value);
+ if (proto === null)
+ return true;
+ const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
+ return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
+}
+
+// pkg/dist-src/fetch-wrapper.js
+var import_request_error = __nccwpck_require__(537);
+
+// pkg/dist-src/get-buffer-response.js
+function getBufferResponse(response) {
+ return response.arrayBuffer();
+}
+
+// pkg/dist-src/fetch-wrapper.js
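+// fetchWrapper() issues the request with the configured fetch implementation,
+// flattens the response headers into a plain object, returns early for 204/205
+// (and successful HEAD) responses, and wraps 304 and >= 400 statuses in
+// RequestError.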
+function fetchWrapper(requestOptions) {
+ var _a, _b, _c, _d;
+ const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;
+ const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false;
+ if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
+ requestOptions.body = JSON.stringify(requestOptions.body);
+ }
+ let headers = {};
+ let status;
+ let url;
+ let { fetch } = globalThis;
+ if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) {
+ fetch = requestOptions.request.fetch;
+ }
+ if (!fetch) {
+ throw new Error(
+ "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing"
+ );
+ }
+ return fetch(requestOptions.url, {
+ method: requestOptions.method,
+ body: requestOptions.body,
+ redirect: (_c = requestOptions.request) == null ? void 0 : _c.redirect,
+ headers: requestOptions.headers,
+ signal: (_d = requestOptions.request) == null ? void 0 : _d.signal,
+ // duplex must be set if request.body is ReadableStream or Async Iterables.
+ // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex.
+ ...requestOptions.body && { duplex: "half" }
+ }).then(async (response) => {
+ url = response.url;
+ status = response.status;
+ for (const keyAndValue of response.headers) {
+ headers[keyAndValue[0]] = keyAndValue[1];
+ }
+ if ("deprecation" in headers) {
+ const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/);
+ const deprecationLink = matches && matches.pop();
+ log.warn(
+ `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`
+ );
+ }
+ if (status === 204 || status === 205) {
+ return;
+ }
+ if (requestOptions.method === "HEAD") {
+ if (status < 400) {
+ return;
+ }
+ throw new import_request_error.RequestError(response.statusText, status, {
+ response: {
+ url,
+ status,
+ headers,
+ data: void 0
+ },
+ request: requestOptions
+ });
+ }
+ if (status === 304) {
+ throw new import_request_error.RequestError("Not modified", status, {
+ response: {
+ url,
+ status,
+ headers,
+ data: await getResponseData(response)
+ },
+ request: requestOptions
+ });
+ }
+ if (status >= 400) {
+ const data = await getResponseData(response);
+ const error = new import_request_error.RequestError(toErrorMessage(data), status, {
+ response: {
+ url,
+ status,
+ headers,
+ data
+ },
+ request: requestOptions
+ });
+ throw error;
+ }
+ return parseSuccessResponseBody ? await getResponseData(response) : response.body;
+ }).then((data) => {
+ return {
+ status,
+ url,
+ headers,
+ data
+ };
+ }).catch((error) => {
+ if (error instanceof import_request_error.RequestError)
+ throw error;
+ else if (error.name === "AbortError")
+ throw error;
+ let message = error.message;
+ if (error.name === "TypeError" && "cause" in error) {
+ if (error.cause instanceof Error) {
+ message = error.cause.message;
+ } else if (typeof error.cause === "string") {
+ message = error.cause;
+ }
+ }
+ throw new import_request_error.RequestError(message, 500, {
+ request: requestOptions
+ });
+ });
+}
+async function getResponseData(response) {
+ const contentType = response.headers.get("content-type");
+ if (/application\/json/.test(contentType)) {
+ return response.json().catch(() => response.text()).catch(() => "");
+ }
+ if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
+ return response.text();
+ }
+ return getBufferResponse(response);
+}
+function toErrorMessage(data) {
+ if (typeof data === "string")
+ return data;
+ let suffix;
+ if ("documentation_url" in data) {
+ suffix = ` - ${data.documentation_url}`;
+ } else {
+ suffix = "";
+ }
+ if ("message" in data) {
+ if (Array.isArray(data.errors)) {
+ return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}${suffix}`;
+ }
+ return `${data.message}${suffix}`;
+ }
+ return `Unknown error: ${JSON.stringify(data)}`;
+}
+
+// pkg/dist-src/with-defaults.js
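+// withDefaults() layers additional defaults onto the endpoint and returns a
+// request function; when a request.hook is configured the call is routed
+// through it.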
+function withDefaults(oldEndpoint, newDefaults) {
+ const endpoint2 = oldEndpoint.defaults(newDefaults);
+ const newApi = function(route, parameters) {
+ const endpointOptions = endpoint2.merge(route, parameters);
+ if (!endpointOptions.request || !endpointOptions.request.hook) {
+ return fetchWrapper(endpoint2.parse(endpointOptions));
+ }
+ const request2 = (route2, parameters2) => {
+ return fetchWrapper(
+ endpoint2.parse(endpoint2.merge(route2, parameters2))
+ );
+ };
+ Object.assign(request2, {
+ endpoint: endpoint2,
+ defaults: withDefaults.bind(null, endpoint2)
+ });
+ return endpointOptions.request.hook(request2, endpointOptions);
+ };
+ return Object.assign(newApi, {
+ endpoint: endpoint2,
+ defaults: withDefaults.bind(null, endpoint2)
+ });
+}
+
+// pkg/dist-src/index.js
+var request = withDefaults(import_endpoint.endpoint, {
+ headers: {
+ "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`
+ }
+});
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
+
+
+/***/ }),
+
+/***/ 9417:
+/***/ ((module) => {
+
+"use strict";
+
+module.exports = balanced;
+function balanced(a, b, str) {
+ if (a instanceof RegExp) a = maybeMatch(a, str);
+ if (b instanceof RegExp) b = maybeMatch(b, str);
+
+ var r = range(a, b, str);
+
+ return r && {
+ start: r[0],
+ end: r[1],
+ pre: str.slice(0, r[0]),
+ body: str.slice(r[0] + a.length, r[1]),
+ post: str.slice(r[1] + b.length)
+ };
+}
+
+function maybeMatch(reg, str) {
+ var m = str.match(reg);
+ return m ? m[0] : null;
+}
+
+balanced.range = range;
+function range(a, b, str) {
+ var begs, beg, left, right, result;
+ var ai = str.indexOf(a);
+ var bi = str.indexOf(b, ai + 1);
+ var i = ai;
+
+ if (ai >= 0 && bi > 0) {
+ if(a===b) {
+ return [ai, bi];
+ }
+ begs = [];
+ left = str.length;
+
+ while (i >= 0 && !result) {
+ if (i == ai) {
+ begs.push(i);
+ ai = str.indexOf(a, i + 1);
+ } else if (begs.length == 1) {
+ result = [ begs.pop(), bi ];
+ } else {
+ beg = begs.pop();
+ if (beg < left) {
+ left = beg;
+ right = bi;
+ }
+
+ bi = str.indexOf(b, i + 1);
+ }
+
+ i = ai < bi && ai >= 0 ? ai : bi;
+ }
+
+ if (begs.length) {
+ result = [ left, right ];
+ }
+ }
+
+ return result;
+}
+
+
+/***/ }),
+
+/***/ 3682:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var register = __nccwpck_require__(4670);
+var addHook = __nccwpck_require__(5549);
+var removeHook = __nccwpck_require__(6819);
+
+// bind with array of arguments: https://stackoverflow.com/a/21792913
+var bind = Function.bind;
+var bindable = bind.bind(bind);
+
+function bindApi(hook, state, name) {
+ var removeHookRef = bindable(removeHook, null).apply(
+ null,
+ name ? [state, name] : [state]
+ );
+ hook.api = { remove: removeHookRef };
+ hook.remove = removeHookRef;
+ ["before", "error", "after", "wrap"].forEach(function (kind) {
+ var args = name ? [state, kind, name] : [state, kind];
+ hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args);
+ });
+}
+
+function HookSingular() {
+ var singularHookName = "h";
+ var singularHookState = {
+ registry: {},
+ };
+ var singularHook = register.bind(null, singularHookState, singularHookName);
+ bindApi(singularHook, singularHookState, singularHookName);
+ return singularHook;
+}
+
+function HookCollection() {
+ var state = {
+ registry: {},
+ };
+
+ var hook = register.bind(null, state);
+ bindApi(hook, state);
+
+ return hook;
+}
+
+var collectionHookDeprecationMessageDisplayed = false;
+function Hook() {
+ if (!collectionHookDeprecationMessageDisplayed) {
+ console.warn(
+ '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4'
+ );
+ collectionHookDeprecationMessageDisplayed = true;
+ }
+ return HookCollection();
+}
+
+Hook.Singular = HookSingular.bind();
+Hook.Collection = HookCollection.bind();
+
+module.exports = Hook;
+// expose constructors as a named property for TypeScript
+module.exports.Hook = Hook;
+module.exports.Singular = Hook.Singular;
+module.exports.Collection = Hook.Collection;
+
+
+/***/ }),
+
+/***/ 5549:
+/***/ ((module) => {
+
+module.exports = addHook;
+
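+// addHook() wraps the original hook according to its kind: "before" runs it
+// ahead of the wrapped method, "after" receives the method's result, "error"
+// acts as a catch handler, and "wrap" hooks are stored unchanged.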
+function addHook(state, kind, name, hook) {
+ var orig = hook;
+ if (!state.registry[name]) {
+ state.registry[name] = [];
+ }
+
+ if (kind === "before") {
+ hook = function (method, options) {
+ return Promise.resolve()
+ .then(orig.bind(null, options))
+ .then(method.bind(null, options));
+ };
+ }
+
+ if (kind === "after") {
+ hook = function (method, options) {
+ var result;
+ return Promise.resolve()
+ .then(method.bind(null, options))
+ .then(function (result_) {
+ result = result_;
+ return orig(result, options);
+ })
+ .then(function () {
+ return result;
+ });
+ };
+ }
+
+ if (kind === "error") {
+ hook = function (method, options) {
+ return Promise.resolve()
+ .then(method.bind(null, options))
+ .catch(function (error) {
+ return orig(error, options);
+ });
+ };
+ }
+
+ state.registry[name].push({
+ hook: hook,
+ orig: orig,
+ });
+}
+
+
+/***/ }),
+
+/***/ 4670:
+/***/ ((module) => {
+
+module.exports = register;
+
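+// register() invokes "method" wrapped by every hook registered under "name";
+// an array of names composes the corresponding hook chains.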
+function register(state, name, method, options) {
+ if (typeof method !== "function") {
+ throw new Error("method for before hook must be a function");
+ }
+
+ if (!options) {
+ options = {};
+ }
+
+ if (Array.isArray(name)) {
+ return name.reverse().reduce(function (callback, name) {
+ return register.bind(null, state, name, callback, options);
+ }, method)();
+ }
+
+ return Promise.resolve().then(function () {
+ if (!state.registry[name]) {
+ return method(options);
+ }
+
+ return state.registry[name].reduce(function (method, registered) {
+ return registered.hook.bind(null, method, options);
+ }, method)();
+ });
+}
+
+
+/***/ }),
+
+/***/ 6819:
+/***/ ((module) => {
+
+module.exports = removeHook;
+
+function removeHook(state, name, method) {
+ if (!state.registry[name]) {
+ return;
+ }
+
+ var index = state.registry[name]
+ .map(function (registered) {
+ return registered.orig;
+ })
+ .indexOf(method);
+
+ if (index === -1) {
+ return;
+ }
+
+ state.registry[name].splice(index, 1);
+}
+
+
+/***/ }),
+
+/***/ 6891:
+/***/ ((module) => {
+
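+// Minimal flatMap: maps each element with fn and flattens array results one level.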
+module.exports = function (xs, fn) {
+ var res = [];
+ for (var i = 0; i < xs.length; i++) {
+ var x = fn(xs[i], i);
+ if (isArray(x)) res.push.apply(res, x);
+ else res.push(x);
+ }
+ return res;
+};
+
+var isArray = Array.isArray || function (xs) {
+ return Object.prototype.toString.call(xs) === '[object Array]';
+};
+
+
+/***/ }),
+
+/***/ 8932:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+class Deprecation extends Error {
+ constructor(message) {
+ super(message); // Maintains proper stack trace (only available on V8)
+
+ /* istanbul ignore next */
+
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+
+ this.name = 'Deprecation';
+ }
+
+}
+
+exports.Deprecation = Deprecation;
+
+
+/***/ }),
+
+/***/ 1223:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var wrappy = __nccwpck_require__(2940)
+module.exports = wrappy(once)
+module.exports.strict = wrappy(onceStrict)
+
+once.proto = once(function () {
+ Object.defineProperty(Function.prototype, 'once', {
+ value: function () {
+ return once(this)
+ },
+ configurable: true
+ })
+
+ Object.defineProperty(Function.prototype, 'onceStrict', {
+ value: function () {
+ return onceStrict(this)
+ },
+ configurable: true
+ })
+})
+
+function once (fn) {
+ var f = function () {
+ if (f.called) return f.value
+ f.called = true
+ return f.value = fn.apply(this, arguments)
+ }
+ f.called = false
+ return f
+}
+
+function onceStrict (fn) {
+ var f = function () {
+ if (f.called)
+ throw new Error(f.onceError)
+ f.called = true
+ return f.value = fn.apply(this, arguments)
+ }
+ var name = fn.name || 'Function wrapped with `once`'
+ f.onceError = name + " shouldn't be called more than once"
+ f.called = false
+ return f
+}
+
+
+/***/ }),
+
+/***/ 2043:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+;(function (sax) { // wrapper for non-node envs
+ sax.parser = function (strict, opt) { return new SAXParser(strict, opt) }
+ sax.SAXParser = SAXParser
+ sax.SAXStream = SAXStream
+ sax.createStream = createStream
+
+ // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns.
+ // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)),
+ // since that's the earliest that a buffer overrun could occur. This way, checks are
+ // as rare as required, but as often as necessary to ensure never crossing this bound.
+ // Furthermore, buffers are only tested at most once per write(), so passing a very
+ // large string into write() might have undesirable effects, but this is manageable by
+ // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme
+ // edge case, result in creating at most one complete copy of the string passed in.
+ // Set to Infinity to have unlimited buffers.
+ sax.MAX_BUFFER_LENGTH = 64 * 1024
+
+ var buffers = [
+ 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype',
+ 'procInstName', 'procInstBody', 'entity', 'attribName',
+ 'attribValue', 'cdata', 'script'
+ ]
+
+ sax.EVENTS = [
+ 'text',
+ 'processinginstruction',
+ 'sgmldeclaration',
+ 'doctype',
+ 'comment',
+ 'opentagstart',
+ 'attribute',
+ 'opentag',
+ 'closetag',
+ 'opencdata',
+ 'cdata',
+ 'closecdata',
+ 'error',
+ 'end',
+ 'ready',
+ 'script',
+ 'opennamespace',
+ 'closenamespace'
+ ]
+
+ function SAXParser (strict, opt) {
+ if (!(this instanceof SAXParser)) {
+ return new SAXParser(strict, opt)
+ }
+
+ var parser = this
+ clearBuffers(parser)
+ parser.q = parser.c = ''
+ parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH
+ parser.opt = opt || {}
+ parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags
+ parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase'
+ parser.tags = []
+ parser.closed = parser.closedRoot = parser.sawRoot = false
+ parser.tag = parser.error = null
+ parser.strict = !!strict
+ parser.noscript = !!(strict || parser.opt.noscript)
+ parser.state = S.BEGIN
+ parser.strictEntities = parser.opt.strictEntities
+ parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES)
+ parser.attribList = []
+
+ // namespaces form a prototype chain.
+ // it always points at the current tag,
+ // which protos to its parent tag.
+ if (parser.opt.xmlns) {
+ parser.ns = Object.create(rootNS)
+ }
+
+ // disallow unquoted attribute values if not otherwise configured
+ // and strict mode is true
+ if (parser.opt.unquotedAttributeValues === undefined) {
+ parser.opt.unquotedAttributeValues = !strict;
+ }
+
+ // mostly just for error reporting
+ parser.trackPosition = parser.opt.position !== false
+ if (parser.trackPosition) {
+ parser.position = parser.line = parser.column = 0
+ }
+ emit(parser, 'onready')
+ }
+
+ if (!Object.create) {
+ Object.create = function (o) {
+ function F () {}
+ F.prototype = o
+ var newf = new F()
+ return newf
+ }
+ }
+
+ if (!Object.keys) {
+ Object.keys = function (o) {
+ var a = []
+ for (var i in o) if (o.hasOwnProperty(i)) a.push(i)
+ return a
+ }
+ }
+
+ function checkBufferLength (parser) {
+ var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10)
+ var maxActual = 0
+ for (var i = 0, l = buffers.length; i < l; i++) {
+ var len = parser[buffers[i]].length
+ if (len > maxAllowed) {
+ // Text/cdata nodes can get big, and since they're buffered,
+ // we can get here under normal conditions.
+ // Avoid issues by emitting the text node now,
+ // so at least it won't get any bigger.
+ switch (buffers[i]) {
+ case 'textNode':
+ closeText(parser)
+ break
+
+ case 'cdata':
+ emitNode(parser, 'oncdata', parser.cdata)
+ parser.cdata = ''
+ break
+
+ case 'script':
+ emitNode(parser, 'onscript', parser.script)
+ parser.script = ''
+ break
+
+ default:
+ error(parser, 'Max buffer length exceeded: ' + buffers[i])
+ }
+ }
+ maxActual = Math.max(maxActual, len)
+ }
+ // schedule the next check for the earliest possible buffer overrun.
+ var m = sax.MAX_BUFFER_LENGTH - maxActual
+ parser.bufferCheckPosition = m + parser.position
+ }
+
+ function clearBuffers (parser) {
+ for (var i = 0, l = buffers.length; i < l; i++) {
+ parser[buffers[i]] = ''
+ }
+ }
+
+ function flushBuffers (parser) {
+ closeText(parser)
+ if (parser.cdata !== '') {
+ emitNode(parser, 'oncdata', parser.cdata)
+ parser.cdata = ''
+ }
+ if (parser.script !== '') {
+ emitNode(parser, 'onscript', parser.script)
+ parser.script = ''
+ }
+ }
+
+ SAXParser.prototype = {
+ end: function () { end(this) },
+ write: write,
+ resume: function () { this.error = null; return this },
+ close: function () { return this.write(null) },
+ flush: function () { flushBuffers(this) }
+ }
+
+ var Stream
+ try {
+ Stream = (__nccwpck_require__(2781).Stream)
+ } catch (ex) {
+ Stream = function () {}
+ }
+ if (!Stream) Stream = function () {}
+
+ var streamWraps = sax.EVENTS.filter(function (ev) {
+ return ev !== 'error' && ev !== 'end'
+ })
+
+ function createStream (strict, opt) {
+ return new SAXStream(strict, opt)
+ }
+
+ function SAXStream (strict, opt) {
+ if (!(this instanceof SAXStream)) {
+ return new SAXStream(strict, opt)
+ }
+
+ Stream.apply(this)
+
+ this._parser = new SAXParser(strict, opt)
+ this.writable = true
+ this.readable = true
+
+ var me = this
+
+ this._parser.onend = function () {
+ me.emit('end')
+ }
+
+ this._parser.onerror = function (er) {
+ me.emit('error', er)
+
+ // if didn't throw, then means error was handled.
+ // go ahead and clear error, so we can write again.
+ me._parser.error = null
+ }
+
+ this._decoder = null
+
+ streamWraps.forEach(function (ev) {
+ Object.defineProperty(me, 'on' + ev, {
+ get: function () {
+ return me._parser['on' + ev]
+ },
+ set: function (h) {
+ if (!h) {
+ me.removeAllListeners(ev)
+ me._parser['on' + ev] = h
+ return h
+ }
+ me.on(ev, h)
+ },
+ enumerable: true,
+ configurable: false
+ })
+ })
+ }
+
+ SAXStream.prototype = Object.create(Stream.prototype, {
+ constructor: {
+ value: SAXStream
+ }
+ })
+
+ SAXStream.prototype.write = function (data) {
+ if (typeof Buffer === 'function' &&
+ typeof Buffer.isBuffer === 'function' &&
+ Buffer.isBuffer(data)) {
+ if (!this._decoder) {
+ var SD = (__nccwpck_require__(1576).StringDecoder)
+ this._decoder = new SD('utf8')
+ }
+ data = this._decoder.write(data)
+ }
+
+ this._parser.write(data.toString())
+ this.emit('data', data)
+ return true
+ }
+
+ SAXStream.prototype.end = function (chunk) {
+ if (chunk && chunk.length) {
+ this.write(chunk)
+ }
+ this._parser.end()
+ return true
+ }
+
+ SAXStream.prototype.on = function (ev, handler) {
+ var me = this
+ if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) {
+ me._parser['on' + ev] = function () {
+ var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments)
+ args.splice(0, 0, ev)
+ me.emit.apply(me, args)
+ }
+ }
+
+ return Stream.prototype.on.call(me, ev, handler)
+ }
+
+ // this really needs to be replaced with character classes.
+ // XML allows all manner of ridiculous numbers and digits.
+ var CDATA = '[CDATA['
+ var DOCTYPE = 'DOCTYPE'
+ var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'
+ var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/'
+ var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE }
+
+ // http://www.w3.org/TR/REC-xml/#NT-NameStartChar
+ // This implementation works on strings, a single character at a time
+ // as such, it cannot ever support astral-plane characters (10000-EFFFF)
+ // without a significant breaking change to either this parser, or the
+ // JavaScript language. Implementation of an emoji-capable xml parser
+ // is left as an exercise for the reader.
+ var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
+
+ var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/
+
+ var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
+ var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/
+
+ function isWhitespace (c) {
+ return c === ' ' || c === '\n' || c === '\r' || c === '\t'
+ }
+
+ function isQuote (c) {
+ return c === '"' || c === '\''
+ }
+
+ function isAttribEnd (c) {
+ return c === '>' || isWhitespace(c)
+ }
+
+ function isMatch (regex, c) {
+ return regex.test(c)
+ }
+
+ function notMatch (regex, c) {
+ return !isMatch(regex, c)
+ }
+
+ var S = 0
+ sax.STATE = {
+ BEGIN: S++, // leading byte order mark or whitespace
+ BEGIN_WHITESPACE: S++, // leading whitespace
+ TEXT: S++, // general stuff
+ TEXT_ENTITY: S++, // &amp and such.
+ OPEN_WAKA: S++, // <
+ SGML_DECL: S++, // <!BLARG
+ SGML_DECL_QUOTED: S++, // <!BLARG foo "bar
+ DOCTYPE: S++, // <!DOCTYPE
+ DOCTYPE_QUOTED: S++, // <!DOCTYPE "//blah
+ DOCTYPE_DTD: S++, // <!DOCTYPE "//blah" [ ...
+ DOCTYPE_DTD_QUOTED: S++, // <!DOCTYPE "//blah" [ "foo
+ COMMENT_STARTING: S++, // <!-
+ COMMENT: S++, // <!--
+ COMMENT_ENDING: S++, // <!-- blah -
+ COMMENT_ENDED: S++, // <!-- blah --
+ CDATA: S++, // <![CDATA[ something
+ CDATA_ENDING: S++, // ]
+ CDATA_ENDING_2: S++, // ]]
+ PROC_INST: S++, // <?hi
+ PROC_INST_BODY: S++, // <?hi there
+ PROC_INST_ENDING: S++, // <?hi "there" ?
+ OPEN_TAG: S++, // <strong
+ OPEN_TAG_SLASH: S++, // <strong /
+ ATTRIB: S++, // <a
+ ATTRIB_NAME: S++, // <a foo
+ ATTRIB_NAME_SAW_WHITE: S++, // <a foo _
+ ATTRIB_VALUE: S++, // <a foo=
+ ATTRIB_VALUE_QUOTED: S++, // <a foo="bar
+ ATTRIB_VALUE_CLOSED: S++, // <a foo="bar"
+ ATTRIB_VALUE_UNQUOTED: S++, // <a foo=bar
+ ATTRIB_VALUE_ENTITY_Q: S++, // <foo bar="&quot;"
+ ATTRIB_VALUE_ENTITY_U: S++, // <foo bar=&quot
+ CLOSE_TAG: S++, // </a
+ CLOSE_TAG_SAW_WHITE: S++, // </a   >
+ SCRIPT: S++, // <script> ...