Merge branch 'main' into marcogario/skip_proxy
This commit is contained in:
commit
8aa028b476
452 changed files with 26268 additions and 1143 deletions
2
.github/workflows/__all-platform-bundle.yml
generated
vendored
2
.github/workflows/__all-platform-bundle.yml
generated
vendored
|
|
@ -32,7 +32,7 @@ jobs:
|
|||
name: All-platform bundle
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__analyze-ref-input.yml
generated
vendored
2
.github/workflows/__analyze-ref-input.yml
generated
vendored
|
|
@ -36,7 +36,7 @@ jobs:
|
|||
name: "Analyze: 'ref' and 'sha' from inputs"
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__autobuild-action.yml
generated
vendored
2
.github/workflows/__autobuild-action.yml
generated
vendored
|
|
@ -36,7 +36,7 @@ jobs:
|
|||
name: autobuild-action
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__autobuild-direct-tracing-with-working-dir.yml
generated
vendored
2
.github/workflows/__autobuild-direct-tracing-with-working-dir.yml
generated
vendored
|
|
@ -38,7 +38,7 @@ jobs:
|
|||
name: Autobuild direct tracing (custom working directory)
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__autobuild-direct-tracing.yml
generated
vendored
2
.github/workflows/__autobuild-direct-tracing.yml
generated
vendored
|
|
@ -38,7 +38,7 @@ jobs:
|
|||
name: Autobuild direct tracing
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__build-mode-autobuild.yml
generated
vendored
2
.github/workflows/__build-mode-autobuild.yml
generated
vendored
|
|
@ -32,7 +32,7 @@ jobs:
|
|||
name: Build mode autobuild
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__build-mode-manual.yml
generated
vendored
2
.github/workflows/__build-mode-manual.yml
generated
vendored
|
|
@ -32,7 +32,7 @@ jobs:
|
|||
name: Build mode manual
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__build-mode-none.yml
generated
vendored
2
.github/workflows/__build-mode-none.yml
generated
vendored
|
|
@ -34,7 +34,7 @@ jobs:
|
|||
name: Build mode none
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__build-mode-rollback.yml
generated
vendored
2
.github/workflows/__build-mode-rollback.yml
generated
vendored
|
|
@ -32,7 +32,7 @@ jobs:
|
|||
name: Build mode rollback
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__cleanup-db-cluster-dir.yml
generated
vendored
2
.github/workflows/__cleanup-db-cluster-dir.yml
generated
vendored
|
|
@ -32,7 +32,7 @@ jobs:
|
|||
name: Clean up database cluster directory
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__config-export.yml
generated
vendored
2
.github/workflows/__config-export.yml
generated
vendored
|
|
@ -42,7 +42,7 @@ jobs:
|
|||
name: Config export
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__config-input.yml
generated
vendored
2
.github/workflows/__config-input.yml
generated
vendored
|
|
@ -32,7 +32,7 @@ jobs:
|
|||
name: Config input
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__cpp-deptrace-disabled.yml
generated
vendored
2
.github/workflows/__cpp-deptrace-disabled.yml
generated
vendored
|
|
@ -36,7 +36,7 @@ jobs:
|
|||
name: 'C/C++: disabling autoinstalling dependencies (Linux)'
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__cpp-deptrace-enabled-on-macos.yml
generated
vendored
2
.github/workflows/__cpp-deptrace-enabled-on-macos.yml
generated
vendored
|
|
@ -32,7 +32,7 @@ jobs:
|
|||
name: 'C/C++: autoinstalling dependencies is skipped (macOS)'
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__cpp-deptrace-enabled.yml
generated
vendored
2
.github/workflows/__cpp-deptrace-enabled.yml
generated
vendored
|
|
@ -36,7 +36,7 @@ jobs:
|
|||
name: 'C/C++: autoinstalling dependencies (Linux)'
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__diagnostics-export.yml
generated
vendored
2
.github/workflows/__diagnostics-export.yml
generated
vendored
|
|
@ -42,7 +42,7 @@ jobs:
|
|||
name: Diagnostic export
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__export-file-baseline-information.yml
generated
vendored
2
.github/workflows/__export-file-baseline-information.yml
generated
vendored
|
|
@ -36,7 +36,7 @@ jobs:
|
|||
name: Export file baseline information
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__extract-direct-to-toolcache.yml
generated
vendored
2
.github/workflows/__extract-direct-to-toolcache.yml
generated
vendored
|
|
@ -36,7 +36,7 @@ jobs:
|
|||
name: Extract directly to toolcache
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__extractor-ram-threads.yml
generated
vendored
2
.github/workflows/__extractor-ram-threads.yml
generated
vendored
|
|
@ -32,7 +32,7 @@ jobs:
|
|||
name: Extractor ram and threads options test
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__go-custom-queries.yml
generated
vendored
2
.github/workflows/__go-custom-queries.yml
generated
vendored
|
|
@ -34,7 +34,7 @@ jobs:
|
|||
name: 'Go: Custom queries'
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__go-indirect-tracing-workaround-diagnostic.yml
generated
vendored
2
.github/workflows/__go-indirect-tracing-workaround-diagnostic.yml
generated
vendored
|
|
@ -32,7 +32,7 @@ jobs:
|
|||
name: 'Go: diagnostic when Go is changed after init step'
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__go-indirect-tracing-workaround-no-file-program.yml
generated
vendored
2
.github/workflows/__go-indirect-tracing-workaround-no-file-program.yml
generated
vendored
|
|
@ -32,7 +32,7 @@ jobs:
|
|||
name: 'Go: diagnostic when `file` is not installed'
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__go-indirect-tracing-workaround.yml
generated
vendored
2
.github/workflows/__go-indirect-tracing-workaround.yml
generated
vendored
|
|
@ -32,7 +32,7 @@ jobs:
|
|||
name: 'Go: workaround for indirect tracing'
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__go-tracing-autobuilder.yml
generated
vendored
2
.github/workflows/__go-tracing-autobuilder.yml
generated
vendored
|
|
@ -62,7 +62,7 @@ jobs:
|
|||
name: 'Go: tracing with autobuilder step'
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__go-tracing-custom-build-steps.yml
generated
vendored
2
.github/workflows/__go-tracing-custom-build-steps.yml
generated
vendored
|
|
@ -62,7 +62,7 @@ jobs:
|
|||
name: 'Go: tracing with custom build steps'
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__go-tracing-legacy-workflow.yml
generated
vendored
2
.github/workflows/__go-tracing-legacy-workflow.yml
generated
vendored
|
|
@ -62,7 +62,7 @@ jobs:
|
|||
name: 'Go: tracing with legacy workflow'
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__javascript-source-root.yml
generated
vendored
2
.github/workflows/__javascript-source-root.yml
generated
vendored
|
|
@ -36,7 +36,7 @@ jobs:
|
|||
name: Custom source root
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__job-run-uuid-sarif.yml
generated
vendored
2
.github/workflows/__job-run-uuid-sarif.yml
generated
vendored
|
|
@ -32,7 +32,7 @@ jobs:
|
|||
name: Job run UUID added to SARIF
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__language-aliases.yml
generated
vendored
2
.github/workflows/__language-aliases.yml
generated
vendored
|
|
@ -32,7 +32,7 @@ jobs:
|
|||
name: Language aliases
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__multi-language-autodetect.yml
generated
vendored
2
.github/workflows/__multi-language-autodetect.yml
generated
vendored
|
|
@ -62,7 +62,7 @@ jobs:
|
|||
name: Multi-language repository
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__packaging-codescanning-config-inputs-js.yml
generated
vendored
2
.github/workflows/__packaging-codescanning-config-inputs-js.yml
generated
vendored
|
|
@ -48,7 +48,7 @@ jobs:
|
|||
name: 'Packaging: Config and input passed to the CLI'
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__packaging-config-inputs-js.yml
generated
vendored
2
.github/workflows/__packaging-config-inputs-js.yml
generated
vendored
|
|
@ -48,7 +48,7 @@ jobs:
|
|||
name: 'Packaging: Config and input'
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__packaging-config-js.yml
generated
vendored
2
.github/workflows/__packaging-config-js.yml
generated
vendored
|
|
@ -48,7 +48,7 @@ jobs:
|
|||
name: 'Packaging: Config file'
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__packaging-inputs-js.yml
generated
vendored
2
.github/workflows/__packaging-inputs-js.yml
generated
vendored
|
|
@ -48,7 +48,7 @@ jobs:
|
|||
name: 'Packaging: Action input'
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__remote-config.yml
generated
vendored
2
.github/workflows/__remote-config.yml
generated
vendored
|
|
@ -34,7 +34,7 @@ jobs:
|
|||
name: Remote config file
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__resolve-environment-action.yml
generated
vendored
2
.github/workflows/__resolve-environment-action.yml
generated
vendored
|
|
@ -48,7 +48,7 @@ jobs:
|
|||
name: Resolve environment
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__rubocop-multi-language.yml
generated
vendored
2
.github/workflows/__rubocop-multi-language.yml
generated
vendored
|
|
@ -32,7 +32,7 @@ jobs:
|
|||
name: RuboCop multi-language
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__ruby.yml
generated
vendored
2
.github/workflows/__ruby.yml
generated
vendored
|
|
@ -42,7 +42,7 @@ jobs:
|
|||
name: Ruby analysis
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__split-workflow.yml
generated
vendored
2
.github/workflows/__split-workflow.yml
generated
vendored
|
|
@ -42,7 +42,7 @@ jobs:
|
|||
name: Split workflow
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__start-proxy.yml
generated
vendored
2
.github/workflows/__start-proxy.yml
generated
vendored
|
|
@ -36,7 +36,7 @@ jobs:
|
|||
name: Start proxy
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
3
.github/workflows/__submit-sarif-failure.yml
generated
vendored
3
.github/workflows/__submit-sarif-failure.yml
generated
vendored
|
|
@ -36,7 +36,8 @@ jobs:
|
|||
name: Submit SARIF after failure
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: write # needed to upload the SARIF file
|
||||
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__swift-autobuild.yml
generated
vendored
2
.github/workflows/__swift-autobuild.yml
generated
vendored
|
|
@ -32,7 +32,7 @@ jobs:
|
|||
name: Swift analysis using autobuild
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__swift-custom-build.yml
generated
vendored
2
.github/workflows/__swift-custom-build.yml
generated
vendored
|
|
@ -36,7 +36,7 @@ jobs:
|
|||
name: Swift analysis using a custom build command
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__test-autobuild-working-dir.yml
generated
vendored
2
.github/workflows/__test-autobuild-working-dir.yml
generated
vendored
|
|
@ -32,7 +32,7 @@ jobs:
|
|||
name: Autobuild working directory
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__test-local-codeql.yml
generated
vendored
2
.github/workflows/__test-local-codeql.yml
generated
vendored
|
|
@ -32,7 +32,7 @@ jobs:
|
|||
name: Local CodeQL bundle
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__test-proxy.yml
generated
vendored
2
.github/workflows/__test-proxy.yml
generated
vendored
|
|
@ -34,7 +34,7 @@ jobs:
|
|||
name: Proxy test
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__unset-environment.yml
generated
vendored
2
.github/workflows/__unset-environment.yml
generated
vendored
|
|
@ -34,7 +34,7 @@ jobs:
|
|||
name: Test unsetting environment variables
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__upload-ref-sha-input.yml
generated
vendored
2
.github/workflows/__upload-ref-sha-input.yml
generated
vendored
|
|
@ -36,7 +36,7 @@ jobs:
|
|||
name: "Upload-sarif: 'ref' and 'sha' from inputs"
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__with-checkout-path.yml
generated
vendored
2
.github/workflows/__with-checkout-path.yml
generated
vendored
|
|
@ -36,7 +36,7 @@ jobs:
|
|||
name: Use a custom `checkout_path`
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__zstd-bundle-streaming.yml
generated
vendored
2
.github/workflows/__zstd-bundle-streaming.yml
generated
vendored
|
|
@ -34,7 +34,7 @@ jobs:
|
|||
name: Zstandard bundle (streaming)
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
2
.github/workflows/__zstd-bundle.yml
generated
vendored
2
.github/workflows/__zstd-bundle.yml
generated
vendored
|
|
@ -36,7 +36,7 @@ jobs:
|
|||
name: Zstandard bundle
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
|
|
@ -13,6 +13,9 @@ jobs:
|
|||
check-expected-release-files:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
steps:
|
||||
- name: Checkout CodeQL Action
|
||||
uses: actions/checkout@v4
|
||||
|
|
|
|||
5
.github/workflows/codeql.yml
vendored
5
.github/workflows/codeql.yml
vendored
|
|
@ -24,7 +24,7 @@ jobs:
|
|||
versions: ${{ steps.compare.outputs.versions }}
|
||||
|
||||
permissions:
|
||||
security-events: write
|
||||
contents: read
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
|
@ -80,7 +80,8 @@ jobs:
|
|||
runs-on: ${{ matrix.os }}
|
||||
|
||||
permissions:
|
||||
security-events: write
|
||||
contents: read
|
||||
security-events: write # needed to upload results
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
|
|
|
|||
|
|
@ -23,6 +23,11 @@ jobs:
|
|||
code-scanning-config-tests:
|
||||
continue-on-error: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
packages: read
|
||||
security-events: read
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
|
|
|
|||
|
|
@ -19,10 +19,20 @@ on:
|
|||
workflow_dispatch: {}
|
||||
jobs:
|
||||
upload-artifacts:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
version:
|
||||
- stable-v2.20.3
|
||||
- default
|
||||
- linked
|
||||
- nightly-latest
|
||||
name: Upload debug artifacts after failure in analyze
|
||||
continue-on-error: true
|
||||
env:
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
permissions:
|
||||
contents: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
|
|
@ -34,7 +44,7 @@ jobs:
|
|||
id: prepare-test
|
||||
uses: ./.github/actions/prepare-test
|
||||
with:
|
||||
version: linked
|
||||
version: ${{ matrix.version }}
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: ^1.13.1
|
||||
|
|
@ -58,6 +68,8 @@ jobs:
|
|||
name: Download and check debug artifacts after failure in analyze
|
||||
needs: upload-artifacts
|
||||
timeout-minutes: 45
|
||||
permissions:
|
||||
contents: read
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download all artifacts
|
||||
|
|
@ -66,22 +78,25 @@ jobs:
|
|||
shell: bash
|
||||
run: |
|
||||
LANGUAGES="cpp csharp go java javascript python"
|
||||
cd "./my-debug-artifacts"
|
||||
echo "Artifacts from run:"
|
||||
for language in $LANGUAGES; do
|
||||
echo "- Checking $language"
|
||||
if [[ ! -f "my-db-$language-partial.zip" ]] ; then
|
||||
echo "Missing a partial database bundle for $language"
|
||||
exit 1
|
||||
fi
|
||||
if [[ ! -d "log" ]] ; then
|
||||
echo "Missing database initialization logs"
|
||||
exit 1
|
||||
fi
|
||||
if [[ ! "$language" == "go" ]] && [[ ! -d "$language/log" ]] ; then
|
||||
echo "Missing logs for $language"
|
||||
exit 1
|
||||
fi
|
||||
for version in $VERSIONS; do
|
||||
echo "Artifacts from version $version:"
|
||||
pushd "./my-debug-artifacts-${version//./}"
|
||||
for language in $LANGUAGES; do
|
||||
echo "- Checking $language"
|
||||
if [[ ! -f "my-db-$language-partial.zip" ]] ; then
|
||||
echo "Missing a partial database bundle for $language"
|
||||
exit 1
|
||||
fi
|
||||
if [[ ! -d "log" ]] ; then
|
||||
echo "Missing database initialization logs"
|
||||
exit 1
|
||||
fi
|
||||
if [[ ! "$language" == "go" ]] && [[ ! -d "$language/log" ]] ; then
|
||||
echo "Missing logs for $language"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
popd
|
||||
done
|
||||
env:
|
||||
GO111MODULE: auto
|
||||
|
|
@ -22,11 +22,7 @@ jobs:
|
|||
fail-fast: false
|
||||
matrix:
|
||||
version:
|
||||
- stable-v2.15.5
|
||||
- stable-v2.16.6
|
||||
- stable-v2.17.6
|
||||
- stable-v2.18.4
|
||||
- stable-v2.19.4
|
||||
- stable-v2.20.3
|
||||
- default
|
||||
- linked
|
||||
- nightly-latest
|
||||
|
|
@ -34,6 +30,8 @@ jobs:
|
|||
env:
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
timeout-minutes: 45
|
||||
permissions:
|
||||
contents: read
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out repository
|
||||
|
|
@ -64,6 +62,8 @@ jobs:
|
|||
name: Download and check debug artifacts
|
||||
needs: upload-artifacts
|
||||
timeout-minutes: 45
|
||||
permissions:
|
||||
contents: read
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download all artifacts
|
||||
|
|
@ -71,7 +71,7 @@ jobs:
|
|||
- name: Check expected artifacts exist
|
||||
shell: bash
|
||||
run: |
|
||||
VERSIONS="stable-v2.15.5 stable-v2.16.6 stable-v2.17.6 stable-v2.18.4 stable-v2.19.4 default linked nightly-latest"
|
||||
VERSIONS="stable-v2.20.3 default linked nightly-latest"
|
||||
LANGUAGES="cpp csharp go java javascript python"
|
||||
for version in $VERSIONS; do
|
||||
pushd "./my-debug-artifacts-${version//./}"
|
||||
2
.github/workflows/expected-queries-runs.yml
vendored
2
.github/workflows/expected-queries-runs.yml
vendored
|
|
@ -24,7 +24,7 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v4
|
||||
|
|
|
|||
3
.github/workflows/post-release-mergeback.yml
vendored
3
.github/workflows/post-release-mergeback.yml
vendored
|
|
@ -27,6 +27,9 @@ jobs:
|
|||
BASE_BRANCH: "${{ github.event.inputs.baseBranch || 'main' }}"
|
||||
HEAD_BRANCH: "${{ github.head_ref || github.ref }}"
|
||||
|
||||
permissions:
|
||||
contents: write # needed to create tags and push commits
|
||||
|
||||
steps:
|
||||
- name: Dump environment
|
||||
run: env
|
||||
|
|
|
|||
11
.github/workflows/pr-checks.yml
vendored
11
.github/workflows/pr-checks.yml
vendored
|
|
@ -15,7 +15,7 @@ jobs:
|
|||
timeout-minutes: 45
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: write # needed to upload ESLint results
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
|
|
@ -40,6 +40,8 @@ jobs:
|
|||
check-node-modules:
|
||||
if: github.event_name != 'push' || github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/releases/v')
|
||||
name: Check modules up to date
|
||||
permissions:
|
||||
contents: read
|
||||
runs-on: macos-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
|
|
@ -51,6 +53,8 @@ jobs:
|
|||
check-file-contents:
|
||||
if: github.event_name != 'push' || github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/releases/v')
|
||||
name: Check file contents
|
||||
permissions:
|
||||
contents: read
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
|
|
@ -81,6 +85,8 @@ jobs:
|
|||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest, windows-latest]
|
||||
permissions:
|
||||
contents: read
|
||||
runs-on: ${{ matrix.os }}
|
||||
timeout-minutes: 45
|
||||
|
||||
|
|
@ -101,6 +107,9 @@ jobs:
|
|||
env:
|
||||
BASE_REF: ${{ github.base_ref }}
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- id: head-version
|
||||
|
|
|
|||
2
.github/workflows/python312-windows.yml
vendored
2
.github/workflows/python312-windows.yml
vendored
|
|
@ -17,6 +17,8 @@ jobs:
|
|||
env:
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
timeout-minutes: 45
|
||||
permissions:
|
||||
contents: read
|
||||
runs-on: windows-latest
|
||||
|
||||
steps:
|
||||
|
|
|
|||
3
.github/workflows/rebuild.yml
vendored
3
.github/workflows/rebuild.yml
vendored
|
|
@ -11,6 +11,9 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
if: github.event.label.name == 'Rebuild'
|
||||
|
||||
permissions:
|
||||
contents: write # needed to push rebuilt commit
|
||||
pull-requests: write # needed to comment on the PR
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
|
|
|||
2
.github/workflows/test-codeql-bundle-all.yml
vendored
2
.github/workflows/test-codeql-bundle-all.yml
vendored
|
|
@ -27,7 +27,7 @@ jobs:
|
|||
name: 'CodeQL Bundle All'
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
|
|
|
|||
3
.github/workflows/update-bundle.yml
vendored
3
.github/workflows/update-bundle.yml
vendored
|
|
@ -17,6 +17,9 @@ jobs:
|
|||
update-bundle:
|
||||
if: github.event.release.prerelease && startsWith(github.event.release.tag_name, 'codeql-bundle-')
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write # needed to push commits
|
||||
pull-requests: write # needed to create pull requests
|
||||
steps:
|
||||
- name: Dump environment
|
||||
run: env
|
||||
|
|
|
|||
3
.github/workflows/update-dependencies.yml
vendored
3
.github/workflows/update-dependencies.yml
vendored
|
|
@ -9,6 +9,9 @@ jobs:
|
|||
timeout-minutes: 45
|
||||
runs-on: macos-latest
|
||||
if: contains(github.event.pull_request.labels.*.name, 'Update dependencies') && (github.event.pull_request.head.repo.full_name == 'github/codeql-action')
|
||||
permissions:
|
||||
contents: write # needed to push the updated dependencies
|
||||
pull-requests: write # needed to comment on the PR
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
|
|
|||
8
.github/workflows/update-release-branch.yml
vendored
8
.github/workflows/update-release-branch.yml
vendored
|
|
@ -22,6 +22,8 @@ jobs:
|
|||
latest_tag: ${{ steps.versions.outputs.latest_tag }}
|
||||
backport_source_branch: ${{ steps.branches.outputs.backport_source_branch }}
|
||||
backport_target_branches: ${{ steps.branches.outputs.backport_target_branches }}
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
|
|
@ -63,6 +65,9 @@ jobs:
|
|||
REPOSITORY: "${{ github.repository }}"
|
||||
MAJOR_VERSION: "${{ needs.prepare.outputs.major_version }}"
|
||||
LATEST_TAG: "${{ needs.prepare.outputs.latest_tag }}"
|
||||
permissions:
|
||||
contents: write # needed to push commits
|
||||
pull-requests: write # needed to create pull request
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
|
|
@ -114,6 +119,9 @@ jobs:
|
|||
env:
|
||||
SOURCE_BRANCH: ${{ needs.prepare.outputs.backport_source_branch }}
|
||||
TARGET_BRANCH: ${{ matrix.target_branch }}
|
||||
permissions:
|
||||
contents: write # needed to push commits
|
||||
pull-requests: write # needed to create pull request
|
||||
steps:
|
||||
- name: Generate token
|
||||
uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755
|
||||
|
|
|
|||
|
|
@ -10,7 +10,10 @@ jobs:
|
|||
name: Update Supported Enterprise Server Versions
|
||||
timeout-minutes: 45
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.repository == 'github/codeql-action' }}
|
||||
if: github.repository == 'github/codeql-action'
|
||||
permissions:
|
||||
contents: write # needed to push commits
|
||||
pull-requests: write # needed to create pull request
|
||||
|
||||
steps:
|
||||
- name: Setup Python
|
||||
|
|
|
|||
|
|
@ -4,6 +4,10 @@ See the [releases page](https://github.com/github/codeql-action/releases) for th
|
|||
|
||||
## [UNRELEASED]
|
||||
|
||||
- Re-enable debug artifact upload for CLI versions 2.20.3 or greater. [#2726](https://github.com/github/codeql-action/pull/2726)
|
||||
|
||||
## 3.28.5 - 24 Jan 2025
|
||||
|
||||
- Update default CodeQL bundle version to 2.20.3. [#2717](https://github.com/github/codeql-action/pull/2717)
|
||||
|
||||
## 3.28.4 - 23 Jan 2025
|
||||
|
|
|
|||
5
lib/analyze-action-post.js
generated
5
lib/analyze-action-post.js
generated
|
|
@ -41,6 +41,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|||
const core = __importStar(require("@actions/core"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const api_client_1 = require("./api-client");
|
||||
const codeql_1 = require("./codeql");
|
||||
const config_utils_1 = require("./config-utils");
|
||||
const debugArtifacts = __importStar(require("./debug-artifacts"));
|
||||
const environment_1 = require("./environment");
|
||||
|
|
@ -57,7 +58,9 @@ async function runWrapper() {
|
|||
if (process.env[environment_1.EnvVar.INIT_ACTION_HAS_RUN] === "true") {
|
||||
const config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
|
||||
if (config !== undefined) {
|
||||
await (0, logging_1.withGroup)("Uploading combined SARIF debug artifact", () => debugArtifacts.uploadCombinedSarifArtifacts(logger, config.gitHubVersion.type));
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
const version = await codeql.getVersion();
|
||||
await (0, logging_1.withGroup)("Uploading combined SARIF debug artifact", () => debugArtifacts.uploadCombinedSarifArtifacts(logger, config.gitHubVersion.type, version.version));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"analyze-action-post.js","sourceRoot":"","sources":["../src/analyze-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAAgD;AAChD,iDAA2C;AAC3C,kEAAoD;AACpD,+CAAuC;AACvC,uCAAwD;AACxD,iCAAoE;AAEpE,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,WAAW,CAAC,aAAa,EAAE,CAAC;QAC5B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,kFAAkF;QAClF,wFAAwF;QACxF,IAAI,OAAO,CAAC,GAAG,CAAC,oBAAM,CAAC,mBAAmB,CAAC,KAAK,MAAM,EAAE,CAAC;YACvD,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAC5B,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;YACF,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;gBACzB,MAAM,IAAA,mBAAS,EAAC,yCAAyC,EAAE,GAAG,EAAE,CAC9D,cAAc,CAAC,4BAA4B,CACzC,MAAM,EACN,MAAM,CAAC,aAAa,CAAC,IAAI,CAC1B,CACF,CAAC;YACJ,CAAC;QACH,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,oCAAoC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CAC7D,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||
{"version":3,"file":"analyze-action-post.js","sourceRoot":"","sources":["../src/analyze-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAAgD;AAChD,qCAAqC;AACrC,iDAA2C;AAC3C,kEAAoD;AACpD,+CAAuC;AACvC,uCAAwD;AACxD,iCAAoE;AAEpE,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,WAAW,CAAC,aAAa,EAAE,CAAC;QAC5B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,kFAAkF;QAClF,wFAAwF;QACxF,IAAI,OAAO,CAAC,GAAG,CAAC,oBAAM,CAAC,mBAAmB,CAAC,KAAK,MAAM,EAAE,CAAC;YACvD,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAC5B,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;YACF,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;gBACzB,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;gBACjD,MAAM,OAAO,GAAG,MAAM,MAAM,CAAC,UAAU,EAAE,CAAC;gBAC1C,MAAM,IAAA,mBAAS,EAAC,yCAAyC,EAAE,GAAG,EAAE,CAC9D,cAAc,CAAC,4BAA4B,CACzC,MAAM,EACN,MAAM,CAAC,aAAa,CAAC,IAAI,EACzB,OAAO,CAAC,OAAO,CAChB,CACF,CAAC;YACJ,CAAC;QACH,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,oCAAoC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CAC7D,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||
24
lib/debug-artifacts.js
generated
24
lib/debug-artifacts.js
generated
|
|
@ -53,6 +53,7 @@ const analyze_1 = require("./analyze");
|
|||
const codeql_1 = require("./codeql");
|
||||
const environment_1 = require("./environment");
|
||||
const logging_1 = require("./logging");
|
||||
const tools_features_1 = require("./tools-features");
|
||||
const util_1 = require("./util");
|
||||
function sanitizeArtifactName(name) {
|
||||
return name.replace(/[^a-zA-Z0-9_\\-]+/g, "");
|
||||
|
|
@ -61,7 +62,7 @@ function sanitizeArtifactName(name) {
|
|||
* Upload Actions SARIF artifacts for debugging when CODEQL_ACTION_DEBUG_COMBINED_SARIF
|
||||
* environment variable is set
|
||||
*/
|
||||
async function uploadCombinedSarifArtifacts(logger, gitHubVariant) {
|
||||
async function uploadCombinedSarifArtifacts(logger, gitHubVariant, codeQlVersion) {
|
||||
const tempDir = (0, actions_util_1.getTemporaryDirectory)();
|
||||
// Upload Actions SARIF artifacts for debugging when environment variable is set
|
||||
if (process.env["CODEQL_ACTION_DEBUG_COMBINED_SARIF"] === "true") {
|
||||
|
|
@ -80,7 +81,7 @@ async function uploadCombinedSarifArtifacts(logger, gitHubVariant) {
|
|||
}
|
||||
}
|
||||
try {
|
||||
await uploadDebugArtifacts(logger, toUpload, baseTempDir, "combined-sarif-artifacts", gitHubVariant);
|
||||
await uploadDebugArtifacts(logger, toUpload, baseTempDir, "combined-sarif-artifacts", gitHubVariant, codeQlVersion);
|
||||
}
|
||||
catch (e) {
|
||||
logger.warning(`Failed to upload combined SARIF files as Actions debugging artifact. Reason: ${(0, util_1.getErrorMessage)(e)}`);
|
||||
|
|
@ -140,7 +141,7 @@ async function tryBundleDatabase(config, language, logger) {
|
|||
*
|
||||
* Logs and suppresses any errors that occur.
|
||||
*/
|
||||
async function tryUploadAllAvailableDebugArtifacts(config, logger) {
|
||||
async function tryUploadAllAvailableDebugArtifacts(config, logger, codeQlVersion) {
|
||||
const filesToUpload = [];
|
||||
try {
|
||||
for (const language of config.languages) {
|
||||
|
|
@ -180,20 +181,23 @@ async function tryUploadAllAvailableDebugArtifacts(config, logger) {
|
|||
return;
|
||||
}
|
||||
try {
|
||||
await (0, logging_1.withGroup)("Uploading debug artifacts", async () => uploadDebugArtifacts(logger, filesToUpload, config.dbLocation, config.debugArtifactName, config.gitHubVersion.type));
|
||||
await (0, logging_1.withGroup)("Uploading debug artifacts", async () => uploadDebugArtifacts(logger, filesToUpload, config.dbLocation, config.debugArtifactName, config.gitHubVersion.type, codeQlVersion));
|
||||
}
|
||||
catch (e) {
|
||||
logger.warning(`Failed to upload debug artifacts. Reason: ${(0, util_1.getErrorMessage)(e)}`);
|
||||
}
|
||||
}
|
||||
async function uploadDebugArtifacts(logger, toUpload, rootDir, artifactName, ghVariant) {
|
||||
async function uploadDebugArtifacts(logger, toUpload, rootDir, artifactName, ghVariant, codeQlVersion) {
|
||||
if (toUpload.length === 0) {
|
||||
return;
|
||||
return "no-artifacts-to-upload";
|
||||
}
|
||||
const uploadSupported = (0, tools_features_1.isSafeArtifactUpload)(codeQlVersion);
|
||||
if (!uploadSupported) {
|
||||
core.info(`Skipping debug artifact upload because the current CLI does not support safe upload. Please upgrade to CLI v${tools_features_1.SafeArtifactUploadVersion} or later.`);
|
||||
return "upload-not-supported";
|
||||
}
|
||||
logger.info("Uploading debug artifacts is temporarily disabled");
|
||||
return;
|
||||
let suffix = "";
|
||||
const matrix = (0, actions_util_1.getRequiredInput)("matrix");
|
||||
const matrix = (0, actions_util_1.getOptionalInput)("matrix");
|
||||
if (matrix) {
|
||||
try {
|
||||
for (const [, matrixVal] of Object.entries(JSON.parse(matrix)).sort())
|
||||
|
|
@ -209,10 +213,12 @@ async function uploadDebugArtifacts(logger, toUpload, rootDir, artifactName, ghV
|
|||
// ensure we don't keep the debug artifacts around for too long since they can be large.
|
||||
retentionDays: 7,
|
||||
});
|
||||
return "upload-successful";
|
||||
}
|
||||
catch (e) {
|
||||
// A failure to upload debug artifacts should not fail the entire action.
|
||||
core.warning(`Failed to upload debug artifacts: ${e}`);
|
||||
return "upload-failed";
|
||||
}
|
||||
}
|
||||
// `@actions/artifact@v2` is not yet supported on GHES so the legacy version of the client will be used on GHES
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
42
lib/debug-artifacts.test.js
generated
42
lib/debug-artifacts.test.js
generated
|
|
@ -46,9 +46,47 @@ const util_1 = require("./util");
|
|||
t.deepEqual(debugArtifacts.sanitizeArtifactName("hello===123"), "hello123");
|
||||
t.deepEqual(debugArtifacts.sanitizeArtifactName("*m)a&n^y%i££n+v!a:l[i]d"), "manyinvalid");
|
||||
});
|
||||
(0, ava_1.default)("uploadDebugArtifacts", async (t) => {
|
||||
// These next tests check the correctness of the logic to determine whether or not
|
||||
// artifacts are uploaded in debug mode. Since it's not easy to mock the actual
|
||||
// call to upload an artifact, we just check that we get an "upload-failed" result,
|
||||
// instead of actually uploading the artifact.
|
||||
//
|
||||
// For tests where we expect artifact upload to be blocked, we check for a different
|
||||
// response from the function.
|
||||
(0, ava_1.default)("uploadDebugArtifacts when artifacts empty should emit 'no-artifacts-to-upload'", async (t) => {
|
||||
// Test that no error is thrown if artifacts list is empty.
|
||||
const logger = (0, logging_1.getActionsLogger)();
|
||||
await t.notThrowsAsync(debugArtifacts.uploadDebugArtifacts(logger, [], "rootDir", "artifactName", util_1.GitHubVariant.DOTCOM));
|
||||
await t.notThrowsAsync(async () => {
|
||||
const uploaded = await debugArtifacts.uploadDebugArtifacts(logger, [], "i-dont-exist", "artifactName", util_1.GitHubVariant.DOTCOM, undefined);
|
||||
t.is(uploaded, "no-artifacts-to-upload", "Should not have uploaded any artifacts");
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("uploadDebugArtifacts when no codeql version is used should invoke artifact upload", async (t) => {
|
||||
// Test that the artifact is uploaded.
|
||||
const logger = (0, logging_1.getActionsLogger)();
|
||||
await t.notThrowsAsync(async () => {
|
||||
const uploaded = await debugArtifacts.uploadDebugArtifacts(logger, ["hucairz"], "i-dont-exist", "artifactName", util_1.GitHubVariant.DOTCOM, undefined);
|
||||
t.is(uploaded,
|
||||
// The failure is expected since we don't want to actually upload any artifacts in unit tests.
|
||||
"upload-failed", "Expect failure to upload artifacts since root dir does not exist");
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("uploadDebugArtifacts when new codeql version is used should invoke artifact upload", async (t) => {
|
||||
// Test that the artifact is uploaded.
|
||||
const logger = (0, logging_1.getActionsLogger)();
|
||||
await t.notThrowsAsync(async () => {
|
||||
const uploaded = await debugArtifacts.uploadDebugArtifacts(logger, ["hucairz"], "i-dont-exist", "artifactName", util_1.GitHubVariant.DOTCOM, "2.20.3");
|
||||
t.is(uploaded,
|
||||
// The failure is expected since we don't want to actually upload any artifacts in unit tests.
|
||||
"upload-failed", "Expect failure to upload artifacts since root dir does not exist");
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("uploadDebugArtifacts when old codeql is used should avoid trying to upload artifacts", async (t) => {
|
||||
// Test that the artifact is not uploaded.
|
||||
const logger = (0, logging_1.getActionsLogger)();
|
||||
await t.notThrowsAsync(async () => {
|
||||
const uploaded = await debugArtifacts.uploadDebugArtifacts(logger, ["hucairz"], "i-dont-exist", "artifactName", util_1.GitHubVariant.DOTCOM, "2.20.2");
|
||||
t.is(uploaded, "upload-not-supported", "Expected artifact upload to be blocked because of old CodeQL version");
|
||||
});
|
||||
});
|
||||
//# sourceMappingURL=debug-artifacts.test.js.map
|
||||
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"debug-artifacts.test.js","sourceRoot":"","sources":["../src/debug-artifacts.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AAEvB,kEAAoD;AACpD,uCAA6C;AAC7C,iCAAuC;AAEvC,IAAA,aAAI,EAAC,sBAAsB,EAAE,CAAC,CAAC,EAAE,EAAE;IACjC,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,cAAc,CAAC,EACnD,cAAc,CACf,CAAC;IACF,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,cAAc,CAAC,EACnD,YAAY,CACb,CAAC;IACF,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,oBAAoB,CAAC,aAAa,CAAC,EAAE,UAAU,CAAC,CAAC;IAC5E,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,yBAAyB,CAAC,EAC9D,aAAa,CACd,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,sBAAsB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvC,2DAA2D;IAC3D,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,CAAC,CAAC,cAAc,CACpB,cAAc,CAAC,oBAAoB,CACjC,MAAM,EACN,EAAE,EACF,SAAS,EACT,cAAc,EACd,oBAAa,CAAC,MAAM,CACrB,CACF,CAAC;AACJ,CAAC,CAAC,CAAC"}
|
||||
{"version":3,"file":"debug-artifacts.test.js","sourceRoot":"","sources":["../src/debug-artifacts.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AAEvB,kEAAoD;AACpD,uCAA6C;AAC7C,iCAAuC;AAEvC,IAAA,aAAI,EAAC,sBAAsB,EAAE,CAAC,CAAC,EAAE,EAAE;IACjC,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,cAAc,CAAC,EACnD,cAAc,CACf,CAAC;IACF,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,cAAc,CAAC,EACnD,YAAY,CACb,CAAC;IACF,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,oBAAoB,CAAC,aAAa,CAAC,EAAE,UAAU,CAAC,CAAC;IAC5E,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,yBAAyB,CAAC,EAC9D,aAAa,CACd,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,kFAAkF;AAClF,+EAA+E;AAC/E,mFAAmF;AACnF,8CAA8C;AAC9C,EAAE;AACF,oFAAoF;AACpF,8BAA8B;AAE9B,IAAA,aAAI,EAAC,gFAAgF,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACjG,2DAA2D;IAC3D,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,CAAC,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;QAChC,MAAM,QAAQ,GAAG,MAAM,cAAc,CAAC,oBAAoB,CACxD,MAAM,EACN,EAAE,EACF,cAAc,EACd,cAAc,EACd,oBAAa,CAAC,MAAM,EACpB,SAAS,CACV,CAAC;QACF,CAAC,CAAC,EAAE,CACF,QAAQ,EACR,wBAAwB,EACxB,wCAAwC,CACzC,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,mFAAmF,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACpG,sCAAsC;IACtC,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,CAAC,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;QAChC,MAAM,QAAQ,GAAG,MAAM,cAAc,CAAC,oBAAoB,CACxD,MAAM,EACN,CAAC,SAAS,CAAC,EACX,cAAc,EACd,cAAc,EACd,oBAAa,CAAC,MAAM,EACpB,SAAS,CACV,CAAC;QACF,CAAC,CAAC,EAAE,CACF,QAAQ;QACR,8FAA8F;QAC9F,eAAe,EACf,kEAAkE,CACnE,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,oFAAoF,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrG,sCAAsC;IACtC,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,CAAC,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;QAChC,MAAM,QAAQ,GAAG,MAAM,cAAc,CAAC,oBAAoB,CACxD,MAAM,EACN,CAAC,SAAS,CAAC,EACX,cAAc,EACd,cAAc,EACd,oBAAa,CAAC,MAAM,EACpB,QAAQ,CACT,CAAC;QACF,CAAC,CAAC,EAAE,CACF,QAAQ;QACR,8FAA8F;QAC9F,eAAe,EACf,kEAAkE,CACnE,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,sFAAsF,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvG,0CAA0C;IAC1C,MAAM,MAAM,GAAG,I
AAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,CAAC,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;QAChC,MAAM,QAAQ,GAAG,MAAM,cAAc,CAAC,oBAAoB,CACxD,MAAM,EACN,CAAC,SAAS,CAAC,EACX,cAAc,EACd,cAAc,EACd,oBAAa,CAAC,MAAM,EACpB,QAAQ,CACT,CAAC;QACF,CAAC,CAAC,EAAE,CACF,QAAQ,EACR,sBAAsB,EACtB,sEAAsE,CACvE,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||
4
lib/init-action-post-helper.js
generated
4
lib/init-action-post-helper.js
generated
|
|
@ -142,7 +142,9 @@ async function run(uploadAllAvailableDebugArtifacts, printDebugLogs, config, rep
|
|||
// Upload appropriate Actions artifacts for debugging
|
||||
if (config.debugMode) {
|
||||
logger.info("Debug mode is on. Uploading available database bundles and logs as Actions debugging artifacts...");
|
||||
await uploadAllAvailableDebugArtifacts(config, logger, features);
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
const version = await codeql.getVersion();
|
||||
await uploadAllAvailableDebugArtifacts(config, logger, version.version);
|
||||
await printDebugLogs(config);
|
||||
}
|
||||
if (actionsUtil.isSelfHostedRunner()) {
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
7
lib/start-proxy.js
generated
7
lib/start-proxy.js
generated
|
|
@ -2,6 +2,7 @@
|
|||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getCredentials = getCredentials;
|
||||
const languages_1 = require("./languages");
|
||||
const util_1 = require("./util");
|
||||
const LANGUAGE_TO_REGISTRY_TYPE = {
|
||||
java: "maven_repository",
|
||||
csharp: "nuget_feed",
|
||||
|
|
@ -44,13 +45,13 @@ function getCredentials(logger, registrySecrets, registriesCredentials, language
|
|||
catch {
|
||||
// Don't log the error since it might contain sensitive information.
|
||||
logger.error("Failed to parse the credentials data.");
|
||||
throw new Error("Invalid credentials format.");
|
||||
throw new util_1.ConfigurationError("Invalid credentials format.");
|
||||
}
|
||||
const out = [];
|
||||
for (const e of parsed) {
|
||||
if (e.url === undefined && e.host === undefined) {
|
||||
// The proxy needs one of these to work. If both are defined, the url has the precedence.
|
||||
throw new Error("Invalid credentials - must specify host or url");
|
||||
throw new util_1.ConfigurationError("Invalid credentials - must specify host or url");
|
||||
}
|
||||
// Filter credentials based on language if specified. `type` is the registry type.
|
||||
// E.g., "maven_feed" for Java/Kotlin, "nuget_repository" for C#.
|
||||
|
|
@ -66,7 +67,7 @@ function getCredentials(logger, registrySecrets, registriesCredentials, language
|
|||
!isPrintable(e.username) ||
|
||||
!isPrintable(e.password) ||
|
||||
!isPrintable(e.token)) {
|
||||
throw new Error("Invalid credentials - fields must contain only printable characters");
|
||||
throw new util_1.ConfigurationError("Invalid credentials - fields must contain only printable characters");
|
||||
}
|
||||
out.push({
|
||||
type: e.type,
|
||||
|
|
|
|||
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"start-proxy.js","sourceRoot":"","sources":["../src/start-proxy.ts"],"names":[],"mappings":";;AA6BA,wCAyEC;AAtGD,2CAAsD;AAYtD,MAAM,yBAAyB,GAA6B;IAC1D,IAAI,EAAE,kBAAkB;IACxB,MAAM,EAAE,YAAY;IACpB,UAAU,EAAE,cAAc;IAC1B,MAAM,EAAE,cAAc;IACtB,IAAI,EAAE,iBAAiB;IACvB,IAAI,EAAE,gBAAgB;IACtB,oFAAoF;IACpF,OAAO,EAAE,EAAE;IACX,GAAG,EAAE,EAAE;IACP,EAAE,EAAE,EAAE;IACN,KAAK,EAAE,EAAE;CACD,CAAC;AAEX,kEAAkE;AAClE,+DAA+D;AAC/D,gDAAgD;AAChD,SAAgB,cAAc,CAC5B,MAAc,EACd,eAAmC,EACnC,qBAAyC,EACzC,cAAkC;IAElC,MAAM,QAAQ,GAAG,cAAc,CAAC,CAAC,CAAC,IAAA,yBAAa,EAAC,cAAc,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IAC5E,MAAM,uBAAuB,GAAG,QAAQ;QACtC,CAAC,CAAC,yBAAyB,CAAC,QAAQ,CAAC;QACrC,CAAC,CAAC,SAAS,CAAC;IAEd,IAAI,cAAsB,CAAC;IAC3B,IAAI,qBAAqB,KAAK,SAAS,EAAE,CAAC;QACxC,MAAM,CAAC,IAAI,CAAC,qCAAqC,CAAC,CAAC;QACnD,cAAc,GAAG,MAAM,CAAC,IAAI,CAAC,qBAAqB,EAAE,QAAQ,CAAC,CAAC,QAAQ,EAAE,CAAC;IAC3E,CAAC;SAAM,IAAI,eAAe,KAAK,SAAS,EAAE,CAAC;QACzC,MAAM,CAAC,IAAI,CAAC,+BAA+B,CAAC,CAAC;QAC7C,cAAc,GAAG,eAAe,CAAC;IACnC,CAAC;SAAM,CAAC;QACN,MAAM,CAAC,IAAI,CAAC,yBAAyB,CAAC,CAAC;QACvC,OAAO,EAAE,CAAC;IACZ,CAAC;IAED,qCAAqC;IACrC,IAAI,MAAoB,CAAC;IACzB,IAAI,CAAC;QACH,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,cAAc,CAAiB,CAAC;IACtD,CAAC;IAAC,MAAM,CAAC;QACP,oEAAoE;QACpE,MAAM,CAAC,KAAK,CAAC,uCAAuC,CAAC,CAAC;QACtD,MAAM,IAAI,KAAK,CAAC,6BAA6B,CAAC,CAAC;IACjD,CAAC;IAED,MAAM,GAAG,GAAiB,EAAE,CAAC;IAC7B,KAAK,MAAM,CAAC,IAAI,MAAM,EAAE,CAAC;QACvB,IAAI,CAAC,CAAC,GAAG,KAAK,SAAS,IAAI,CAAC,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;YAChD,yFAAyF;YACzF,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAC;QACpE,CAAC;QAED,kFAAkF;QAClF,iEAAiE;QACjE,IAAI,uBAAuB,IAAI,CAAC,CAAC,IAAI,KAAK,uBAAuB,EAAE,CAAC;YAClE,SAAS;QACX,CAAC;QAED,MAAM,WAAW,GAAG,CAAC,GAAuB,EAAW,EAAE;YACvD,OAAO,GAAG,CAAC,CAAC,CAAC,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;QACjD,CAAC,CAAC;QAEF,IACE,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC;YACpB,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC;YACpB,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC;YACnB,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC;YACxB,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC;YACxB,CAAC,WAA
W,CAAC,CAAC,CAAC,KAAK,CAAC,EACrB,CAAC;YACD,MAAM,IAAI,KAAK,CACb,qEAAqE,CACtE,CAAC;QACJ,CAAC;QAED,GAAG,CAAC,IAAI,CAAC;YACP,IAAI,EAAE,CAAC,CAAC,IAAI;YACZ,IAAI,EAAE,CAAC,CAAC,IAAI;YACZ,GAAG,EAAE,CAAC,CAAC,GAAG;YACV,QAAQ,EAAE,CAAC,CAAC,QAAQ;YACpB,QAAQ,EAAE,CAAC,CAAC,QAAQ;YACpB,KAAK,EAAE,CAAC,CAAC,KAAK;SACf,CAAC,CAAC;IACL,CAAC;IACD,OAAO,GAAG,CAAC;AACb,CAAC"}
|
||||
{"version":3,"file":"start-proxy.js","sourceRoot":"","sources":["../src/start-proxy.ts"],"names":[],"mappings":";;AA8BA,wCA2EC;AAzGD,2CAAsD;AAEtD,iCAA4C;AAW5C,MAAM,yBAAyB,GAA6B;IAC1D,IAAI,EAAE,kBAAkB;IACxB,MAAM,EAAE,YAAY;IACpB,UAAU,EAAE,cAAc;IAC1B,MAAM,EAAE,cAAc;IACtB,IAAI,EAAE,iBAAiB;IACvB,IAAI,EAAE,gBAAgB;IACtB,oFAAoF;IACpF,OAAO,EAAE,EAAE;IACX,GAAG,EAAE,EAAE;IACP,EAAE,EAAE,EAAE;IACN,KAAK,EAAE,EAAE;CACD,CAAC;AAEX,kEAAkE;AAClE,+DAA+D;AAC/D,gDAAgD;AAChD,SAAgB,cAAc,CAC5B,MAAc,EACd,eAAmC,EACnC,qBAAyC,EACzC,cAAkC;IAElC,MAAM,QAAQ,GAAG,cAAc,CAAC,CAAC,CAAC,IAAA,yBAAa,EAAC,cAAc,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IAC5E,MAAM,uBAAuB,GAAG,QAAQ;QACtC,CAAC,CAAC,yBAAyB,CAAC,QAAQ,CAAC;QACrC,CAAC,CAAC,SAAS,CAAC;IAEd,IAAI,cAAsB,CAAC;IAC3B,IAAI,qBAAqB,KAAK,SAAS,EAAE,CAAC;QACxC,MAAM,CAAC,IAAI,CAAC,qCAAqC,CAAC,CAAC;QACnD,cAAc,GAAG,MAAM,CAAC,IAAI,CAAC,qBAAqB,EAAE,QAAQ,CAAC,CAAC,QAAQ,EAAE,CAAC;IAC3E,CAAC;SAAM,IAAI,eAAe,KAAK,SAAS,EAAE,CAAC;QACzC,MAAM,CAAC,IAAI,CAAC,+BAA+B,CAAC,CAAC;QAC7C,cAAc,GAAG,eAAe,CAAC;IACnC,CAAC;SAAM,CAAC;QACN,MAAM,CAAC,IAAI,CAAC,yBAAyB,CAAC,CAAC;QACvC,OAAO,EAAE,CAAC;IACZ,CAAC;IAED,qCAAqC;IACrC,IAAI,MAAoB,CAAC;IACzB,IAAI,CAAC;QACH,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,cAAc,CAAiB,CAAC;IACtD,CAAC;IAAC,MAAM,CAAC;QACP,oEAAoE;QACpE,MAAM,CAAC,KAAK,CAAC,uCAAuC,CAAC,CAAC;QACtD,MAAM,IAAI,yBAAkB,CAAC,6BAA6B,CAAC,CAAC;IAC9D,CAAC;IAED,MAAM,GAAG,GAAiB,EAAE,CAAC;IAC7B,KAAK,MAAM,CAAC,IAAI,MAAM,EAAE,CAAC;QACvB,IAAI,CAAC,CAAC,GAAG,KAAK,SAAS,IAAI,CAAC,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;YAChD,yFAAyF;YACzF,MAAM,IAAI,yBAAkB,CAC1B,gDAAgD,CACjD,CAAC;QACJ,CAAC;QAED,kFAAkF;QAClF,iEAAiE;QACjE,IAAI,uBAAuB,IAAI,CAAC,CAAC,IAAI,KAAK,uBAAuB,EAAE,CAAC;YAClE,SAAS;QACX,CAAC;QAED,MAAM,WAAW,GAAG,CAAC,GAAuB,EAAW,EAAE;YACvD,OAAO,GAAG,CAAC,CAAC,CAAC,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;QACjD,CAAC,CAAC;QAEF,IACE,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC;YACpB,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC;YACpB,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC;YACnB,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC;YACxB,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,C
AAC;YACxB,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,EACrB,CAAC;YACD,MAAM,IAAI,yBAAkB,CAC1B,qEAAqE,CACtE,CAAC;QACJ,CAAC;QAED,GAAG,CAAC,IAAI,CAAC;YACP,IAAI,EAAE,CAAC,CAAC,IAAI;YACZ,IAAI,EAAE,CAAC,CAAC,IAAI;YACZ,GAAG,EAAE,CAAC,CAAC,GAAG;YACV,QAAQ,EAAE,CAAC,CAAC,QAAQ;YACpB,QAAQ,EAAE,CAAC,CAAC,QAAQ;YACpB,KAAK,EAAE,CAAC,CAAC,KAAK;SACf,CAAC,CAAC;IACL,CAAC;IACD,OAAO,GAAG,CAAC;AACb,CAAC"}
|
||||
53
lib/tools-features.js
generated
53
lib/tools-features.js
generated
|
|
@ -1,7 +1,42 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ToolsFeature = void 0;
|
||||
exports.SafeArtifactUploadVersion = exports.ToolsFeature = void 0;
|
||||
exports.isSupportedToolsFeature = isSupportedToolsFeature;
|
||||
exports.isSafeArtifactUpload = isSafeArtifactUpload;
|
||||
const semver = __importStar(require("semver"));
|
||||
var ToolsFeature;
|
||||
(function (ToolsFeature) {
|
||||
ToolsFeature["AnalysisSummaryV2IsDefault"] = "analysisSummaryV2Default";
|
||||
|
|
@ -25,4 +60,20 @@ var ToolsFeature;
|
|||
function isSupportedToolsFeature(versionInfo, feature) {
|
||||
return !!versionInfo.features && versionInfo.features[feature];
|
||||
}
|
||||
exports.SafeArtifactUploadVersion = "2.20.3";
|
||||
/**
|
||||
* The first version of the CodeQL CLI where artifact upload is safe to use
|
||||
* for failed runs. This is not really a feature flag, but it is easiest to
|
||||
* model the behavior as a feature flag.
|
||||
*
|
||||
* This was not captured in a tools feature, so we need to use semver.
|
||||
*
|
||||
* @param codeQlVersion The version of the CodeQL CLI to check. If not provided, it is assumed to be safe.
|
||||
* @returns True if artifact upload is safe to use for failed runs or false otherwise.
|
||||
*/
|
||||
function isSafeArtifactUpload(codeQlVersion) {
|
||||
return !codeQlVersion
|
||||
? true
|
||||
: semver.gte(codeQlVersion, exports.SafeArtifactUploadVersion);
|
||||
}
|
||||
//# sourceMappingURL=tools-features.js.map
|
||||
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"tools-features.js","sourceRoot":"","sources":["../src/tools-features.ts"],"names":[],"mappings":";;;AAsBA,0DAKC;AAzBD,IAAY,YAWX;AAXD,WAAY,YAAY;IACtB,uEAAuD,CAAA;IACvD,mDAAmC,CAAA;IACnC,qHAAqG,CAAA;IACrG,+FAA+E,CAAA;IAC/E,yFAAyE,CAAA;IACzE,iEAAiD,CAAA;IACjD,qEAAqD,CAAA;IACrD,mFAAmE,CAAA;IACnE,iDAAiC,CAAA;IACjC,uFAAuE,CAAA;AACzE,CAAC,EAXW,YAAY,4BAAZ,YAAY,QAWvB;AAED;;;;;;GAMG;AACH,SAAgB,uBAAuB,CACrC,WAAwB,EACxB,OAAqB;IAErB,OAAO,CAAC,CAAC,WAAW,CAAC,QAAQ,IAAI,WAAW,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;AACjE,CAAC"}
|
||||
{"version":3,"file":"tools-features.js","sourceRoot":"","sources":["../src/tools-features.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAwBA,0DAKC;AAcD,oDAIC;AA/CD,+CAAiC;AAIjC,IAAY,YAWX;AAXD,WAAY,YAAY;IACtB,uEAAuD,CAAA;IACvD,mDAAmC,CAAA;IACnC,qHAAqG,CAAA;IACrG,+FAA+E,CAAA;IAC/E,yFAAyE,CAAA;IACzE,iEAAiD,CAAA;IACjD,qEAAqD,CAAA;IACrD,mFAAmE,CAAA;IACnE,iDAAiC,CAAA;IACjC,uFAAuE,CAAA;AACzE,CAAC,EAXW,YAAY,4BAAZ,YAAY,QAWvB;AAED;;;;;;GAMG;AACH,SAAgB,uBAAuB,CACrC,WAAwB,EACxB,OAAqB;IAErB,OAAO,CAAC,CAAC,WAAW,CAAC,QAAQ,IAAI,WAAW,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;AACjE,CAAC;AAEY,QAAA,yBAAyB,GAAG,QAAQ,CAAC;AAElD;;;;;;;;;GASG;AACH,SAAgB,oBAAoB,CAAC,aAAsB;IACzD,OAAO,CAAC,aAAa;QACnB,CAAC,CAAC,IAAI;QACN,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,iCAAyB,CAAC,CAAC;AAC3D,CAAC"}
|
||||
5
lib/upload-sarif-action-post.js
generated
5
lib/upload-sarif-action-post.js
generated
|
|
@ -59,7 +59,10 @@ async function runWrapper() {
|
|||
core.warning(`Did not upload debug artifacts because cannot determine the GitHub variant running.`);
|
||||
return;
|
||||
}
|
||||
await (0, logging_1.withGroup)("Uploading combined SARIF debug artifact", () => debugArtifacts.uploadCombinedSarifArtifacts(logger, gitHubVersion.type));
|
||||
await (0, logging_1.withGroup)("Uploading combined SARIF debug artifact", () => debugArtifacts.uploadCombinedSarifArtifacts(logger, gitHubVersion.type,
|
||||
// The codeqlVersion is not applicable for uploading non-codeql sarif.
|
||||
// We can assume all versions are safe to upload.
|
||||
undefined));
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
|
|
|
|||
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"upload-sarif-action-post.js","sourceRoot":"","sources":["../src/upload-sarif-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAAgD;AAChD,kEAAoD;AACpD,+CAAuC;AACvC,uCAAwD;AACxD,iCAAoE;AAEpE,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,6CAA6C;QAC7C,WAAW,CAAC,aAAa,EAAE,CAAC;QAC5B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,kFAAkF;QAClF,mFAAmF;QACnF,IAAI,OAAO,CAAC,GAAG,CAAC,oBAAM,CAAC,mBAAmB,CAAC,KAAK,MAAM,EAAE,CAAC;YACvD,IAAI,aAAa,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBACrC,IAAI,CAAC,OAAO,CACV,qFAAqF,CACtF,CAAC;gBACF,OAAO;YACT,CAAC;YACD,MAAM,IAAA,mBAAS,EAAC,yCAAyC,EAAE,GAAG,EAAE,CAC9D,cAAc,CAAC,4BAA4B,CAAC,MAAM,EAAE,aAAa,CAAC,IAAI,CAAC,CACxE,CAAC;QACJ,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,yCAAyC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CAClE,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||
{"version":3,"file":"upload-sarif-action-post.js","sourceRoot":"","sources":["../src/upload-sarif-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAAgD;AAChD,kEAAoD;AACpD,+CAAuC;AACvC,uCAAwD;AACxD,iCAAoE;AAEpE,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,6CAA6C;QAC7C,WAAW,CAAC,aAAa,EAAE,CAAC;QAC5B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,kFAAkF;QAClF,mFAAmF;QACnF,IAAI,OAAO,CAAC,GAAG,CAAC,oBAAM,CAAC,mBAAmB,CAAC,KAAK,MAAM,EAAE,CAAC;YACvD,IAAI,aAAa,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBACrC,IAAI,CAAC,OAAO,CACV,qFAAqF,CACtF,CAAC;gBACF,OAAO;YACT,CAAC;YACD,MAAM,IAAA,mBAAS,EAAC,yCAAyC,EAAE,GAAG,EAAE,CAC9D,cAAc,CAAC,4BAA4B,CACzC,MAAM,EACN,aAAa,CAAC,IAAI;YAClB,sEAAsE;YACtE,iDAAiD;YACjD,SAAS,CACV,CACF,CAAC;QACJ,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,yCAAyC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CAClE,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||
247
node_modules/.package-lock.json
generated
vendored
247
node_modules/.package-lock.json
generated
vendored
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "codeql",
|
||||
"version": "3.28.5",
|
||||
"version": "3.28.6",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
|
|
@ -638,9 +638,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@eslint/js": {
|
||||
"version": "9.18.0",
|
||||
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.18.0.tgz",
|
||||
"integrity": "sha512-fK6L7rxcq6/z+AaQMtiFTkvbHkBLNlwyRxHpKawP0x3u9+NC6MQTnFW+AdpwC6gfHTW0051cokQgtTN2FqlxQA==",
|
||||
"version": "9.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.19.0.tgz",
|
||||
"integrity": "sha512-rbq9/g38qjfqFLOVPvwjIvFFdNziEC5S65jmjPw5r6A//QH+W91akh9irMwjDN8zKUTak6W9EsAv4m/7Wnw0UQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
|
|
@ -793,6 +793,24 @@
|
|||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/@mswjs/interceptors": {
|
||||
"version": "0.37.5",
|
||||
"resolved": "https://registry.npmjs.org/@mswjs/interceptors/-/interceptors-0.37.5.tgz",
|
||||
"integrity": "sha512-AAwRb5vXFcY4L+FvZ7LZusDuZ0vEe0Zm8ohn1FM6/X7A3bj4mqmkAcGRWuvC2JwSygNwHAAmMnAI73vPHeqsHA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@open-draft/deferred-promise": "^2.2.0",
|
||||
"@open-draft/logger": "^0.3.0",
|
||||
"@open-draft/until": "^2.0.0",
|
||||
"is-node-process": "^1.2.0",
|
||||
"outvariant": "^1.4.3",
|
||||
"strict-event-emitter": "^0.5.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
}
|
||||
},
|
||||
"node_modules/@nodelib/fs.scandir": {
|
||||
"version": "2.1.5",
|
||||
"license": "MIT",
|
||||
|
|
@ -1061,6 +1079,31 @@
|
|||
"integrity": "sha512-izFjMJ1sir0jn0ldEKhZ7xegCTj/ObmEDlEfpFrx4k/JyZSMRHbO3/rBwgE7f3m2DHt+RrNGIVw4wSmwnm3t/g==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@open-draft/deferred-promise": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@open-draft/deferred-promise/-/deferred-promise-2.2.0.tgz",
|
||||
"integrity": "sha512-CecwLWx3rhxVQF6V4bAgPS5t+So2sTbPgAzafKkVizyi7tlwpcFpdFqq+wqF2OwNBmqFuu6tOyouTuxgpMfzmA==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@open-draft/logger": {
|
||||
"version": "0.3.0",
|
||||
"resolved": "https://registry.npmjs.org/@open-draft/logger/-/logger-0.3.0.tgz",
|
||||
"integrity": "sha512-X2g45fzhxH238HKO4xbSr7+wBS8Fvw6ixhTDuvLd5mqh6bJJCFAPwU9mPDxbcrRtfxv4u5IHCEH77BmxvXmmxQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"is-node-process": "^1.2.0",
|
||||
"outvariant": "^1.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@open-draft/until": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@open-draft/until/-/until-2.1.0.tgz",
|
||||
"integrity": "sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@pkgjs/parseargs": {
|
||||
"version": "0.11.0",
|
||||
"resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
|
||||
|
|
@ -1292,17 +1335,17 @@
|
|||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@typescript-eslint/eslint-plugin": {
|
||||
"version": "8.21.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.21.0.tgz",
|
||||
"integrity": "sha512-eTH+UOR4I7WbdQnG4Z48ebIA6Bgi7WO8HvFEneeYBxG8qCOYgTOFPSg6ek9ITIDvGjDQzWHcoWHCDO2biByNzA==",
|
||||
"version": "8.22.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.22.0.tgz",
|
||||
"integrity": "sha512-4Uta6REnz/xEJMvwf72wdUnC3rr4jAQf5jnTkeRQ9b6soxLxhDEbS/pfMPoJLDfFPNVRdryqWUIV/2GZzDJFZw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@eslint-community/regexpp": "^4.10.0",
|
||||
"@typescript-eslint/scope-manager": "8.21.0",
|
||||
"@typescript-eslint/type-utils": "8.21.0",
|
||||
"@typescript-eslint/utils": "8.21.0",
|
||||
"@typescript-eslint/visitor-keys": "8.21.0",
|
||||
"@typescript-eslint/scope-manager": "8.22.0",
|
||||
"@typescript-eslint/type-utils": "8.22.0",
|
||||
"@typescript-eslint/utils": "8.22.0",
|
||||
"@typescript-eslint/visitor-keys": "8.22.0",
|
||||
"graphemer": "^1.4.0",
|
||||
"ignore": "^5.3.1",
|
||||
"natural-compare": "^1.4.0",
|
||||
|
|
@ -1322,14 +1365,14 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": {
|
||||
"version": "8.21.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.21.0.tgz",
|
||||
"integrity": "sha512-G3IBKz0/0IPfdeGRMbp+4rbjfSSdnGkXsM/pFZA8zM9t9klXDnB/YnKOBQ0GoPmoROa4bCq2NeHgJa5ydsQ4mA==",
|
||||
"version": "8.22.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.22.0.tgz",
|
||||
"integrity": "sha512-/lwVV0UYgkj7wPSw0o8URy6YI64QmcOdwHuGuxWIYznO6d45ER0wXUbksr9pYdViAofpUCNJx/tAzNukgvaaiQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.21.0",
|
||||
"@typescript-eslint/visitor-keys": "8.21.0"
|
||||
"@typescript-eslint/types": "8.22.0",
|
||||
"@typescript-eslint/visitor-keys": "8.22.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
|
|
@ -1340,9 +1383,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": {
|
||||
"version": "8.21.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.21.0.tgz",
|
||||
"integrity": "sha512-PAL6LUuQwotLW2a8VsySDBwYMm129vFm4tMVlylzdoTybTHaAi0oBp7Ac6LhSrHHOdLM3efH+nAR6hAWoMF89A==",
|
||||
"version": "8.22.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.22.0.tgz",
|
||||
"integrity": "sha512-0S4M4baNzp612zwpD4YOieP3VowOARgK2EkN/GBn95hpyF8E2fbMT55sRHWBq+Huaqk3b3XK+rxxlM8sPgGM6A==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
|
|
@ -1354,14 +1397,14 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/typescript-estree": {
|
||||
"version": "8.21.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.21.0.tgz",
|
||||
"integrity": "sha512-x+aeKh/AjAArSauz0GiQZsjT8ciadNMHdkUSwBB9Z6PrKc/4knM4g3UfHml6oDJmKC88a6//cdxnO/+P2LkMcg==",
|
||||
"version": "8.22.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.22.0.tgz",
|
||||
"integrity": "sha512-SJX99NAS2ugGOzpyhMza/tX+zDwjvwAtQFLsBo3GQxiGcvaKlqGBkmZ+Y1IdiSi9h4Q0Lr5ey+Cp9CGWNY/F/w==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.21.0",
|
||||
"@typescript-eslint/visitor-keys": "8.21.0",
|
||||
"@typescript-eslint/types": "8.22.0",
|
||||
"@typescript-eslint/visitor-keys": "8.22.0",
|
||||
"debug": "^4.3.4",
|
||||
"fast-glob": "^3.3.2",
|
||||
"is-glob": "^4.0.3",
|
||||
|
|
@ -1381,16 +1424,16 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/utils": {
|
||||
"version": "8.21.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.21.0.tgz",
|
||||
"integrity": "sha512-xcXBfcq0Kaxgj7dwejMbFyq7IOHgpNMtVuDveK7w3ZGwG9owKzhALVwKpTF2yrZmEwl9SWdetf3fxNzJQaVuxw==",
|
||||
"version": "8.22.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.22.0.tgz",
|
||||
"integrity": "sha512-T8oc1MbF8L+Bk2msAvCUzjxVB2Z2f+vXYfcucE2wOmYs7ZUwco5Ep0fYZw8quNwOiw9K8GYVL+Kgc2pETNTLOg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@eslint-community/eslint-utils": "^4.4.0",
|
||||
"@typescript-eslint/scope-manager": "8.21.0",
|
||||
"@typescript-eslint/types": "8.21.0",
|
||||
"@typescript-eslint/typescript-estree": "8.21.0"
|
||||
"@typescript-eslint/scope-manager": "8.22.0",
|
||||
"@typescript-eslint/types": "8.22.0",
|
||||
"@typescript-eslint/typescript-estree": "8.22.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
|
|
@ -1405,13 +1448,13 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": {
|
||||
"version": "8.21.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.21.0.tgz",
|
||||
"integrity": "sha512-BkLMNpdV6prozk8LlyK/SOoWLmUFi+ZD+pcqti9ILCbVvHGk1ui1g4jJOc2WDLaeExz2qWwojxlPce5PljcT3w==",
|
||||
"version": "8.22.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.22.0.tgz",
|
||||
"integrity": "sha512-AWpYAXnUgvLNabGTy3uBylkgZoosva/miNd1I8Bz3SjotmQPbVqhO4Cczo8AsZ44XVErEBPr/CRSgaj8sG7g0w==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.21.0",
|
||||
"@typescript-eslint/types": "8.22.0",
|
||||
"eslint-visitor-keys": "^4.2.0"
|
||||
},
|
||||
"engines": {
|
||||
|
|
@ -1475,16 +1518,16 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser": {
|
||||
"version": "8.21.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.21.0.tgz",
|
||||
"integrity": "sha512-Wy+/sdEH9kI3w9civgACwabHbKl+qIOu0uFZ9IMKzX3Jpv9og0ZBJrZExGrPpFAY7rWsXuxs5e7CPPP17A4eYA==",
|
||||
"version": "8.22.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.22.0.tgz",
|
||||
"integrity": "sha512-MqtmbdNEdoNxTPzpWiWnqNac54h8JDAmkWtJExBVVnSrSmi9z+sZUt0LfKqk9rjqmKOIeRhO4fHHJ1nQIjduIQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/scope-manager": "8.21.0",
|
||||
"@typescript-eslint/types": "8.21.0",
|
||||
"@typescript-eslint/typescript-estree": "8.21.0",
|
||||
"@typescript-eslint/visitor-keys": "8.21.0",
|
||||
"@typescript-eslint/scope-manager": "8.22.0",
|
||||
"@typescript-eslint/types": "8.22.0",
|
||||
"@typescript-eslint/typescript-estree": "8.22.0",
|
||||
"@typescript-eslint/visitor-keys": "8.22.0",
|
||||
"debug": "^4.3.4"
|
||||
},
|
||||
"engines": {
|
||||
|
|
@ -1500,14 +1543,14 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": {
|
||||
"version": "8.21.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.21.0.tgz",
|
||||
"integrity": "sha512-G3IBKz0/0IPfdeGRMbp+4rbjfSSdnGkXsM/pFZA8zM9t9klXDnB/YnKOBQ0GoPmoROa4bCq2NeHgJa5ydsQ4mA==",
|
||||
"version": "8.22.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.22.0.tgz",
|
||||
"integrity": "sha512-/lwVV0UYgkj7wPSw0o8URy6YI64QmcOdwHuGuxWIYznO6d45ER0wXUbksr9pYdViAofpUCNJx/tAzNukgvaaiQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.21.0",
|
||||
"@typescript-eslint/visitor-keys": "8.21.0"
|
||||
"@typescript-eslint/types": "8.22.0",
|
||||
"@typescript-eslint/visitor-keys": "8.22.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
|
|
@ -1518,9 +1561,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": {
|
||||
"version": "8.21.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.21.0.tgz",
|
||||
"integrity": "sha512-PAL6LUuQwotLW2a8VsySDBwYMm129vFm4tMVlylzdoTybTHaAi0oBp7Ac6LhSrHHOdLM3efH+nAR6hAWoMF89A==",
|
||||
"version": "8.22.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.22.0.tgz",
|
||||
"integrity": "sha512-0S4M4baNzp612zwpD4YOieP3VowOARgK2EkN/GBn95hpyF8E2fbMT55sRHWBq+Huaqk3b3XK+rxxlM8sPgGM6A==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
|
|
@ -1532,14 +1575,14 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": {
|
||||
"version": "8.21.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.21.0.tgz",
|
||||
"integrity": "sha512-x+aeKh/AjAArSauz0GiQZsjT8ciadNMHdkUSwBB9Z6PrKc/4knM4g3UfHml6oDJmKC88a6//cdxnO/+P2LkMcg==",
|
||||
"version": "8.22.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.22.0.tgz",
|
||||
"integrity": "sha512-SJX99NAS2ugGOzpyhMza/tX+zDwjvwAtQFLsBo3GQxiGcvaKlqGBkmZ+Y1IdiSi9h4Q0Lr5ey+Cp9CGWNY/F/w==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.21.0",
|
||||
"@typescript-eslint/visitor-keys": "8.21.0",
|
||||
"@typescript-eslint/types": "8.22.0",
|
||||
"@typescript-eslint/visitor-keys": "8.22.0",
|
||||
"debug": "^4.3.4",
|
||||
"fast-glob": "^3.3.2",
|
||||
"is-glob": "^4.0.3",
|
||||
|
|
@ -1559,13 +1602,13 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": {
|
||||
"version": "8.21.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.21.0.tgz",
|
||||
"integrity": "sha512-BkLMNpdV6prozk8LlyK/SOoWLmUFi+ZD+pcqti9ILCbVvHGk1ui1g4jJOc2WDLaeExz2qWwojxlPce5PljcT3w==",
|
||||
"version": "8.22.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.22.0.tgz",
|
||||
"integrity": "sha512-AWpYAXnUgvLNabGTy3uBylkgZoosva/miNd1I8Bz3SjotmQPbVqhO4Cczo8AsZ44XVErEBPr/CRSgaj8sG7g0w==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.21.0",
|
||||
"@typescript-eslint/types": "8.22.0",
|
||||
"eslint-visitor-keys": "^4.2.0"
|
||||
},
|
||||
"engines": {
|
||||
|
|
@ -1647,14 +1690,14 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/type-utils": {
|
||||
"version": "8.21.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.21.0.tgz",
|
||||
"integrity": "sha512-95OsL6J2BtzoBxHicoXHxgk3z+9P3BEcQTpBKriqiYzLKnM2DeSqs+sndMKdamU8FosiadQFT3D+BSL9EKnAJQ==",
|
||||
"version": "8.22.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.22.0.tgz",
|
||||
"integrity": "sha512-NzE3aB62fDEaGjaAYZE4LH7I1MUwHooQ98Byq0G0y3kkibPJQIXVUspzlFOmOfHhiDLwKzMlWxaNv+/qcZurJA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/typescript-estree": "8.21.0",
|
||||
"@typescript-eslint/utils": "8.21.0",
|
||||
"@typescript-eslint/typescript-estree": "8.22.0",
|
||||
"@typescript-eslint/utils": "8.22.0",
|
||||
"debug": "^4.3.4",
|
||||
"ts-api-utils": "^2.0.0"
|
||||
},
|
||||
|
|
@ -1671,14 +1714,14 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/scope-manager": {
|
||||
"version": "8.21.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.21.0.tgz",
|
||||
"integrity": "sha512-G3IBKz0/0IPfdeGRMbp+4rbjfSSdnGkXsM/pFZA8zM9t9klXDnB/YnKOBQ0GoPmoROa4bCq2NeHgJa5ydsQ4mA==",
|
||||
"version": "8.22.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.22.0.tgz",
|
||||
"integrity": "sha512-/lwVV0UYgkj7wPSw0o8URy6YI64QmcOdwHuGuxWIYznO6d45ER0wXUbksr9pYdViAofpUCNJx/tAzNukgvaaiQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.21.0",
|
||||
"@typescript-eslint/visitor-keys": "8.21.0"
|
||||
"@typescript-eslint/types": "8.22.0",
|
||||
"@typescript-eslint/visitor-keys": "8.22.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
|
|
@ -1689,9 +1732,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": {
|
||||
"version": "8.21.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.21.0.tgz",
|
||||
"integrity": "sha512-PAL6LUuQwotLW2a8VsySDBwYMm129vFm4tMVlylzdoTybTHaAi0oBp7Ac6LhSrHHOdLM3efH+nAR6hAWoMF89A==",
|
||||
"version": "8.22.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.22.0.tgz",
|
||||
"integrity": "sha512-0S4M4baNzp612zwpD4YOieP3VowOARgK2EkN/GBn95hpyF8E2fbMT55sRHWBq+Huaqk3b3XK+rxxlM8sPgGM6A==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
|
|
@ -1703,14 +1746,14 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": {
|
||||
"version": "8.21.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.21.0.tgz",
|
||||
"integrity": "sha512-x+aeKh/AjAArSauz0GiQZsjT8ciadNMHdkUSwBB9Z6PrKc/4knM4g3UfHml6oDJmKC88a6//cdxnO/+P2LkMcg==",
|
||||
"version": "8.22.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.22.0.tgz",
|
||||
"integrity": "sha512-SJX99NAS2ugGOzpyhMza/tX+zDwjvwAtQFLsBo3GQxiGcvaKlqGBkmZ+Y1IdiSi9h4Q0Lr5ey+Cp9CGWNY/F/w==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.21.0",
|
||||
"@typescript-eslint/visitor-keys": "8.21.0",
|
||||
"@typescript-eslint/types": "8.22.0",
|
||||
"@typescript-eslint/visitor-keys": "8.22.0",
|
||||
"debug": "^4.3.4",
|
||||
"fast-glob": "^3.3.2",
|
||||
"is-glob": "^4.0.3",
|
||||
|
|
@ -1730,16 +1773,16 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/utils": {
|
||||
"version": "8.21.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.21.0.tgz",
|
||||
"integrity": "sha512-xcXBfcq0Kaxgj7dwejMbFyq7IOHgpNMtVuDveK7w3ZGwG9owKzhALVwKpTF2yrZmEwl9SWdetf3fxNzJQaVuxw==",
|
||||
"version": "8.22.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.22.0.tgz",
|
||||
"integrity": "sha512-T8oc1MbF8L+Bk2msAvCUzjxVB2Z2f+vXYfcucE2wOmYs7ZUwco5Ep0fYZw8quNwOiw9K8GYVL+Kgc2pETNTLOg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@eslint-community/eslint-utils": "^4.4.0",
|
||||
"@typescript-eslint/scope-manager": "8.21.0",
|
||||
"@typescript-eslint/types": "8.21.0",
|
||||
"@typescript-eslint/typescript-estree": "8.21.0"
|
||||
"@typescript-eslint/scope-manager": "8.22.0",
|
||||
"@typescript-eslint/types": "8.22.0",
|
||||
"@typescript-eslint/typescript-estree": "8.22.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
|
|
@ -1754,13 +1797,13 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": {
|
||||
"version": "8.21.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.21.0.tgz",
|
||||
"integrity": "sha512-BkLMNpdV6prozk8LlyK/SOoWLmUFi+ZD+pcqti9ILCbVvHGk1ui1g4jJOc2WDLaeExz2qWwojxlPce5PljcT3w==",
|
||||
"version": "8.22.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.22.0.tgz",
|
||||
"integrity": "sha512-AWpYAXnUgvLNabGTy3uBylkgZoosva/miNd1I8Bz3SjotmQPbVqhO4Cczo8AsZ44XVErEBPr/CRSgaj8sG7g0w==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.21.0",
|
||||
"@typescript-eslint/types": "8.22.0",
|
||||
"eslint-visitor-keys": "^4.2.0"
|
||||
},
|
||||
"engines": {
|
||||
|
|
@ -5160,6 +5203,13 @@
|
|||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/is-node-process": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/is-node-process/-/is-node-process-1.2.0.tgz",
|
||||
"integrity": "sha512-Vg4o6/fqPxIjtxgUH5QLJhwZ7gW5diGCVlXpuUfELC62CuxM1iHcRe51f2W1FDy04Ai4KJkagKjx3XaqyfRKXw==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/is-number": {
|
||||
"version": "7.0.0",
|
||||
"license": "MIT",
|
||||
|
|
@ -5803,17 +5853,18 @@
|
|||
"license": "0BSD"
|
||||
},
|
||||
"node_modules/nock": {
|
||||
"version": "13.5.6",
|
||||
"resolved": "https://registry.npmjs.org/nock/-/nock-13.5.6.tgz",
|
||||
"integrity": "sha512-o2zOYiCpzRqSzPj0Zt/dQ/DqZeYoaQ7TUonc/xUPjCGl9WeHpNbxgVvOquXYAaJzI0M9BXV3HTzG0p8IUAbBTQ==",
|
||||
"version": "14.0.0",
|
||||
"resolved": "https://registry.npmjs.org/nock/-/nock-14.0.0.tgz",
|
||||
"integrity": "sha512-3Z2ZoZoYTR/y2I+NI16+6IzfZFKBX7MrADtoBAm7v/QKqxQUhKw+Dh+847PPS1j/FDutjfIXfrh3CJF74yITWg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"debug": "^4.1.0",
|
||||
"@mswjs/interceptors": "^0.37.3",
|
||||
"json-stringify-safe": "^5.0.1",
|
||||
"propagate": "^2.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 10.13"
|
||||
"node": ">= 18"
|
||||
}
|
||||
},
|
||||
"node_modules/node-fetch": {
|
||||
|
|
@ -6041,6 +6092,13 @@
|
|||
"node": ">= 0.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/outvariant": {
|
||||
"version": "1.4.3",
|
||||
"resolved": "https://registry.npmjs.org/outvariant/-/outvariant-1.4.3.tgz",
|
||||
"integrity": "sha512-+Sl2UErvtsoajRDKCE5/dBz4DIvHXQQnAxtQTF04OJxY0+DyZXSo5P5Bb7XYWOh81syohlYL24hbDwxedPUJCA==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/p-defer": {
|
||||
"version": "1.0.0",
|
||||
"dev": true,
|
||||
|
|
@ -6885,6 +6943,13 @@
|
|||
"bare-events": "^2.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/strict-event-emitter": {
|
||||
"version": "0.5.1",
|
||||
"resolved": "https://registry.npmjs.org/strict-event-emitter/-/strict-event-emitter-0.5.1.tgz",
|
||||
"integrity": "sha512-vMgjE/GGEPEFnhFub6pa4FmJBRBVOLpIII2hvCZ8Kzb7K0hlHo7mQv6xYrBvCL2LtAIBwFUK8wvuJgTVSQ5MFQ==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/string_decoder": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
|
||||
|
|
|
|||
2
node_modules/@eslint/js/package.json
generated
vendored
2
node_modules/@eslint/js/package.json
generated
vendored
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@eslint/js",
|
||||
"version": "9.18.0",
|
||||
"version": "9.19.0",
|
||||
"description": "ESLint JavaScript language implementation",
|
||||
"main": "./src/index.js",
|
||||
"types": "./types/index.d.ts",
|
||||
|
|
|
|||
6
node_modules/@mswjs/interceptors/ClientRequest/package.json
generated
vendored
Normal file
6
node_modules/@mswjs/interceptors/ClientRequest/package.json
generated
vendored
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
{
|
||||
"main": "../lib/node/interceptors/ClientRequest/index.js",
|
||||
"module": "../lib/node/interceptors/ClientRequest/index.mjs",
|
||||
"browser": null,
|
||||
"types": "../lib/node/interceptors/ClientRequest/index.d.ts"
|
||||
}
|
||||
9
node_modules/@mswjs/interceptors/LICENSE.md
generated
vendored
Normal file
9
node_modules/@mswjs/interceptors/LICENSE.md
generated
vendored
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2018–present Artem Zakharchenko
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
623
node_modules/@mswjs/interceptors/README.md
generated
vendored
Normal file
623
node_modules/@mswjs/interceptors/README.md
generated
vendored
Normal file
|
|
@ -0,0 +1,623 @@
|
|||
[](https://www.npmjs.com/package/@mswjs/interceptors)
|
||||
|
||||
# `@mswjs/interceptors`
|
||||
|
||||
Low-level network interception library.
|
||||
|
||||
This library supports intercepting the following protocols:
|
||||
|
||||
- HTTP (via the `http` module, `XMLHttpRequest`, or `globalThis.fetch`);
|
||||
- [WebSocket](#websocket-interception) (the `WebSocket` class in Undici and in the browser).
|
||||
|
||||
## Motivation
|
||||
|
||||
While there are a lot of network mocking libraries, they tend to use request interception as an implementation detail, giving you a high-level API that includes request matching, timeouts, recording, and so forth.
|
||||
|
||||
This library is a barebones implementation that provides as little abstraction as possible to execute arbitrary logic upon any request. It's primarily designed as an underlying component for high-level API mocking solutions such as [Mock Service Worker](https://github.com/mswjs/msw).
|
||||
|
||||
### How is this library different?
|
||||
|
||||
A traditional API mocking implementation in Node.js looks roughly like this:
|
||||
|
||||
```js
|
||||
import http from 'node:http'
|
||||
|
||||
// Store the original request function.
|
||||
const originalHttpRequest = http.request
|
||||
|
||||
// Override the request function entirely.
|
||||
http.request = function (...args) {
|
||||
// Decide if the outgoing request matches a predicate.
|
||||
if (predicate(args)) {
|
||||
// If it does, never create a request, respond to it
|
||||
// using the mocked response from this blackbox.
|
||||
return coerceToResponse.bind(this, mock)
|
||||
}
|
||||
|
||||
// Otherwise, construct the original request
|
||||
// and perform it as-is.
|
||||
return originalHttpRequest(...args)
|
||||
}
|
||||
```
|
||||
|
||||
The core philosophy of Interceptors is to _run as much of the underlying network code as possible_. Strange for a network mocking library, isn't it? Turns out, respecting the system's integrity and executing more of the network code leads to more resilient tests and also helps to uncover bugs in the code that would otherwise go unnoticed.
|
||||
|
||||
Interceptors heavily rely on _class extension_ instead of function and module overrides. By extending the native network code, it can surgically insert the interception and mocking pieces only where necessary, leaving the rest of the system intact.
|
||||
|
||||
```js
|
||||
class XMLHttpRequestProxy extends XMLHttpRequest {
|
||||
async send() {
|
||||
// Call the request listeners and see if any of them
|
||||
// returns a mocked response for this request.
|
||||
const mockedResponse = await waitForRequestListeners({ request })
|
||||
|
||||
// If there is a mocked response, use it. This actually
|
||||
// transitions the XMLHttpRequest instance into the correct
|
||||
// response state (below is a simplified illustration).
|
||||
if (mockedResponse) {
|
||||
// Handle the response headers.
|
||||
this.request.status = mockedResponse.status
|
||||
this.request.statusText = mockedResponse.statusText
|
||||
this.request.responseUrl = mockedResponse.url
|
||||
this.readyState = 2
|
||||
this.trigger('readystatechange')
|
||||
|
||||
// Start streaming the response body.
|
||||
this.trigger('loadstart')
|
||||
this.readyState = 3
|
||||
this.trigger('readystatechange')
|
||||
await streamResponseBody(mockedResponse)
|
||||
|
||||
// Finish the response.
|
||||
this.trigger('load')
|
||||
this.trigger('loadend')
|
||||
this.readyState = 4
|
||||
return
|
||||
}
|
||||
|
||||
// Otherwise, perform the original "XMLHttpRequest.prototype.send" call.
|
||||
return super.send(...args)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
> The request interception algorithms differ dramatically based on the request API. Interceptors acommodate for them all, bringing the intercepted requests to a common ground—the Fetch API `Request` instance. The same applies for responses, where a Fetch API `Response` instance is translated to the appropriate response format.
|
||||
|
||||
This library aims to provide _full specification compliance_ with the APIs and protocols it extends.
|
||||
|
||||
## What this library does
|
||||
|
||||
This library extends the following native modules:
|
||||
|
||||
- `http.get`/`http.request`
|
||||
- `https.get`/`https.request`
|
||||
- `XMLHttpRequest`
|
||||
- `fetch`
|
||||
- `WebSocket`
|
||||
|
||||
Once extended, it intercepts and normalizes all requests to the Fetch API `Request` instances. This way, no matter the request source (`http.ClientRequest`, `XMLHttpRequest`, `window.Request`, etc), you always get a specification-compliant request instance to work with.
|
||||
|
||||
You can respond to the intercepted HTTP request by constructing a Fetch API Response instance. Instead of designing custom abstractions, this library respects the Fetch API specification and takes the responsibility to coerce a single response declaration to the appropriate response formats based on the request-issuing modules (like `http.OutgoingMessage` to respond to `http.ClientRequest`, or updating `XMLHttpRequest` response-related properties).
|
||||
|
||||
## What this library doesn't do
|
||||
|
||||
- Does **not** provide any request matching logic;
|
||||
- Does **not** handle requests by default.
|
||||
|
||||
## Getting started
|
||||
|
||||
```bash
|
||||
npm install @mswjs/interceptors
|
||||
```
|
||||
|
||||
## Interceptors
|
||||
|
||||
To use this library you need to choose one or multiple interceptors to apply. There are different interceptors exported by this library to spy on respective request-issuing modules:
|
||||
|
||||
- `ClientRequestInterceptor` to spy on `http.ClientRequest` (`http.get`/`http.request`);
|
||||
- `XMLHttpRequestInterceptor` to spy on `XMLHttpRequest`;
|
||||
- `FetchInterceptor` to spy on `fetch`.
|
||||
|
||||
Use an interceptor by constructing it and attaching request/response listeners:
|
||||
|
||||
```js
|
||||
import { ClientRequestInterceptor } from '@mswjs/interceptors/ClientRequest'
|
||||
|
||||
const interceptor = new ClientRequestInterceptor()
|
||||
|
||||
// Enable the interception of requests.
|
||||
interceptor.apply()
|
||||
|
||||
// Listen to any "http.ClientRequest" being dispatched,
|
||||
// and log its method and full URL.
|
||||
interceptor.on('request', ({ request, requestId }) => {
|
||||
console.log(request.method, request.url)
|
||||
})
|
||||
|
||||
// Listen to any responses sent to "http.ClientRequest".
|
||||
// Note that this listener is read-only and cannot affect responses.
|
||||
interceptor.on(
|
||||
'response',
|
||||
({ response, isMockedResponse, request, requestId }) => {
|
||||
console.log('response to %s %s was:', request.method, request.url, response)
|
||||
}
|
||||
)
|
||||
```
|
||||
|
||||
All HTTP request interceptors implement the same events:
|
||||
|
||||
- `request`, emitted whenever a request has been dispatched;
|
||||
- `response`, emitted whenever any request receives a response.
|
||||
|
||||
### Using multiple interceptors
|
||||
|
||||
You can combine multiple interceptors to capture requests from different request-issuing modules at once.
|
||||
|
||||
```js
|
||||
import { BatchInterceptor } from '@mswjs/interceptors'
|
||||
import { ClientRequestInterceptor } from '@mswjs/interceptors/ClientRequest'
|
||||
import { XMLHttpRequestInterceptor } from '@mswjs/interceptors/XMLHttpRequest'
|
||||
|
||||
const interceptor = new BatchInterceptor({
|
||||
name: 'my-interceptor',
|
||||
interceptors: [
|
||||
new ClientRequestInterceptor(),
|
||||
new XMLHttpRequestInterceptor(),
|
||||
],
|
||||
})
|
||||
|
||||
interceptor.apply()
|
||||
|
||||
// This "request" listener will be called on both
|
||||
// "http.ClientRequest" and "XMLHttpRequest" being dispatched.
|
||||
interceptor.on('request', listener)
|
||||
```
|
||||
|
||||
> Note that you can use [pre-defined presets](#presets) that cover all the request sources for a given environment type.
|
||||
|
||||
## Presets
|
||||
|
||||
When using [`BatchInterceptor`](#batchinterceptor), you can provide a pre-defined preset to its "interceptors" option to capture all request for that environment.
|
||||
|
||||
### Node.js preset
|
||||
|
||||
This preset combines `ClientRequestInterceptor`, `XMLHttpRequestInterceptor` and is meant to be used in Node.js.
|
||||
|
||||
```js
|
||||
import { BatchInterceptor } from '@mswjs/interceptors'
|
||||
import nodeInterceptors from '@mswjs/interceptors/presets/node'
|
||||
|
||||
const interceptor = new BatchInterceptor({
|
||||
name: 'my-interceptor',
|
||||
interceptors: nodeInterceptors,
|
||||
})
|
||||
|
||||
interceptor.apply()
|
||||
|
||||
interceptor.on('request', listener)
|
||||
```
|
||||
|
||||
### Browser preset
|
||||
|
||||
This preset combines `XMLHttpRequestInterceptor` and `FetchInterceptor` and is meant to be used in a browser.
|
||||
|
||||
```js
|
||||
import { BatchInterceptor } from '@mswjs/interceptors'
|
||||
import browserInterceptors from '@mswjs/interceptors/presets/browser'
|
||||
|
||||
const interceptor = new BatchInterceptor({
|
||||
name: 'my-interceptor',
|
||||
interceptors: browserInterceptors,
|
||||
})
|
||||
|
||||
interceptor.on('request', listener)
|
||||
```
|
||||
|
||||
## Introspecting requests
|
||||
|
||||
All HTTP request interceptors emit a "request" event. In the listener to this event, they expose a `request` reference, which is a [Fetch API Request](https://developer.mozilla.org/en-US/docs/Web/API/Request) instance.
|
||||
|
||||
> There are many ways to describe a request in Node.js but this library coerces different request definitions to a single specification-compliant `Request` instance to make the handling consistent.
|
||||
|
||||
```js
|
||||
interceptor.on('request', ({ request, requestId, controller }) => {
|
||||
console.log(request.method, request.url)
|
||||
})
|
||||
```
|
||||
|
||||
Since the exposed `request` instance implements the Fetch API specification, you can operate with it just as you do with the regular browser request. For example, this is how you would read the request body as JSON:
|
||||
|
||||
```js
|
||||
interceptor.on('request', async ({ request, requestId }) => {
|
||||
const json = await request.clone().json()
|
||||
})
|
||||
```
|
||||
|
||||
> **Do not forget to clone the request before reading its body!**
|
||||
|
||||
## Modifying requests
|
||||
|
||||
Request representations are readonly. You can, however, mutate the intercepted request's headers in the "request" listener:
|
||||
|
||||
```js
|
||||
interceptor.on('request', ({ request }) => {
|
||||
request.headers.set('X-My-Header', 'true')
|
||||
})
|
||||
```
|
||||
|
||||
> This restriction is done so that the library wouldn't have to unnecessarily synchronize the actual request instance and its Fetch API request representation. As of now, this library is not meant to be used as a full-scale proxy.
|
||||
|
||||
## Mocking responses
|
||||
|
||||
Although this library can be used purely for request introspection purposes, you can also affect request resolution by responding to any intercepted request within the "request" event.
|
||||
|
||||
Access the `controller` object from the request event listener arguments and call its `controller.respondWith()` method, providing it with a mocked `Response` instance:
|
||||
|
||||
```js
|
||||
interceptor.on('request', ({ request, controller }) => {
|
||||
controller.respondWith(
|
||||
new Response(
|
||||
JSON.stringify({
|
||||
firstName: 'John',
|
||||
lastName: 'Maverick',
|
||||
}),
|
||||
{
|
||||
status: 201,
|
||||
statusText: 'Created',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
}
|
||||
)
|
||||
)
|
||||
})
|
||||
```
|
||||
|
||||
> We use Fetch API `Response` class as the middle-ground for mocked response definition. This library then coerces the response instance to the appropriate response format (e.g. to `http.OutgoingMessage` in the case of `http.ClientRequest`).
|
||||
|
||||
**The `Response` class is built-in in since Node.js 18. Use a Fetch API-compatible polyfill, like `node-fetch`, for older versions of Node.js.`**
|
||||
|
||||
Note that a single request _can only be handled once_. You may want to introduce conditional logic, like routing, in your request listener but it's generally advised to use a higher-level library like [Mock Service Worker](https://github.com/mswjs/msw) that does request matching for you.
|
||||
|
||||
Requests must be responded to within the same tick as the request listener. This means you cannot respond to a request using `setTimeout`, as this will delegate the callback to the next tick. If you wish to introduce asynchronous side-effects in the listener, consider making it an `async` function, awaiting any side-effects you need.
|
||||
|
||||
```js
|
||||
// Respond to all requests with a 500 response
|
||||
// delayed by 500ms.
|
||||
interceptor.on('request', async ({ controller }) => {
|
||||
await sleep(500)
|
||||
controller.respondWith(new Response(null, { status: 500 }))
|
||||
})
|
||||
```
|
||||
|
||||
### Mocking response errors
|
||||
|
||||
You can provide an instance of `Response.error()` to error the pending request.
|
||||
|
||||
```js
|
||||
interceptor.on('request', ({ request, controller }) => {
|
||||
controller.respondWith(Response.error())
|
||||
})
|
||||
```
|
||||
|
||||
This will automatically translate to the appropriate request error based on the request client that issued the request. **Use this method to produce a generic network error**.
|
||||
|
||||
> Note that the standard `Response.error()` API does not accept an error message.
|
||||
|
||||
## Mocking errors
|
||||
|
||||
Use the `controller.errorWith()` method to error the request.
|
||||
|
||||
```js
|
||||
interceptor.on('request', ({ request, controller }) => {
|
||||
controller.errorWith(new Error('reason'))
|
||||
})
|
||||
```
|
||||
|
||||
Unlike responding with `Response.error()`, you can provide an exact error reason to use to `.errorWith()`. **Use this method to error the request**.
|
||||
|
||||
> Note that it is up to the request client to respect your custom error. Some clients, like `ClientRequest` will use the provided error message, while others, like `fetch`, will produce a generic `TypeError: failed to fetch` responses. Interceptors will try to preserve the original error in the `cause` property of such generic errors.
|
||||
|
||||
## Observing responses
|
||||
|
||||
You can use the "response" event to transparently observe any incoming responses in your Node.js process.
|
||||
|
||||
```js
|
||||
interceptor.on(
|
||||
'response',
|
||||
({ response, isMockedResponse, request, requestId }) => {
|
||||
// react to the incoming response...
|
||||
}
|
||||
)
|
||||
```
|
||||
|
||||
> Note that the `isMockedResponse` property will only be set to `true` if you resolved this request in the "request" event listener using the `controller.respondWith()` method and providing a mocked `Response` instance.
|
||||
|
||||
## Error handling
|
||||
|
||||
By default, all unhandled exceptions thrown within the `request` listener are coerced to 500 error responses, emulating those exceptions occurring on the actual server. You can listen to the exceptions by adding the `unhandledException` listener to the interceptor:
|
||||
|
||||
```js
|
||||
interceptor.on(
|
||||
'unhandledException',
|
||||
({ error, request, requestId, controller }) => {
|
||||
console.log(error)
|
||||
}
|
||||
)
|
||||
```
|
||||
|
||||
To opt out from the default coercion of unhandled exceptions to server responses, you need to either:
|
||||
|
||||
1. Respond to the request with [a mocked response](#mocking-responses) (including error responses);
|
||||
1. Propagate the error up by throwing it explicitly in the `unhandledException` listener.
|
||||
|
||||
Here's an example of propagating the unhandled exception up:
|
||||
|
||||
```js
|
||||
interceptor.on('unhandledException', ({ error }) => {
|
||||
// Now, any unhandled exception will NOT be coerced to a 500 error response,
|
||||
// and instead will be thrown during the process execution as-is.
|
||||
throw error
|
||||
})
|
||||
```
|
||||
|
||||
## WebSocket interception
|
||||
|
||||
You can intercept a WebSocket communication using the `WebSocketInterceptor` class.
|
||||
|
||||
> [!IMPORTANT]
|
||||
> This library only supports intercepting WebSocket connections created using the global WHATWG `WebSocket` class. Third-party transports, such as HTTP/XHR polling, are not supported by design due to their contrived nature.
|
||||
|
||||
```js
|
||||
import { WebSocketInterceptor } from '@mswjs/interceptors/WebSocket'
|
||||
|
||||
const interceptor = new WebSocketInterceptor()
|
||||
```
|
||||
|
||||
Unlike the HTTP-based interceptors that share the same `request`/`response` events, the WebSocket interceptor only emits the `connection` event and let's you handle the incoming/outgoing events in its listener.
|
||||
|
||||
### Important defaults
|
||||
|
||||
1. Intercepted WebSocket connections are _not opened_. To open the actual WebSocket connection, call [`server.connect()`](#connect) in the interceptor.
|
||||
1. Once connected to the actual server, the outgoing client events are _forwarded to that server by default_. If you wish to prevent a client message from reaching the server, call `event.preventDefault()` for that client message event.
|
||||
1. Once connected to the actual server, the incoming server events are _forwarded to the client by default_. If you wish to prevent a server message from reaching the client, call `event.preventDefault()` for the server message event.
|
||||
1. Once connected to the actual server, the `close` event received from that server is _forwarded to the client by default_. If you wish to prevent that, call `event.preventDefault()` for that close event of the server.
|
||||
|
||||
### WebSocket connection
|
||||
|
||||
Whenever a WebSocket instance is constructed, the `connection` event is emitted on the WebSocket interceptor.
|
||||
|
||||
```js
|
||||
intereceptor.on('connection', ({ client }) => {
|
||||
console.log(client.url)
|
||||
})
|
||||
```
|
||||
|
||||
The `connection` event exposes the following arguments:
|
||||
|
||||
| Name | Type | Description |
|
||||
| -------- | --------------------------------------------------------- | ----------------------------------------------------------------------------------- |
|
||||
| `client` | [`WebSocketClientConnection`](#websocketclientconnection) | An object representing a connected WebSocket client instance. |
|
||||
| `server` | [`WebSocketServerConnection`](#websocketserverconnection) | An object representing the original WebSocket server connection. |
|
||||
| `info` | `object` | Additional WebSocket connection information (like the original client `protocols`). |
|
||||
|
||||
### `WebSocketClientConnection`
|
||||
|
||||
#### `.addEventListener(type, listener)`
|
||||
|
||||
- `type`, `string`
|
||||
- `listener`, `EventListener`
|
||||
|
||||
Adds an event listener to the given event type of the WebSocket client.
|
||||
|
||||
```ts
|
||||
interface WebSocketServerConnectionEventMap {
|
||||
// Dispatched when the WebSocket client sends data.
|
||||
message: (this: WebSocket, event: MessageEvent<WebSocketData>) => void
|
||||
|
||||
// Dispatched when the WebSocket client is closed.
|
||||
close: (this: WebSocket, event: CloseEvent) => void
|
||||
}
|
||||
```
|
||||
|
||||
```js
|
||||
client.addEventListener('message', (event) => {
|
||||
console.log('outgoing:', event.data)
|
||||
})
|
||||
```
|
||||
|
||||
#### `.removeEventListener(type, listener)`
|
||||
|
||||
- `type`, `string`
|
||||
- `listener`, `EventListener`
|
||||
|
||||
Removes the listener for the given event type.
|
||||
|
||||
#### `.send(data)`
|
||||
|
||||
- `data`, `string | Blob | ArrayBuffer`
|
||||
|
||||
Sends the data to the intercepted WebSocket client.
|
||||
|
||||
```js
|
||||
client.send('text')
|
||||
client.send(new Blob(['blob']))
|
||||
client.send(new TextEncoder().encode('array buffer'))
|
||||
```
|
||||
|
||||
#### `.close(code, reason)`
|
||||
|
||||
- `code`, close [status code](https://www.rfc-editor.org/rfc/rfc6455#section-7.4.1).
|
||||
- `reason`, [close reason](https://www.rfc-editor.org/rfc/rfc6455#section-7.1.6).
|
||||
|
||||
Closes the client connection. Unlike the regular `WebSocket.prototype.close()`, the `client.close()` method can accept a non-configurable status codes, such as 1001, 1003, etc.
|
||||
|
||||
```js
|
||||
// Gracefully close the connection with the
|
||||
// intercepted WebSocket client.
|
||||
client.close()
|
||||
```
|
||||
|
||||
```js
|
||||
// Terminate the connection by emulating
|
||||
// the server unable to process the received data.
|
||||
client.close(1003)
|
||||
```
|
||||
|
||||
### `WebSocketServerConnection`
|
||||
|
||||
#### `.connect()`
|
||||
|
||||
Establishes the connection to the original WebSocket server. Connection cannot be awaited. Any data sent via `server.send()` while connecting is buffered and flushed once the connection is open.
|
||||
|
||||
#### `.addEventListener(type, listener)`
|
||||
|
||||
- `type`, `string`
|
||||
- `listener`, `EventListener`
|
||||
|
||||
Adds an event listener to the given event type of the WebSocket server.
|
||||
|
||||
```ts
|
||||
interface WebSocketServerConnectionEventMap {
|
||||
// Dispatched when the server connection is open.
|
||||
open: (this: WebSocket, event: Event) => void
|
||||
|
||||
// Dispatched when the server sends data to the client.
|
||||
message: (this: WebSocket, event: MessageEvent<WebSocketData>) => void
|
||||
|
||||
// Dispatched when the server connection closes.
|
||||
close: (this: WebSocket, event: CloseEvent) => void
|
||||
}
|
||||
```
|
||||
|
||||
```js
|
||||
server.addEventListener('message', (event) => {
|
||||
console.log('incoming:', event.data)
|
||||
})
|
||||
```
|
||||
|
||||
#### `.removeEventListener(type, listener)`
|
||||
|
||||
- `type`, `string`
|
||||
- `listener`, `EventListener`
|
||||
|
||||
Removes the listener for the given event type.
|
||||
|
||||
#### `.send(data)`
|
||||
|
||||
- `data`, `string | Blob | ArrayBuffer`
|
||||
|
||||
Sends the data to the original WebSocket server. Useful in a combination with the client-sent events forwarding:
|
||||
|
||||
```js
|
||||
client.addEventListener('message', (event) => {
|
||||
server.send(event.data)
|
||||
})
|
||||
```
|
||||
|
||||
#### `.close()`
|
||||
|
||||
Closes the connection with the original WebSocket server. Unlike `client.close()`, closing the server connection does not accept any arguments and always asumes a graceful closure. Sending data via `server.send()` after the connection has been closed will have no effect.
|
||||
|
||||
## API
|
||||
|
||||
### `Interceptor`
|
||||
|
||||
A generic class implemented by all interceptors. You do not interact with this class directly.
|
||||
|
||||
```ts
|
||||
class Interceptor {
|
||||
// Applies the interceptor, enabling the interception of requests
|
||||
// in the current process.
|
||||
apply(): void
|
||||
|
||||
// Listens to the public interceptor events.
|
||||
// For HTTP requests, these are "request' and "response" events.
|
||||
on(event, listener): void
|
||||
|
||||
// Cleans up any side-effects introduced by the interceptor
|
||||
// and disables the interception of requests.
|
||||
dispose(): void
|
||||
}
|
||||
```
|
||||
|
||||
**For public consumption, use [interceptors](#interceptors) instead**.
|
||||
|
||||
### `BatchInterceptor`
|
||||
|
||||
Applies multiple request interceptors at the same time.
|
||||
|
||||
```js
|
||||
import { BatchInterceptor } from '@mswjs/interceptors'
|
||||
import nodeInterceptors from '@mswjs/interceptors/presets/node'
|
||||
|
||||
const interceptor = new BatchInterceptor({
|
||||
name: 'my-interceptor',
|
||||
interceptors: nodeInterceptors,
|
||||
})
|
||||
|
||||
interceptor.apply()
|
||||
|
||||
interceptor.on('request', ({ request, requestId }) => {
|
||||
// Inspect the intercepted "request".
|
||||
// Optionally, return a mocked response.
|
||||
})
|
||||
```
|
||||
|
||||
> Using the `/presets/node` interceptors preset is the recommended way to ensure all requests get intercepted, regardless of their origin.
|
||||
|
||||
### `RemoteHttpInterceptor`
|
||||
|
||||
Enables request interception in the current process while delegating the response resolution logic to the _parent process_. **Requires the current process to be a child process**. Requires the parent process to establish a resolver by calling the `createRemoteResolver` function.
|
||||
|
||||
```js
|
||||
// child.js
|
||||
import { RemoteHttpInterceptor } from '@mswjs/interceptors/RemoteHttpInterceptor'
|
||||
import { ClientRequestInterceptor } from '@mswjs/interceptors/ClientRequest'
|
||||
|
||||
const interceptor = new RemoteHttpInterceptor({
|
||||
// Alternatively, you can use presets.
|
||||
interceptors: [new ClientRequestInterceptor()],
|
||||
})
|
||||
|
||||
interceptor.apply()
|
||||
|
||||
process.on('disconnect', () => {
|
||||
interceptor.dispose()
|
||||
})
|
||||
```
|
||||
|
||||
You can still listen to and handle any requests in the child process via the `request` event listener. Keep in mind that a single request can only be responded to once.
|
||||
|
||||
### `RemoteHttpResolver`
|
||||
|
||||
Resolves an intercepted request in the given child `process`. Requires for that child process to enable request interception by calling the `createRemoteInterceptor` function.
|
||||
|
||||
```js
|
||||
// parent.js
|
||||
import { spawn } from 'child_process'
|
||||
import { RemoteHttpResolver } from '@mswjs/interceptors/RemoteHttpInterceptor'
|
||||
|
||||
const appProcess = spawn('node', ['app.js'], {
|
||||
stdio: ['inherit', 'inherit', 'inherit', 'ipc'],
|
||||
})
|
||||
|
||||
const resolver = new RemoteHttpResolver({
|
||||
process: appProcess,
|
||||
})
|
||||
|
||||
resolver.on('request', ({ request, requestId }) => {
|
||||
// Optionally, return a mocked response
|
||||
// for a request that occurred in the "appProcess".
|
||||
})
|
||||
|
||||
resolver.apply()
|
||||
```
|
||||
|
||||
## Special mention
|
||||
|
||||
The following libraries were used as an inspiration to write this low-level API:
|
||||
|
||||
- [`node`](https://github.com/nodejs/node)
|
||||
- [`nock`](https://github.com/nock/nock)
|
||||
- [`mock-xmlhttprequest`](https://github.com/berniegp/mock-xmlhttprequest)
|
||||
6
node_modules/@mswjs/interceptors/RemoteHttpInterceptor/package.json
generated
vendored
Normal file
6
node_modules/@mswjs/interceptors/RemoteHttpInterceptor/package.json
generated
vendored
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
{
|
||||
"main": "../lib/node/RemoteHttpInterceptor.js",
|
||||
"module": "../lib/node/RemoteHttpInterceptor.mjs",
|
||||
"browser": null,
|
||||
"types": "../lib/node/RemoteHttpInterceptor.d.ts"
|
||||
}
|
||||
5
node_modules/@mswjs/interceptors/WebSocket/package.json
generated
vendored
Normal file
5
node_modules/@mswjs/interceptors/WebSocket/package.json
generated
vendored
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
{
|
||||
"main": "../lib/browser/interceptors/WebSocket/index.js",
|
||||
"module": "../lib/browser/interceptors/WebSocket/index.mjs",
|
||||
"types": "../lib/browser/interceptors/WebSocket/index.d.ts"
|
||||
}
|
||||
6
node_modules/@mswjs/interceptors/XMLHttpRequest/package.json
generated
vendored
Normal file
6
node_modules/@mswjs/interceptors/XMLHttpRequest/package.json
generated
vendored
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
{
|
||||
"main": "../lib/node/interceptors/XMLHttpRequest/index.js",
|
||||
"module": "../lib/node/interceptors/XMLHttpRequest/index.mjs",
|
||||
"browser": "../lib/browser/interceptors/XMLHttpRequest/index.js",
|
||||
"types": "../lib/node/interceptors/XMLHttpRequest/index.d.ts"
|
||||
}
|
||||
6
node_modules/@mswjs/interceptors/fetch/package.json
generated
vendored
Normal file
6
node_modules/@mswjs/interceptors/fetch/package.json
generated
vendored
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
{
|
||||
"main": "../lib/node/interceptors/fetch/index.js",
|
||||
"module": "../lib/node/interceptors/fetch/index.mjs",
|
||||
"browser": "../lib/browser/interceptors/fetch/index.js",
|
||||
"types": "../lib/node/interceptors/fetch/index.d.ts"
|
||||
}
|
||||
63
node_modules/@mswjs/interceptors/lib/browser/Interceptor-af98b768.d.ts
generated
vendored
Normal file
63
node_modules/@mswjs/interceptors/lib/browser/Interceptor-af98b768.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,63 @@
|
|||
import { Logger } from '@open-draft/logger';
|
||||
import { Emitter, Listener } from 'strict-event-emitter';
|
||||
|
||||
type InterceptorEventMap = Record<string, any>;
|
||||
type InterceptorSubscription = () => void;
|
||||
/**
|
||||
* Request header name to detect when a single request
|
||||
* is being handled by nested interceptors (XHR -> ClientRequest).
|
||||
* Obscure by design to prevent collisions with user-defined headers.
|
||||
* Ideally, come up with the Interceptor-level mechanism for this.
|
||||
* @see https://github.com/mswjs/interceptors/issues/378
|
||||
*/
|
||||
declare const INTERNAL_REQUEST_ID_HEADER_NAME = "x-interceptors-internal-request-id";
|
||||
declare function getGlobalSymbol<V>(symbol: Symbol): V | undefined;
|
||||
declare function deleteGlobalSymbol(symbol: Symbol): void;
|
||||
declare enum InterceptorReadyState {
|
||||
INACTIVE = "INACTIVE",
|
||||
APPLYING = "APPLYING",
|
||||
APPLIED = "APPLIED",
|
||||
DISPOSING = "DISPOSING",
|
||||
DISPOSED = "DISPOSED"
|
||||
}
|
||||
type ExtractEventNames<Events extends Record<string, any>> = Events extends Record<infer EventName, any> ? EventName : never;
|
||||
declare class Interceptor<Events extends InterceptorEventMap> {
|
||||
private readonly symbol;
|
||||
protected emitter: Emitter<Events>;
|
||||
protected subscriptions: Array<InterceptorSubscription>;
|
||||
protected logger: Logger;
|
||||
readyState: InterceptorReadyState;
|
||||
constructor(symbol: symbol);
|
||||
/**
|
||||
* Determine if this interceptor can be applied
|
||||
* in the current environment.
|
||||
*/
|
||||
protected checkEnvironment(): boolean;
|
||||
/**
|
||||
* Apply this interceptor to the current process.
|
||||
* Returns an already running interceptor instance if it's present.
|
||||
*/
|
||||
apply(): void;
|
||||
/**
|
||||
* Setup the module augments and stubs necessary for this interceptor.
|
||||
* This method is not run if there's a running interceptor instance
|
||||
* to prevent instantiating an interceptor multiple times.
|
||||
*/
|
||||
protected setup(): void;
|
||||
/**
|
||||
* Listen to the interceptor's public events.
|
||||
*/
|
||||
on<EventName extends ExtractEventNames<Events>>(event: EventName, listener: Listener<Events[EventName]>): this;
|
||||
once<EventName extends ExtractEventNames<Events>>(event: EventName, listener: Listener<Events[EventName]>): this;
|
||||
off<EventName extends ExtractEventNames<Events>>(event: EventName, listener: Listener<Events[EventName]>): this;
|
||||
removeAllListeners<EventName extends ExtractEventNames<Events>>(event?: EventName): this;
|
||||
/**
|
||||
* Disposes of any side-effects this interceptor has introduced.
|
||||
*/
|
||||
dispose(): void;
|
||||
private getInstance;
|
||||
private setInstance;
|
||||
private clearInstance;
|
||||
}
|
||||
|
||||
export { ExtractEventNames as E, Interceptor as I, InterceptorEventMap as a, InterceptorSubscription as b, INTERNAL_REQUEST_ID_HEADER_NAME as c, deleteGlobalSymbol as d, InterceptorReadyState as e, getGlobalSymbol as g };
|
||||
83
node_modules/@mswjs/interceptors/lib/browser/chunk-5UK33FSU.mjs
generated
vendored
Normal file
83
node_modules/@mswjs/interceptors/lib/browser/chunk-5UK33FSU.mjs
generated
vendored
Normal file
|
|
@ -0,0 +1,83 @@
|
|||
// src/glossary.ts
|
||||
var IS_PATCHED_MODULE = Symbol("isPatchedModule");
|
||||
|
||||
// src/utils/fetchUtils.ts
|
||||
var _FetchResponse = class extends Response {
|
||||
static isConfigurableStatusCode(status) {
|
||||
return status >= 200 && status <= 599;
|
||||
}
|
||||
static isRedirectResponse(status) {
|
||||
return _FetchResponse.STATUS_CODES_WITH_REDIRECT.includes(status);
|
||||
}
|
||||
/**
|
||||
* Returns a boolean indicating whether the given response status
|
||||
* code represents a response that can have a body.
|
||||
*/
|
||||
static isResponseWithBody(status) {
|
||||
return !_FetchResponse.STATUS_CODES_WITHOUT_BODY.includes(status);
|
||||
}
|
||||
static setUrl(url, response) {
|
||||
if (!url) {
|
||||
return;
|
||||
}
|
||||
if (response.url != "") {
|
||||
return;
|
||||
}
|
||||
Object.defineProperty(response, "url", {
|
||||
value: url,
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: false
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Parses the given raw HTTP headers into a Fetch API `Headers` instance.
|
||||
*/
|
||||
static parseRawHeaders(rawHeaders) {
|
||||
const headers = new Headers();
|
||||
for (let line = 0; line < rawHeaders.length; line += 2) {
|
||||
headers.append(rawHeaders[line], rawHeaders[line + 1]);
|
||||
}
|
||||
return headers;
|
||||
}
|
||||
constructor(body, init = {}) {
|
||||
var _a;
|
||||
const status = (_a = init.status) != null ? _a : 200;
|
||||
const safeStatus = _FetchResponse.isConfigurableStatusCode(status) ? status : 200;
|
||||
const finalBody = _FetchResponse.isResponseWithBody(status) ? body : null;
|
||||
super(finalBody, {
|
||||
...init,
|
||||
status: safeStatus
|
||||
});
|
||||
if (status !== safeStatus) {
|
||||
const stateSymbol = Object.getOwnPropertySymbols(this).find(
|
||||
(symbol) => symbol.description === "state"
|
||||
);
|
||||
if (stateSymbol) {
|
||||
const state = Reflect.get(this, stateSymbol);
|
||||
Reflect.set(state, "status", status);
|
||||
} else {
|
||||
Object.defineProperty(this, "status", {
|
||||
value: status,
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: false
|
||||
});
|
||||
}
|
||||
}
|
||||
_FetchResponse.setUrl(init.url, this);
|
||||
}
|
||||
};
|
||||
var FetchResponse = _FetchResponse;
|
||||
/**
|
||||
* Response status codes for responses that cannot have body.
|
||||
* @see https://fetch.spec.whatwg.org/#statuses
|
||||
*/
|
||||
FetchResponse.STATUS_CODES_WITHOUT_BODY = [101, 103, 204, 205, 304];
|
||||
FetchResponse.STATUS_CODES_WITH_REDIRECT = [301, 302, 303, 307, 308];
|
||||
|
||||
export {
|
||||
IS_PATCHED_MODULE,
|
||||
FetchResponse
|
||||
};
|
||||
//# sourceMappingURL=chunk-5UK33FSU.mjs.map
|
||||
1
node_modules/@mswjs/interceptors/lib/browser/chunk-5UK33FSU.mjs.map
generated
vendored
Normal file
1
node_modules/@mswjs/interceptors/lib/browser/chunk-5UK33FSU.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
22
node_modules/@mswjs/interceptors/lib/browser/chunk-6HYIRFX2.mjs
generated
vendored
Normal file
22
node_modules/@mswjs/interceptors/lib/browser/chunk-6HYIRFX2.mjs
generated
vendored
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
// src/utils/bufferUtils.ts
|
||||
var encoder = new TextEncoder();
|
||||
function encodeBuffer(text) {
|
||||
return encoder.encode(text);
|
||||
}
|
||||
function decodeBuffer(buffer, encoding) {
|
||||
const decoder = new TextDecoder(encoding);
|
||||
return decoder.decode(buffer);
|
||||
}
|
||||
function toArrayBuffer(array) {
|
||||
return array.buffer.slice(
|
||||
array.byteOffset,
|
||||
array.byteOffset + array.byteLength
|
||||
);
|
||||
}
|
||||
|
||||
export {
|
||||
encodeBuffer,
|
||||
decodeBuffer,
|
||||
toArrayBuffer
|
||||
};
|
||||
//# sourceMappingURL=chunk-6HYIRFX2.mjs.map
|
||||
1
node_modules/@mswjs/interceptors/lib/browser/chunk-6HYIRFX2.mjs.map
generated
vendored
Normal file
1
node_modules/@mswjs/interceptors/lib/browser/chunk-6HYIRFX2.mjs.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"sources":["../../src/utils/bufferUtils.ts"],"sourcesContent":["const encoder = new TextEncoder()\n\nexport function encodeBuffer(text: string): Uint8Array {\n return encoder.encode(text)\n}\n\nexport function decodeBuffer(buffer: ArrayBuffer, encoding?: string): string {\n const decoder = new TextDecoder(encoding)\n return decoder.decode(buffer)\n}\n\n/**\n * Create an `ArrayBuffer` from the given `Uint8Array`.\n * Takes the byte offset into account to produce the right buffer\n * in the case when the buffer is bigger than the data view.\n */\nexport function toArrayBuffer(array: Uint8Array): ArrayBuffer {\n return array.buffer.slice(\n array.byteOffset,\n array.byteOffset + array.byteLength\n )\n}\n"],"mappings":";AAAA,IAAM,UAAU,IAAI,YAAY;AAEzB,SAAS,aAAa,MAA0B;AACrD,SAAO,QAAQ,OAAO,IAAI;AAC5B;AAEO,SAAS,aAAa,QAAqB,UAA2B;AAC3E,QAAM,UAAU,IAAI,YAAY,QAAQ;AACxC,SAAO,QAAQ,OAAO,MAAM;AAC9B;AAOO,SAAS,cAAc,OAAgC;AAC5D,SAAO,MAAM,OAAO;AAAA,IAClB,MAAM;AAAA,IACN,MAAM,aAAa,MAAM;AAAA,EAC3B;AACF;","names":[]}
|
||||
83
node_modules/@mswjs/interceptors/lib/browser/chunk-BC2BLJQN.js
generated
vendored
Normal file
83
node_modules/@mswjs/interceptors/lib/browser/chunk-BC2BLJQN.js
generated
vendored
Normal file
|
|
@ -0,0 +1,83 @@
|
|||
"use strict";Object.defineProperty(exports, "__esModule", {value: true});// src/glossary.ts
|
||||
var IS_PATCHED_MODULE = Symbol("isPatchedModule");
|
||||
|
||||
// src/utils/fetchUtils.ts
|
||||
var _FetchResponse = class extends Response {
|
||||
static isConfigurableStatusCode(status) {
|
||||
return status >= 200 && status <= 599;
|
||||
}
|
||||
static isRedirectResponse(status) {
|
||||
return _FetchResponse.STATUS_CODES_WITH_REDIRECT.includes(status);
|
||||
}
|
||||
/**
|
||||
* Returns a boolean indicating whether the given response status
|
||||
* code represents a response that can have a body.
|
||||
*/
|
||||
static isResponseWithBody(status) {
|
||||
return !_FetchResponse.STATUS_CODES_WITHOUT_BODY.includes(status);
|
||||
}
|
||||
static setUrl(url, response) {
|
||||
if (!url) {
|
||||
return;
|
||||
}
|
||||
if (response.url != "") {
|
||||
return;
|
||||
}
|
||||
Object.defineProperty(response, "url", {
|
||||
value: url,
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: false
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Parses the given raw HTTP headers into a Fetch API `Headers` instance.
|
||||
*/
|
||||
static parseRawHeaders(rawHeaders) {
|
||||
const headers = new Headers();
|
||||
for (let line = 0; line < rawHeaders.length; line += 2) {
|
||||
headers.append(rawHeaders[line], rawHeaders[line + 1]);
|
||||
}
|
||||
return headers;
|
||||
}
|
||||
constructor(body, init = {}) {
|
||||
var _a;
|
||||
const status = (_a = init.status) != null ? _a : 200;
|
||||
const safeStatus = _FetchResponse.isConfigurableStatusCode(status) ? status : 200;
|
||||
const finalBody = _FetchResponse.isResponseWithBody(status) ? body : null;
|
||||
super(finalBody, {
|
||||
...init,
|
||||
status: safeStatus
|
||||
});
|
||||
if (status !== safeStatus) {
|
||||
const stateSymbol = Object.getOwnPropertySymbols(this).find(
|
||||
(symbol) => symbol.description === "state"
|
||||
);
|
||||
if (stateSymbol) {
|
||||
const state = Reflect.get(this, stateSymbol);
|
||||
Reflect.set(state, "status", status);
|
||||
} else {
|
||||
Object.defineProperty(this, "status", {
|
||||
value: status,
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: false
|
||||
});
|
||||
}
|
||||
}
|
||||
_FetchResponse.setUrl(init.url, this);
|
||||
}
|
||||
};
|
||||
var FetchResponse = _FetchResponse;
|
||||
/**
|
||||
* Response status codes for responses that cannot have body.
|
||||
* @see https://fetch.spec.whatwg.org/#statuses
|
||||
*/
|
||||
FetchResponse.STATUS_CODES_WITHOUT_BODY = [101, 103, 204, 205, 304];
|
||||
FetchResponse.STATUS_CODES_WITH_REDIRECT = [301, 302, 303, 307, 308];
|
||||
|
||||
|
||||
|
||||
|
||||
exports.IS_PATCHED_MODULE = IS_PATCHED_MODULE; exports.FetchResponse = FetchResponse;
|
||||
//# sourceMappingURL=chunk-BC2BLJQN.js.map
|
||||
1
node_modules/@mswjs/interceptors/lib/browser/chunk-BC2BLJQN.js.map
generated
vendored
Normal file
1
node_modules/@mswjs/interceptors/lib/browser/chunk-BC2BLJQN.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
844
node_modules/@mswjs/interceptors/lib/browser/chunk-DODHRDV6.mjs
generated
vendored
Normal file
844
node_modules/@mswjs/interceptors/lib/browser/chunk-DODHRDV6.mjs
generated
vendored
Normal file
|
|
@ -0,0 +1,844 @@
|
|||
import {
|
||||
decodeBuffer,
|
||||
encodeBuffer,
|
||||
toArrayBuffer
|
||||
} from "./chunk-6HYIRFX2.mjs";
|
||||
import {
|
||||
RequestController,
|
||||
handleRequest
|
||||
} from "./chunk-H5O73WD2.mjs";
|
||||
import {
|
||||
FetchResponse,
|
||||
IS_PATCHED_MODULE
|
||||
} from "./chunk-5UK33FSU.mjs";
|
||||
import {
|
||||
hasConfigurableGlobal
|
||||
} from "./chunk-TX5GBTFY.mjs";
|
||||
import {
|
||||
INTERNAL_REQUEST_ID_HEADER_NAME,
|
||||
Interceptor,
|
||||
createRequestId
|
||||
} from "./chunk-QED3Q6Z2.mjs";
|
||||
|
||||
// src/interceptors/XMLHttpRequest/index.ts
|
||||
import { invariant as invariant2 } from "outvariant";
|
||||
|
||||
// src/interceptors/XMLHttpRequest/XMLHttpRequestController.ts
|
||||
import { invariant } from "outvariant";
|
||||
import { isNodeProcess } from "is-node-process";
|
||||
|
||||
// src/interceptors/XMLHttpRequest/utils/concatArrayBuffer.ts
|
||||
function concatArrayBuffer(left, right) {
|
||||
const result = new Uint8Array(left.byteLength + right.byteLength);
|
||||
result.set(left, 0);
|
||||
result.set(right, left.byteLength);
|
||||
return result;
|
||||
}
|
||||
|
||||
// src/interceptors/XMLHttpRequest/polyfills/EventPolyfill.ts
|
||||
var EventPolyfill = class {
|
||||
constructor(type, options) {
|
||||
this.NONE = 0;
|
||||
this.CAPTURING_PHASE = 1;
|
||||
this.AT_TARGET = 2;
|
||||
this.BUBBLING_PHASE = 3;
|
||||
this.type = "";
|
||||
this.srcElement = null;
|
||||
this.currentTarget = null;
|
||||
this.eventPhase = 0;
|
||||
this.isTrusted = true;
|
||||
this.composed = false;
|
||||
this.cancelable = true;
|
||||
this.defaultPrevented = false;
|
||||
this.bubbles = true;
|
||||
this.lengthComputable = true;
|
||||
this.loaded = 0;
|
||||
this.total = 0;
|
||||
this.cancelBubble = false;
|
||||
this.returnValue = true;
|
||||
this.type = type;
|
||||
this.target = (options == null ? void 0 : options.target) || null;
|
||||
this.currentTarget = (options == null ? void 0 : options.currentTarget) || null;
|
||||
this.timeStamp = Date.now();
|
||||
}
|
||||
composedPath() {
|
||||
return [];
|
||||
}
|
||||
initEvent(type, bubbles, cancelable) {
|
||||
this.type = type;
|
||||
this.bubbles = !!bubbles;
|
||||
this.cancelable = !!cancelable;
|
||||
}
|
||||
preventDefault() {
|
||||
this.defaultPrevented = true;
|
||||
}
|
||||
stopPropagation() {
|
||||
}
|
||||
stopImmediatePropagation() {
|
||||
}
|
||||
};
|
||||
|
||||
// src/interceptors/XMLHttpRequest/polyfills/ProgressEventPolyfill.ts
|
||||
var ProgressEventPolyfill = class extends EventPolyfill {
|
||||
constructor(type, init) {
|
||||
super(type);
|
||||
this.lengthComputable = (init == null ? void 0 : init.lengthComputable) || false;
|
||||
this.composed = (init == null ? void 0 : init.composed) || false;
|
||||
this.loaded = (init == null ? void 0 : init.loaded) || 0;
|
||||
this.total = (init == null ? void 0 : init.total) || 0;
|
||||
}
|
||||
};
|
||||
|
||||
// src/interceptors/XMLHttpRequest/utils/createEvent.ts
|
||||
var SUPPORTS_PROGRESS_EVENT = typeof ProgressEvent !== "undefined";
|
||||
function createEvent(target, type, init) {
|
||||
const progressEvents = [
|
||||
"error",
|
||||
"progress",
|
||||
"loadstart",
|
||||
"loadend",
|
||||
"load",
|
||||
"timeout",
|
||||
"abort"
|
||||
];
|
||||
const ProgressEventClass = SUPPORTS_PROGRESS_EVENT ? ProgressEvent : ProgressEventPolyfill;
|
||||
const event = progressEvents.includes(type) ? new ProgressEventClass(type, {
|
||||
lengthComputable: true,
|
||||
loaded: (init == null ? void 0 : init.loaded) || 0,
|
||||
total: (init == null ? void 0 : init.total) || 0
|
||||
}) : new EventPolyfill(type, {
|
||||
target,
|
||||
currentTarget: target
|
||||
});
|
||||
return event;
|
||||
}
|
||||
|
||||
// src/utils/findPropertySource.ts
|
||||
function findPropertySource(target, propertyName) {
|
||||
if (!(propertyName in target)) {
|
||||
return null;
|
||||
}
|
||||
const hasProperty = Object.prototype.hasOwnProperty.call(target, propertyName);
|
||||
if (hasProperty) {
|
||||
return target;
|
||||
}
|
||||
const prototype = Reflect.getPrototypeOf(target);
|
||||
return prototype ? findPropertySource(prototype, propertyName) : null;
|
||||
}
|
||||
|
||||
// src/utils/createProxy.ts
|
||||
function createProxy(target, options) {
|
||||
const proxy = new Proxy(target, optionsToProxyHandler(options));
|
||||
return proxy;
|
||||
}
|
||||
function optionsToProxyHandler(options) {
|
||||
const { constructorCall, methodCall, getProperty, setProperty } = options;
|
||||
const handler = {};
|
||||
if (typeof constructorCall !== "undefined") {
|
||||
handler.construct = function(target, args, newTarget) {
|
||||
const next = Reflect.construct.bind(null, target, args, newTarget);
|
||||
return constructorCall.call(newTarget, args, next);
|
||||
};
|
||||
}
|
||||
handler.set = function(target, propertyName, nextValue) {
|
||||
const next = () => {
|
||||
const propertySource = findPropertySource(target, propertyName) || target;
|
||||
const ownDescriptors = Reflect.getOwnPropertyDescriptor(
|
||||
propertySource,
|
||||
propertyName
|
||||
);
|
||||
if (typeof (ownDescriptors == null ? void 0 : ownDescriptors.set) !== "undefined") {
|
||||
ownDescriptors.set.apply(target, [nextValue]);
|
||||
return true;
|
||||
}
|
||||
return Reflect.defineProperty(propertySource, propertyName, {
|
||||
writable: true,
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
value: nextValue
|
||||
});
|
||||
};
|
||||
if (typeof setProperty !== "undefined") {
|
||||
return setProperty.call(target, [propertyName, nextValue], next);
|
||||
}
|
||||
return next();
|
||||
};
|
||||
handler.get = function(target, propertyName, receiver) {
|
||||
const next = () => target[propertyName];
|
||||
const value = typeof getProperty !== "undefined" ? getProperty.call(target, [propertyName, receiver], next) : next();
|
||||
if (typeof value === "function") {
|
||||
return (...args) => {
|
||||
const next2 = value.bind(target, ...args);
|
||||
if (typeof methodCall !== "undefined") {
|
||||
return methodCall.call(target, [propertyName, args], next2);
|
||||
}
|
||||
return next2();
|
||||
};
|
||||
}
|
||||
return value;
|
||||
};
|
||||
return handler;
|
||||
}
|
||||
|
||||
// src/interceptors/XMLHttpRequest/utils/isDomParserSupportedType.ts
|
||||
function isDomParserSupportedType(type) {
|
||||
const supportedTypes = [
|
||||
"application/xhtml+xml",
|
||||
"application/xml",
|
||||
"image/svg+xml",
|
||||
"text/html",
|
||||
"text/xml"
|
||||
];
|
||||
return supportedTypes.some((supportedType) => {
|
||||
return type.startsWith(supportedType);
|
||||
});
|
||||
}
|
||||
|
||||
// src/utils/parseJson.ts
|
||||
function parseJson(data) {
|
||||
try {
|
||||
const json = JSON.parse(data);
|
||||
return json;
|
||||
} catch (_) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// src/interceptors/XMLHttpRequest/utils/createResponse.ts
|
||||
function createResponse(request, body) {
|
||||
const responseBodyOrNull = FetchResponse.isResponseWithBody(request.status) ? body : null;
|
||||
return new FetchResponse(responseBodyOrNull, {
|
||||
url: request.responseURL,
|
||||
status: request.status,
|
||||
statusText: request.statusText,
|
||||
headers: createHeadersFromXMLHttpReqestHeaders(
|
||||
request.getAllResponseHeaders()
|
||||
)
|
||||
});
|
||||
}
|
||||
function createHeadersFromXMLHttpReqestHeaders(headersString) {
|
||||
const headers = new Headers();
|
||||
const lines = headersString.split(/[\r\n]+/);
|
||||
for (const line of lines) {
|
||||
if (line.trim() === "") {
|
||||
continue;
|
||||
}
|
||||
const [name, ...parts] = line.split(": ");
|
||||
const value = parts.join(": ");
|
||||
headers.append(name, value);
|
||||
}
|
||||
return headers;
|
||||
}
|
||||
|
||||
// src/interceptors/XMLHttpRequest/utils/getBodyByteLength.ts
|
||||
async function getBodyByteLength(input) {
|
||||
const explicitContentLength = input.headers.get("content-length");
|
||||
if (explicitContentLength != null && explicitContentLength !== "") {
|
||||
return Number(explicitContentLength);
|
||||
}
|
||||
const buffer = await input.arrayBuffer();
|
||||
return buffer.byteLength;
|
||||
}
|
||||
|
||||
// src/interceptors/XMLHttpRequest/XMLHttpRequestController.ts
|
||||
var kIsRequestHandled = Symbol("kIsRequestHandled");
|
||||
var IS_NODE = isNodeProcess();
|
||||
var kFetchRequest = Symbol("kFetchRequest");
|
||||
var XMLHttpRequestController = class {
|
||||
constructor(initialRequest, logger) {
|
||||
this.initialRequest = initialRequest;
|
||||
this.logger = logger;
|
||||
this.method = "GET";
|
||||
this.url = null;
|
||||
this[kIsRequestHandled] = false;
|
||||
this.events = /* @__PURE__ */ new Map();
|
||||
this.uploadEvents = /* @__PURE__ */ new Map();
|
||||
this.requestId = createRequestId();
|
||||
this.requestHeaders = new Headers();
|
||||
this.responseBuffer = new Uint8Array();
|
||||
this.request = createProxy(initialRequest, {
|
||||
setProperty: ([propertyName, nextValue], invoke) => {
|
||||
switch (propertyName) {
|
||||
case "ontimeout": {
|
||||
const eventName = propertyName.slice(
|
||||
2
|
||||
);
|
||||
this.request.addEventListener(eventName, nextValue);
|
||||
return invoke();
|
||||
}
|
||||
default: {
|
||||
return invoke();
|
||||
}
|
||||
}
|
||||
},
|
||||
methodCall: ([methodName, args], invoke) => {
|
||||
var _a;
|
||||
switch (methodName) {
|
||||
case "open": {
|
||||
const [method, url] = args;
|
||||
if (typeof url === "undefined") {
|
||||
this.method = "GET";
|
||||
this.url = toAbsoluteUrl(method);
|
||||
} else {
|
||||
this.method = method;
|
||||
this.url = toAbsoluteUrl(url);
|
||||
}
|
||||
this.logger = this.logger.extend(`${this.method} ${this.url.href}`);
|
||||
this.logger.info("open", this.method, this.url.href);
|
||||
return invoke();
|
||||
}
|
||||
case "addEventListener": {
|
||||
const [eventName, listener] = args;
|
||||
this.registerEvent(eventName, listener);
|
||||
this.logger.info("addEventListener", eventName, listener);
|
||||
return invoke();
|
||||
}
|
||||
case "setRequestHeader": {
|
||||
const [name, value] = args;
|
||||
this.requestHeaders.set(name, value);
|
||||
this.logger.info("setRequestHeader", name, value);
|
||||
return invoke();
|
||||
}
|
||||
case "send": {
|
||||
const [body] = args;
|
||||
this.request.addEventListener("load", () => {
|
||||
if (typeof this.onResponse !== "undefined") {
|
||||
const fetchResponse = createResponse(
|
||||
this.request,
|
||||
/**
|
||||
* The `response` property is the right way to read
|
||||
* the ambiguous response body, as the request's "responseType" may differ.
|
||||
* @see https://xhr.spec.whatwg.org/#the-response-attribute
|
||||
*/
|
||||
this.request.response
|
||||
);
|
||||
this.onResponse.call(this, {
|
||||
response: fetchResponse,
|
||||
isMockedResponse: this[kIsRequestHandled],
|
||||
request: fetchRequest,
|
||||
requestId: this.requestId
|
||||
});
|
||||
}
|
||||
});
|
||||
const requestBody = typeof body === "string" ? encodeBuffer(body) : body;
|
||||
const fetchRequest = this.toFetchApiRequest(requestBody);
|
||||
this[kFetchRequest] = fetchRequest.clone();
|
||||
const onceRequestSettled = ((_a = this.onRequest) == null ? void 0 : _a.call(this, {
|
||||
request: fetchRequest,
|
||||
requestId: this.requestId
|
||||
})) || Promise.resolve();
|
||||
onceRequestSettled.finally(() => {
|
||||
if (!this[kIsRequestHandled]) {
|
||||
this.logger.info(
|
||||
"request callback settled but request has not been handled (readystate %d), performing as-is...",
|
||||
this.request.readyState
|
||||
);
|
||||
if (IS_NODE) {
|
||||
this.request.setRequestHeader(
|
||||
INTERNAL_REQUEST_ID_HEADER_NAME,
|
||||
this.requestId
|
||||
);
|
||||
}
|
||||
return invoke();
|
||||
}
|
||||
});
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
return invoke();
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
define(
|
||||
this.request,
|
||||
"upload",
|
||||
createProxy(this.request.upload, {
|
||||
setProperty: ([propertyName, nextValue], invoke) => {
|
||||
switch (propertyName) {
|
||||
case "onloadstart":
|
||||
case "onprogress":
|
||||
case "onaboart":
|
||||
case "onerror":
|
||||
case "onload":
|
||||
case "ontimeout":
|
||||
case "onloadend": {
|
||||
const eventName = propertyName.slice(
|
||||
2
|
||||
);
|
||||
this.registerUploadEvent(eventName, nextValue);
|
||||
}
|
||||
}
|
||||
return invoke();
|
||||
},
|
||||
methodCall: ([methodName, args], invoke) => {
|
||||
switch (methodName) {
|
||||
case "addEventListener": {
|
||||
const [eventName, listener] = args;
|
||||
this.registerUploadEvent(eventName, listener);
|
||||
this.logger.info("upload.addEventListener", eventName, listener);
|
||||
return invoke();
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
);
|
||||
}
|
||||
registerEvent(eventName, listener) {
|
||||
const prevEvents = this.events.get(eventName) || [];
|
||||
const nextEvents = prevEvents.concat(listener);
|
||||
this.events.set(eventName, nextEvents);
|
||||
this.logger.info('registered event "%s"', eventName, listener);
|
||||
}
|
||||
registerUploadEvent(eventName, listener) {
|
||||
const prevEvents = this.uploadEvents.get(eventName) || [];
|
||||
const nextEvents = prevEvents.concat(listener);
|
||||
this.uploadEvents.set(eventName, nextEvents);
|
||||
this.logger.info('registered upload event "%s"', eventName, listener);
|
||||
}
|
||||
/**
|
||||
* Responds to the current request with the given
|
||||
* Fetch API `Response` instance.
|
||||
*/
|
||||
async respondWith(response) {
|
||||
this[kIsRequestHandled] = true;
|
||||
if (this[kFetchRequest]) {
|
||||
const totalRequestBodyLength = await getBodyByteLength(
|
||||
this[kFetchRequest]
|
||||
);
|
||||
this.trigger("loadstart", this.request.upload, {
|
||||
loaded: 0,
|
||||
total: totalRequestBodyLength
|
||||
});
|
||||
this.trigger("progress", this.request.upload, {
|
||||
loaded: totalRequestBodyLength,
|
||||
total: totalRequestBodyLength
|
||||
});
|
||||
this.trigger("load", this.request.upload, {
|
||||
loaded: totalRequestBodyLength,
|
||||
total: totalRequestBodyLength
|
||||
});
|
||||
this.trigger("loadend", this.request.upload, {
|
||||
loaded: totalRequestBodyLength,
|
||||
total: totalRequestBodyLength
|
||||
});
|
||||
}
|
||||
this.logger.info(
|
||||
"responding with a mocked response: %d %s",
|
||||
response.status,
|
||||
response.statusText
|
||||
);
|
||||
define(this.request, "status", response.status);
|
||||
define(this.request, "statusText", response.statusText);
|
||||
define(this.request, "responseURL", this.url.href);
|
||||
this.request.getResponseHeader = new Proxy(this.request.getResponseHeader, {
|
||||
apply: (_, __, args) => {
|
||||
this.logger.info("getResponseHeader", args[0]);
|
||||
if (this.request.readyState < this.request.HEADERS_RECEIVED) {
|
||||
this.logger.info("headers not received yet, returning null");
|
||||
return null;
|
||||
}
|
||||
const headerValue = response.headers.get(args[0]);
|
||||
this.logger.info(
|
||||
'resolved response header "%s" to',
|
||||
args[0],
|
||||
headerValue
|
||||
);
|
||||
return headerValue;
|
||||
}
|
||||
});
|
||||
this.request.getAllResponseHeaders = new Proxy(
|
||||
this.request.getAllResponseHeaders,
|
||||
{
|
||||
apply: () => {
|
||||
this.logger.info("getAllResponseHeaders");
|
||||
if (this.request.readyState < this.request.HEADERS_RECEIVED) {
|
||||
this.logger.info("headers not received yet, returning empty string");
|
||||
return "";
|
||||
}
|
||||
const headersList = Array.from(response.headers.entries());
|
||||
const allHeaders = headersList.map(([headerName, headerValue]) => {
|
||||
return `${headerName}: ${headerValue}`;
|
||||
}).join("\r\n");
|
||||
this.logger.info("resolved all response headers to", allHeaders);
|
||||
return allHeaders;
|
||||
}
|
||||
}
|
||||
);
|
||||
Object.defineProperties(this.request, {
|
||||
response: {
|
||||
enumerable: true,
|
||||
configurable: false,
|
||||
get: () => this.response
|
||||
},
|
||||
responseText: {
|
||||
enumerable: true,
|
||||
configurable: false,
|
||||
get: () => this.responseText
|
||||
},
|
||||
responseXML: {
|
||||
enumerable: true,
|
||||
configurable: false,
|
||||
get: () => this.responseXML
|
||||
}
|
||||
});
|
||||
const totalResponseBodyLength = await getBodyByteLength(response.clone());
|
||||
this.logger.info("calculated response body length", totalResponseBodyLength);
|
||||
this.trigger("loadstart", this.request, {
|
||||
loaded: 0,
|
||||
total: totalResponseBodyLength
|
||||
});
|
||||
this.setReadyState(this.request.HEADERS_RECEIVED);
|
||||
this.setReadyState(this.request.LOADING);
|
||||
const finalizeResponse = () => {
|
||||
this.logger.info("finalizing the mocked response...");
|
||||
this.setReadyState(this.request.DONE);
|
||||
this.trigger("load", this.request, {
|
||||
loaded: this.responseBuffer.byteLength,
|
||||
total: totalResponseBodyLength
|
||||
});
|
||||
this.trigger("loadend", this.request, {
|
||||
loaded: this.responseBuffer.byteLength,
|
||||
total: totalResponseBodyLength
|
||||
});
|
||||
};
|
||||
if (response.body) {
|
||||
this.logger.info("mocked response has body, streaming...");
|
||||
const reader = response.body.getReader();
|
||||
const readNextResponseBodyChunk = async () => {
|
||||
const { value, done } = await reader.read();
|
||||
if (done) {
|
||||
this.logger.info("response body stream done!");
|
||||
finalizeResponse();
|
||||
return;
|
||||
}
|
||||
if (value) {
|
||||
this.logger.info("read response body chunk:", value);
|
||||
this.responseBuffer = concatArrayBuffer(this.responseBuffer, value);
|
||||
this.trigger("progress", this.request, {
|
||||
loaded: this.responseBuffer.byteLength,
|
||||
total: totalResponseBodyLength
|
||||
});
|
||||
}
|
||||
readNextResponseBodyChunk();
|
||||
};
|
||||
readNextResponseBodyChunk();
|
||||
} else {
|
||||
finalizeResponse();
|
||||
}
|
||||
}
|
||||
responseBufferToText() {
|
||||
return decodeBuffer(this.responseBuffer);
|
||||
}
|
||||
get response() {
|
||||
this.logger.info(
|
||||
"getResponse (responseType: %s)",
|
||||
this.request.responseType
|
||||
);
|
||||
if (this.request.readyState !== this.request.DONE) {
|
||||
return null;
|
||||
}
|
||||
switch (this.request.responseType) {
|
||||
case "json": {
|
||||
const responseJson = parseJson(this.responseBufferToText());
|
||||
this.logger.info("resolved response JSON", responseJson);
|
||||
return responseJson;
|
||||
}
|
||||
case "arraybuffer": {
|
||||
const arrayBuffer = toArrayBuffer(this.responseBuffer);
|
||||
this.logger.info("resolved response ArrayBuffer", arrayBuffer);
|
||||
return arrayBuffer;
|
||||
}
|
||||
case "blob": {
|
||||
const mimeType = this.request.getResponseHeader("Content-Type") || "text/plain";
|
||||
const responseBlob = new Blob([this.responseBufferToText()], {
|
||||
type: mimeType
|
||||
});
|
||||
this.logger.info(
|
||||
"resolved response Blob (mime type: %s)",
|
||||
responseBlob,
|
||||
mimeType
|
||||
);
|
||||
return responseBlob;
|
||||
}
|
||||
default: {
|
||||
const responseText = this.responseBufferToText();
|
||||
this.logger.info(
|
||||
'resolving "%s" response type as text',
|
||||
this.request.responseType,
|
||||
responseText
|
||||
);
|
||||
return responseText;
|
||||
}
|
||||
}
|
||||
}
|
||||
get responseText() {
|
||||
invariant(
|
||||
this.request.responseType === "" || this.request.responseType === "text",
|
||||
"InvalidStateError: The object is in invalid state."
|
||||
);
|
||||
if (this.request.readyState !== this.request.LOADING && this.request.readyState !== this.request.DONE) {
|
||||
return "";
|
||||
}
|
||||
const responseText = this.responseBufferToText();
|
||||
this.logger.info('getResponseText: "%s"', responseText);
|
||||
return responseText;
|
||||
}
|
||||
get responseXML() {
|
||||
invariant(
|
||||
this.request.responseType === "" || this.request.responseType === "document",
|
||||
"InvalidStateError: The object is in invalid state."
|
||||
);
|
||||
if (this.request.readyState !== this.request.DONE) {
|
||||
return null;
|
||||
}
|
||||
const contentType = this.request.getResponseHeader("Content-Type") || "";
|
||||
if (typeof DOMParser === "undefined") {
|
||||
console.warn(
|
||||
"Cannot retrieve XMLHttpRequest response body as XML: DOMParser is not defined. You are likely using an environment that is not browser or does not polyfill browser globals correctly."
|
||||
);
|
||||
return null;
|
||||
}
|
||||
if (isDomParserSupportedType(contentType)) {
|
||||
return new DOMParser().parseFromString(
|
||||
this.responseBufferToText(),
|
||||
contentType
|
||||
);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
errorWith(error) {
|
||||
this[kIsRequestHandled] = true;
|
||||
this.logger.info("responding with an error");
|
||||
this.setReadyState(this.request.DONE);
|
||||
this.trigger("error", this.request);
|
||||
this.trigger("loadend", this.request);
|
||||
}
|
||||
/**
|
||||
* Transitions this request's `readyState` to the given one.
|
||||
*/
|
||||
setReadyState(nextReadyState) {
|
||||
this.logger.info(
|
||||
"setReadyState: %d -> %d",
|
||||
this.request.readyState,
|
||||
nextReadyState
|
||||
);
|
||||
if (this.request.readyState === nextReadyState) {
|
||||
this.logger.info("ready state identical, skipping transition...");
|
||||
return;
|
||||
}
|
||||
define(this.request, "readyState", nextReadyState);
|
||||
this.logger.info("set readyState to: %d", nextReadyState);
|
||||
if (nextReadyState !== this.request.UNSENT) {
|
||||
this.logger.info('triggerring "readystatechange" event...');
|
||||
this.trigger("readystatechange", this.request);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Triggers given event on the `XMLHttpRequest` instance.
|
||||
*/
|
||||
trigger(eventName, target, options) {
|
||||
const callback = target[`on${eventName}`];
|
||||
const event = createEvent(target, eventName, options);
|
||||
this.logger.info('trigger "%s"', eventName, options || "");
|
||||
if (typeof callback === "function") {
|
||||
this.logger.info('found a direct "%s" callback, calling...', eventName);
|
||||
callback.call(target, event);
|
||||
}
|
||||
const events = target instanceof XMLHttpRequestUpload ? this.uploadEvents : this.events;
|
||||
for (const [registeredEventName, listeners] of events) {
|
||||
if (registeredEventName === eventName) {
|
||||
this.logger.info(
|
||||
'found %d listener(s) for "%s" event, calling...',
|
||||
listeners.length,
|
||||
eventName
|
||||
);
|
||||
listeners.forEach((listener) => listener.call(target, event));
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Converts this `XMLHttpRequest` instance into a Fetch API `Request` instance.
|
||||
*/
|
||||
toFetchApiRequest(body) {
|
||||
this.logger.info("converting request to a Fetch API Request...");
|
||||
const resolvedBody = body instanceof Document ? body.documentElement.innerText : body;
|
||||
const fetchRequest = new Request(this.url.href, {
|
||||
method: this.method,
|
||||
headers: this.requestHeaders,
|
||||
/**
|
||||
* @see https://xhr.spec.whatwg.org/#cross-origin-credentials
|
||||
*/
|
||||
credentials: this.request.withCredentials ? "include" : "same-origin",
|
||||
body: ["GET", "HEAD"].includes(this.method.toUpperCase()) ? null : resolvedBody
|
||||
});
|
||||
const proxyHeaders = createProxy(fetchRequest.headers, {
|
||||
methodCall: ([methodName, args], invoke) => {
|
||||
switch (methodName) {
|
||||
case "append":
|
||||
case "set": {
|
||||
const [headerName, headerValue] = args;
|
||||
this.request.setRequestHeader(headerName, headerValue);
|
||||
break;
|
||||
}
|
||||
case "delete": {
|
||||
const [headerName] = args;
|
||||
console.warn(
|
||||
`XMLHttpRequest: Cannot remove a "${headerName}" header from the Fetch API representation of the "${fetchRequest.method} ${fetchRequest.url}" request. XMLHttpRequest headers cannot be removed.`
|
||||
);
|
||||
break;
|
||||
}
|
||||
}
|
||||
return invoke();
|
||||
}
|
||||
});
|
||||
define(fetchRequest, "headers", proxyHeaders);
|
||||
this.logger.info("converted request to a Fetch API Request!", fetchRequest);
|
||||
return fetchRequest;
|
||||
}
|
||||
};
|
||||
kIsRequestHandled, kFetchRequest;
|
||||
function toAbsoluteUrl(url) {
|
||||
if (typeof location === "undefined") {
|
||||
return new URL(url);
|
||||
}
|
||||
return new URL(url.toString(), location.href);
|
||||
}
|
||||
function define(target, property, value) {
|
||||
Reflect.defineProperty(target, property, {
|
||||
// Ensure writable properties to allow redefining readonly properties.
|
||||
writable: true,
|
||||
enumerable: true,
|
||||
value
|
||||
});
|
||||
}
|
||||
|
||||
// src/interceptors/XMLHttpRequest/XMLHttpRequestProxy.ts
|
||||
function createXMLHttpRequestProxy({
|
||||
emitter,
|
||||
logger
|
||||
}) {
|
||||
const XMLHttpRequestProxy = new Proxy(globalThis.XMLHttpRequest, {
|
||||
construct(target, args, newTarget) {
|
||||
logger.info("constructed new XMLHttpRequest");
|
||||
const originalRequest = Reflect.construct(
|
||||
target,
|
||||
args,
|
||||
newTarget
|
||||
);
|
||||
const prototypeDescriptors = Object.getOwnPropertyDescriptors(
|
||||
target.prototype
|
||||
);
|
||||
for (const propertyName in prototypeDescriptors) {
|
||||
Reflect.defineProperty(
|
||||
originalRequest,
|
||||
propertyName,
|
||||
prototypeDescriptors[propertyName]
|
||||
);
|
||||
}
|
||||
const xhrRequestController = new XMLHttpRequestController(
|
||||
originalRequest,
|
||||
logger
|
||||
);
|
||||
xhrRequestController.onRequest = async function({ request, requestId }) {
|
||||
const controller = new RequestController(request);
|
||||
this.logger.info("awaiting mocked response...");
|
||||
this.logger.info(
|
||||
'emitting the "request" event for %s listener(s)...',
|
||||
emitter.listenerCount("request")
|
||||
);
|
||||
const isRequestHandled = await handleRequest({
|
||||
request,
|
||||
requestId,
|
||||
controller,
|
||||
emitter,
|
||||
onResponse: async (response) => {
|
||||
await this.respondWith(response);
|
||||
},
|
||||
onRequestError: () => {
|
||||
this.errorWith(new TypeError("Network error"));
|
||||
},
|
||||
onError: (error) => {
|
||||
this.logger.info("request errored!", { error });
|
||||
if (error instanceof Error) {
|
||||
this.errorWith(error);
|
||||
}
|
||||
}
|
||||
});
|
||||
if (!isRequestHandled) {
|
||||
this.logger.info(
|
||||
"no mocked response received, performing request as-is..."
|
||||
);
|
||||
}
|
||||
};
|
||||
xhrRequestController.onResponse = async function({
|
||||
response,
|
||||
isMockedResponse,
|
||||
request,
|
||||
requestId
|
||||
}) {
|
||||
this.logger.info(
|
||||
'emitting the "response" event for %s listener(s)...',
|
||||
emitter.listenerCount("response")
|
||||
);
|
||||
emitter.emit("response", {
|
||||
response,
|
||||
isMockedResponse,
|
||||
request,
|
||||
requestId
|
||||
});
|
||||
};
|
||||
return xhrRequestController.request;
|
||||
}
|
||||
});
|
||||
return XMLHttpRequestProxy;
|
||||
}
|
||||
|
||||
// src/interceptors/XMLHttpRequest/index.ts
|
||||
var _XMLHttpRequestInterceptor = class extends Interceptor {
|
||||
constructor() {
|
||||
super(_XMLHttpRequestInterceptor.interceptorSymbol);
|
||||
}
|
||||
checkEnvironment() {
|
||||
return hasConfigurableGlobal("XMLHttpRequest");
|
||||
}
|
||||
setup() {
|
||||
const logger = this.logger.extend("setup");
|
||||
logger.info('patching "XMLHttpRequest" module...');
|
||||
const PureXMLHttpRequest = globalThis.XMLHttpRequest;
|
||||
invariant2(
|
||||
!PureXMLHttpRequest[IS_PATCHED_MODULE],
|
||||
'Failed to patch the "XMLHttpRequest" module: already patched.'
|
||||
);
|
||||
globalThis.XMLHttpRequest = createXMLHttpRequestProxy({
|
||||
emitter: this.emitter,
|
||||
logger: this.logger
|
||||
});
|
||||
logger.info(
|
||||
'native "XMLHttpRequest" module patched!',
|
||||
globalThis.XMLHttpRequest.name
|
||||
);
|
||||
Object.defineProperty(globalThis.XMLHttpRequest, IS_PATCHED_MODULE, {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
value: true
|
||||
});
|
||||
this.subscriptions.push(() => {
|
||||
Object.defineProperty(globalThis.XMLHttpRequest, IS_PATCHED_MODULE, {
|
||||
value: void 0
|
||||
});
|
||||
globalThis.XMLHttpRequest = PureXMLHttpRequest;
|
||||
logger.info(
|
||||
'native "XMLHttpRequest" module restored!',
|
||||
globalThis.XMLHttpRequest.name
|
||||
);
|
||||
});
|
||||
}
|
||||
};
|
||||
var XMLHttpRequestInterceptor = _XMLHttpRequestInterceptor;
|
||||
XMLHttpRequestInterceptor.interceptorSymbol = Symbol("xhr");
|
||||
|
||||
export {
|
||||
XMLHttpRequestInterceptor
|
||||
};
|
||||
//# sourceMappingURL=chunk-DODHRDV6.mjs.map
|
||||
1
node_modules/@mswjs/interceptors/lib/browser/chunk-DODHRDV6.mjs.map
generated
vendored
Normal file
1
node_modules/@mswjs/interceptors/lib/browser/chunk-DODHRDV6.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
228
node_modules/@mswjs/interceptors/lib/browser/chunk-FGSEOIC4.js
generated
vendored
Normal file
228
node_modules/@mswjs/interceptors/lib/browser/chunk-FGSEOIC4.js
generated
vendored
Normal file
|
|
@ -0,0 +1,228 @@
|
|||
"use strict";Object.defineProperty(exports, "__esModule", {value: true});// src/RequestController.ts
|
||||
var _outvariant = require('outvariant');
|
||||
var _deferredpromise = require('@open-draft/deferred-promise');
|
||||
|
||||
// src/InterceptorError.ts
|
||||
var InterceptorError = class extends Error {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
this.name = "InterceptorError";
|
||||
Object.setPrototypeOf(this, InterceptorError.prototype);
|
||||
}
|
||||
};
|
||||
|
||||
// src/RequestController.ts
|
||||
var kRequestHandled = Symbol("kRequestHandled");
|
||||
var kResponsePromise = Symbol("kResponsePromise");
|
||||
var RequestController = class {
|
||||
constructor(request) {
|
||||
this.request = request;
|
||||
this[kRequestHandled] = false;
|
||||
this[kResponsePromise] = new (0, _deferredpromise.DeferredPromise)();
|
||||
}
|
||||
/**
|
||||
* Respond to this request with the given `Response` instance.
|
||||
* @example
|
||||
* controller.respondWith(new Response())
|
||||
* controller.respondWith(Response.json({ id }))
|
||||
* controller.respondWith(Response.error())
|
||||
*/
|
||||
respondWith(response) {
|
||||
_outvariant.invariant.as(
|
||||
InterceptorError,
|
||||
!this[kRequestHandled],
|
||||
'Failed to respond to the "%s %s" request: the "request" event has already been handled.',
|
||||
this.request.method,
|
||||
this.request.url
|
||||
);
|
||||
this[kRequestHandled] = true;
|
||||
this[kResponsePromise].resolve(response);
|
||||
}
|
||||
/**
|
||||
* Error this request with the given error.
|
||||
* @example
|
||||
* controller.errorWith()
|
||||
* controller.errorWith(new Error('Oops!'))
|
||||
*/
|
||||
errorWith(error) {
|
||||
_outvariant.invariant.as(
|
||||
InterceptorError,
|
||||
!this[kRequestHandled],
|
||||
'Failed to error the "%s %s" request: the "request" event has already been handled.',
|
||||
this.request.method,
|
||||
this.request.url
|
||||
);
|
||||
this[kRequestHandled] = true;
|
||||
this[kResponsePromise].resolve(error);
|
||||
}
|
||||
};
|
||||
kResponsePromise, kRequestHandled;
|
||||
|
||||
// src/utils/emitAsync.ts
|
||||
async function emitAsync(emitter, eventName, ...data) {
|
||||
const listners = emitter.listeners(eventName);
|
||||
if (listners.length === 0) {
|
||||
return;
|
||||
}
|
||||
for (const listener of listners) {
|
||||
await listener.apply(emitter, data);
|
||||
}
|
||||
}
|
||||
|
||||
// src/utils/handleRequest.ts
|
||||
|
||||
var _until = require('@open-draft/until');
|
||||
|
||||
// src/utils/isPropertyAccessible.ts
|
||||
function isPropertyAccessible(obj, key) {
|
||||
try {
|
||||
obj[key];
|
||||
return true;
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// src/utils/responseUtils.ts
|
||||
function createServerErrorResponse(body) {
|
||||
return new Response(
|
||||
JSON.stringify(
|
||||
body instanceof Error ? {
|
||||
name: body.name,
|
||||
message: body.message,
|
||||
stack: body.stack
|
||||
} : body
|
||||
),
|
||||
{
|
||||
status: 500,
|
||||
statusText: "Unhandled Exception",
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
function isResponseError(response) {
|
||||
return isPropertyAccessible(response, "type") && response.type === "error";
|
||||
}
|
||||
|
||||
// src/utils/isNodeLikeError.ts
|
||||
function isNodeLikeError(error) {
|
||||
if (error == null) {
|
||||
return false;
|
||||
}
|
||||
if (!(error instanceof Error)) {
|
||||
return false;
|
||||
}
|
||||
return "code" in error && "errno" in error;
|
||||
}
|
||||
|
||||
// src/utils/handleRequest.ts
/**
 * Runs the request-handling pipeline for one intercepted request:
 * emits the "request" event on `options.emitter`, waits for a listener
 * (or an abort) to settle the controller's response promise, and routes
 * the outcome to the matching callback on `options` (onResponse,
 * onRequestError, onError).
 *
 * Returns true when the request was handled in some way (mocked
 * response, error response, or error delivered to a callback), and
 * false when no listener produced a response — i.e. the caller should
 * perform the request as-is.
 */
async function handleRequest(options) {
  // Routes a resolved value to the matching callback:
  // Error -> onError, error-type Response -> onRequestError,
  // any other Response -> onResponse. Always reports "handled".
  const handleResponse = async (response) => {
    if (response instanceof Error) {
      options.onError(response);
    } else if (isResponseError(response)) {
      options.onRequestError(response);
    } else {
      await options.onResponse(response);
    }
    return true;
  };
  // Maps a rejection/thrown value to a handled/unhandled verdict.
  const handleResponseError = async (error) => {
    // Internal interceptor errors propagate to the caller as-is.
    // NOTE(review): this throws `result.error` (the closure variable
    // assigned further below), not the `error` argument. The two
    // coincide at the first call site, but may differ when invoked
    // with `nextResult.error` later — confirm this is intended.
    if (error instanceof InterceptorError) {
      throw result.error;
    }
    // Node-like system errors (code/errno) are surfaced via onError.
    if (isNodeLikeError(error)) {
      options.onError(error);
      return true;
    }
    // A thrown Response is treated as the mocked response itself.
    if (error instanceof Response) {
      return await handleResponse(error);
    }
    return false;
  };
  // If no listener settles the controller for this request by the time
  // the "request" event round completes, resolve it with undefined so
  // the pipeline can fall through to performing the request as-is.
  options.emitter.once("request", ({ requestId: pendingRequestId }) => {
    if (pendingRequestId !== options.requestId) {
      return;
    }
    if (options.controller[kResponsePromise].state === "pending") {
      options.controller[kResponsePromise].resolve(void 0);
    }
  });
  // Rejects when the request's AbortSignal fires (or already has).
  const requestAbortPromise = new (0, _deferredpromise.DeferredPromise)();
  if (options.request.signal) {
    if (options.request.signal.aborted) {
      requestAbortPromise.reject(options.request.signal.reason);
    } else {
      options.request.signal.addEventListener(
        "abort",
        () => {
          requestAbortPromise.reject(options.request.signal.reason);
        },
        { once: true }
      );
    }
  }
  // until() captures either the mocked response (data) or whatever was
  // thrown/rejected along the way (error) without throwing here.
  const result = await _until.until.call(void 0, async () => {
    // Notify all "request" listeners; they may settle the controller.
    const requestListtenersPromise = emitAsync(options.emitter, "request", {
      requestId: options.requestId,
      request: options.request,
      controller: options.controller
    });
    await Promise.race([
      // Short-circuit the request handling promise if the request gets aborted.
      requestAbortPromise,
      requestListtenersPromise,
      options.controller[kResponsePromise]
    ]);
    // By now the response promise is settled (by a listener or the
    // fallback once-handler above); read its value.
    const mockedResponse = await options.controller[kResponsePromise];
    return mockedResponse;
  });
  // Abort wins over any other outcome.
  if (requestAbortPromise.state === "rejected") {
    options.onError(requestAbortPromise.rejectionReason);
    return true;
  }
  if (result.error) {
    // First, try the standard error routing (system error / thrown Response).
    if (await handleResponseError(result.error)) {
      return true;
    }
    // Give "unhandledException" listeners a chance to produce a response.
    if (options.emitter.listenerCount("unhandledException") > 0) {
      const unhandledExceptionController = new RequestController(
        options.request
      );
      await emitAsync(options.emitter, "unhandledException", {
        error: result.error,
        request: options.request,
        requestId: options.requestId,
        controller: unhandledExceptionController
      }).then(() => {
        // No listener settled the fallback controller: resolve with
        // undefined so the await below completes.
        if (unhandledExceptionController[kResponsePromise].state === "pending") {
          unhandledExceptionController[kResponsePromise].resolve(void 0);
        }
      });
      const nextResult = await _until.until.call(void 0,
        () => unhandledExceptionController[kResponsePromise]
      );
      if (nextResult.error) {
        return handleResponseError(nextResult.error);
      }
      if (nextResult.data) {
        return handleResponse(nextResult.data);
      }
    }
    // No listener handled the exception: answer with a 500 JSON response.
    options.onResponse(createServerErrorResponse(result.error));
    return true;
  }
  // A listener produced a mocked response (or thrown Response): route it.
  if (result.data) {
    return handleResponse(result.data);
  }
  // Nothing handled the request; caller performs it as-is.
  return false;
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
// Public API of this chunk, consumed by the package's entry modules.
exports.RequestController = RequestController; exports.emitAsync = emitAsync; exports.handleRequest = handleRequest;
|
||||
//# sourceMappingURL=chunk-FGSEOIC4.js.map
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue