Merge branch 'main' into aeisenberg/checkout-path-commitoid
Commit 88db5e75ec

131 changed files with 2233 additions and 2097 deletions

.github/workflows/__analyze-ref-input.yml (generated, vendored): 1 change

@@ -65,6 +65,7 @@ jobs:
 - os: windows-2022
 version: nightly-latest
 name: "Analyze: 'ref' and 'sha' from inputs"
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 steps:
 - name: Check out repository
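
Most of the workflow hunks in this merge are the same one-line addition: a job-level `timeout-minutes: 45`. As a minimal sketch only (the job and step names below are placeholders, not taken from this diff), the setting sits directly under the job key next to `runs-on`:

```yaml
jobs:
  example-job:              # placeholder job name, not from this diff
    runs-on: ubuntu-latest
    timeout-minutes: 45     # cancel the job if it runs longer than 45 minutes
    steps:
      - uses: actions/checkout@v2
      - name: Run checks    # placeholder step
        run: echo "job body goes here"
```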

.github/workflows/__debug-artifacts.yml (generated, vendored): 1 change

@@ -49,6 +49,7 @@ jobs:
 - os: macos-latest
 version: nightly-latest
 name: Debug artifact upload
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 steps:
 - name: Check out repository

.github/workflows/__extractor-ram-threads.yml (generated, vendored): 1 change

@@ -27,6 +27,7 @@ jobs:
 - os: ubuntu-latest
 version: latest
 name: Extractor ram and threads options test
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 steps:
 - name: Check out repository

.github/workflows/__go-custom-queries.yml (generated, vendored): 1 change

@@ -65,6 +65,7 @@ jobs:
 - os: windows-2022
 version: nightly-latest
 name: 'Go: Custom queries'
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 steps:
 - name: Check out repository

.github/workflows/__go-custom-tracing-autobuild.yml (generated, vendored): 1 change

@@ -49,6 +49,7 @@ jobs:
 - os: macos-latest
 version: nightly-latest
 name: 'Go: Autobuild custom tracing'
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 steps:
 - name: Check out repository

.github/workflows/__go-custom-tracing.yml (generated, vendored): 1 change

@@ -65,6 +65,7 @@ jobs:
 - os: windows-2022
 version: nightly-latest
 name: 'Go: Custom tracing'
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 steps:
 - name: Check out repository

.github/workflows/__javascript-source-root.yml (generated, vendored): 1 change

@@ -31,6 +31,7 @@ jobs:
 - os: ubuntu-latest
 version: nightly-latest
 name: Custom source root
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 steps:
 - name: Check out repository

.github/workflows/__multi-language-autodetect.yml (generated, vendored): 1 change

@@ -49,6 +49,7 @@ jobs:
 - os: macos-latest
 version: nightly-latest
 name: Multi-language repository
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 steps:
 - name: Check out repository

.github/workflows/__packaging-config-inputs-js.yml (generated, vendored): 1 change

@@ -29,6 +29,7 @@ jobs:
 - os: macos-latest
 version: nightly-20210831
 name: 'Packaging: Config and input'
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 steps:
 - name: Check out repository

.github/workflows/__packaging-config-js.yml (generated, vendored): 1 change

@@ -29,6 +29,7 @@ jobs:
 - os: macos-latest
 version: nightly-20210831
 name: 'Packaging: Config file'
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 steps:
 - name: Check out repository

.github/workflows/__packaging-inputs-js.yml (generated, vendored): 1 change

@@ -29,6 +29,7 @@ jobs:
 - os: macos-latest
 version: nightly-20210831
 name: 'Packaging: Action input'
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 steps:
 - name: Check out repository

.github/workflows/__remote-config.yml (generated, vendored): 1 change

@@ -65,6 +65,7 @@ jobs:
 - os: windows-2022
 version: nightly-latest
 name: Remote config file
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 steps:
 - name: Check out repository

.github/workflows/__rubocop-multi-language.yml (generated, vendored): 1 change

@@ -37,6 +37,7 @@ jobs:
 - os: ubuntu-latest
 version: nightly-latest
 name: RuboCop multi-language
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 steps:
 - name: Check out repository

.github/workflows/__split-workflow.yml (generated, vendored): 1 change

@@ -29,6 +29,7 @@ jobs:
 - os: macos-latest
 version: nightly-20210831
 name: Split workflow
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 steps:
 - name: Check out repository

.github/workflows/__test-local-codeql.yml (generated, vendored): 1 change

@@ -27,6 +27,7 @@ jobs:
 - os: ubuntu-latest
 version: nightly-latest
 name: Local CodeQL bundle
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 steps:
 - name: Check out repository

.github/workflows/__test-proxy.yml (generated, vendored): 1 change

@@ -27,6 +27,7 @@ jobs:
 - os: ubuntu-latest
 version: latest
 name: Proxy test
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 steps:
 - name: Check out repository

.github/workflows/__test-ruby.yml (generated, vendored): 1 change

@@ -37,6 +37,7 @@ jobs:
 - os: macos-latest
 version: nightly-latest
 name: Ruby analysis
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 steps:
 - name: Check out repository

.github/workflows/__unset-environment.yml (generated, vendored): 1 change

@@ -37,6 +37,7 @@ jobs:
 - os: ubuntu-latest
 version: nightly-latest
 name: Test unsetting environment variables
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 steps:
 - name: Check out repository

.github/workflows/__upload-ref-sha-input.yml (generated, vendored): 1 change

@@ -65,6 +65,7 @@ jobs:
 - os: windows-2022
 version: nightly-latest
 name: "Upload-sarif: 'ref' and 'sha' from inputs"
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 steps:
 - name: Check out repository

.github/workflows/__with-checkout-path.yml (generated, vendored): 1 change

@@ -65,6 +65,7 @@ jobs:
 - os: windows-2022
 version: nightly-latest
 name: Use a custom `checkout_path`
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 steps:
 - name: Check out repository

@@ -20,6 +20,6 @@ jobs:
 run: |
 bundle_version="$(cat "./src/defaults.json" | jq -r ".bundleVersion")"
 set -x
-for expected_file in "codeql-bundle.tar.gz" "codeql-bundle-linux64.tar.gz" "codeql-bundle-osx64.tar.gz" "codeql-bundle-win64.tar.gz" "codeql-runner-linux" "codeql-runner-macos" "codeql-runner-win.exe"; do
+for expected_file in "codeql-bundle.tar.gz" "codeql-bundle-linux64.tar.gz" "codeql-bundle-osx64.tar.gz" "codeql-bundle-win64.tar.gz"; do
 curl --location --fail --head --request GET "https://github.com/github/codeql-action/releases/download/$bundle_version/$expected_file" > /dev/null
 done
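
The hunk above drops the deprecated CodeQL runner binaries from the list of release assets that get checked. For orientation only, here is a hedged sketch of the workflow-step shape such a check normally lives in (the job and step names below are assumptions, not part of this diff):

```yaml
jobs:
  check-bundle-assets:            # hypothetical job name
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Verify release assets exist   # hypothetical step name
        run: |
          bundle_version="$(jq -r ".bundleVersion" ./src/defaults.json)"
          for expected_file in "codeql-bundle.tar.gz" "codeql-bundle-linux64.tar.gz"; do
            # --fail makes curl exit non-zero if the asset is missing
            curl --location --fail --head \
              "https://github.com/github/codeql-action/releases/download/${bundle_version}/${expected_file}" > /dev/null
          done
```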

.github/workflows/pr-checks.yml (vendored): 16 changes

@@ -13,6 +13,7 @@ jobs:
 lint-js:
 name: Lint
 runs-on: ubuntu-latest
+timeout-minutes: 45

 steps:
 - uses: actions/checkout@v2
@@ -21,6 +22,7 @@ jobs:

 check-js:
 runs-on: ubuntu-latest
+timeout-minutes: 45

 steps:
 - uses: actions/checkout@v2
@@ -30,6 +32,7 @@ jobs:
 check-node-modules:
 name: Check modules up to date
 runs-on: macos-latest
+timeout-minutes: 45

 steps:
 - uses: actions/checkout@v2
@@ -39,6 +42,7 @@ jobs:
 verify-pr-checks:
 name: Verify PR checks up to date
 runs-on: ubuntu-latest
+timeout-minutes: 45

 steps:
 - uses: actions/checkout@v2
@@ -60,6 +64,7 @@ jobs:
 matrix:
 os: [ubuntu-latest, macos-latest]
 runs-on: ${{ matrix.os }}
+timeout-minutes: 45

 steps:
 - uses: actions/checkout@v2
@@ -69,6 +74,7 @@ jobs:
 runner-analyze-javascript-ubuntu:
 name: Runner ubuntu JS analyze
 needs: [check-js, check-node-modules]
+timeout-minutes: 45
 runs-on: ubuntu-latest

 steps:
@@ -97,6 +103,7 @@ jobs:
 runner-analyze-javascript-windows:
 name: Runner windows JS analyze
 needs: [check-js, check-node-modules]
+timeout-minutes: 45
 runs-on: windows-latest

 steps:
@@ -121,6 +128,7 @@ jobs:
 runner-analyze-javascript-macos:
 name: Runner macos JS analyze
 needs: [check-js, check-node-modules]
+timeout-minutes: 45
 runs-on: macos-latest

 steps:
@@ -145,6 +153,7 @@ jobs:
 runner-analyze-csharp-ubuntu:
 name: Runner ubuntu C# analyze
 needs: [check-js, check-node-modules]
+timeout-minutes: 45
 runs-on: ubuntu-latest

 steps:
@@ -184,6 +193,7 @@ jobs:
 needs: [check-js, check-node-modules]
 # Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
 # `windows-latest`.
+timeout-minutes: 45
 runs-on: windows-2019

 steps:
@@ -228,6 +238,7 @@ jobs:

 runner-analyze-csharp-macos:
 name: Runner macos C# analyze
+timeout-minutes: 45
 needs: [check-js, check-node-modules]
 runs-on: macos-latest

@@ -266,6 +277,7 @@ jobs:

 runner-analyze-csharp-autobuild-ubuntu:
 name: Runner ubuntu autobuild C# analyze
+timeout-minutes: 45
 needs: [check-js, check-node-modules]
 runs-on: ubuntu-latest

@@ -301,6 +313,7 @@ jobs:
 TEST_MODE: true

 runner-analyze-csharp-autobuild-windows:
+timeout-minutes: 45
 name: Runner windows autobuild C# analyze
 needs: [check-js, check-node-modules]
 # Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
@@ -343,6 +356,7 @@ jobs:
 name: Runner macos autobuild C# analyze
 needs: [check-js, check-node-modules]
 runs-on: macos-latest
+timeout-minutes: 45

 steps:
 - uses: actions/checkout@v2
@@ -380,6 +394,7 @@ jobs:
 name: Runner upload sarif
 needs: [check-js, check-node-modules]
 runs-on: ubuntu-latest
+timeout-minutes: 45

 if: ${{ github.event_name != 'pull_request' || github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id }}

@@ -402,6 +417,7 @@ jobs:
 name: Runner ubuntu extractor RAM and threads options
 needs: [check-js, check-node-modules]
 runs-on: ubuntu-latest
+timeout-minutes: 45

 steps:
 - uses: actions/checkout@v2

.github/workflows/python-deps.yml (vendored): 1 change

@@ -10,6 +10,7 @@ on:

 jobs:
 test-setup-python-scripts:
+timeout-minutes: 45
 runs-on: ${{ matrix.os }}
 strategy:
 fail-fast: false

.github/workflows/release-runner.yml (vendored): 1 change

@@ -9,6 +9,7 @@ on:

 jobs:
 release-runner:
+timeout-minutes: 45
 runs-on: ubuntu-latest
 env:
 RELEASE_TAG: "${{ github.event.inputs.bundle-tag }}"

.github/workflows/split.yml (vendored): 1 change

@@ -26,6 +26,7 @@ on:
 jobs:
 build:
 runs-on: ubuntu-latest
+timeout-minutes: 45
 env:
 CLI_RELEASE: "${{ github.event.inputs.cli-release }}"
 RELEASE_TAG: "${{ github.event.inputs.bundle-tag }}"

.github/workflows/update-dependencies.yml (vendored): 1 change

@@ -6,6 +6,7 @@ on:
 jobs:
 update:
 name: Update dependencies
+timeout-minutes: 45
 runs-on: macos-latest
 if: contains(github.event.pull_request.labels.*.name, 'Update dependencies') && (github.event.pull_request.head.repo.full_name == 'github/codeql-action')
 steps:

.github/workflows/update-release-branch.yml (vendored): 1 change

@@ -9,6 +9,7 @@ on:

 jobs:
 update:
+timeout-minutes: 45
 runs-on: ubuntu-latest
 if: ${{ github.repository == 'github/codeql-action' }}
 steps:

@@ -6,6 +6,8 @@ on:

jobs:
update-supported-enterprise-server-versions:
name: Update Supported Enterprise Server Versions
timeout-minutes: 45
runs-on: ubuntu-latest
if: ${{ github.repository == 'github/codeql-action' }}


CHANGELOG.md: 18 changes

@@ -1,9 +1,21 @@
-# CodeQL Action and CodeQL Runner Changelog
+# CodeQL Action Changelog

 ## [UNRELEASED]

-- Update default CodeQL bundle version to 2.8.2.
-- Fix a bug where old results can be uploaded if the languages in a repository change when using a non-ephemeral self-hosted runner.
+No user facing changes.
+
+## 1.1.5 - 15 Mar 2022
+
+- Update default CodeQL bundle version to 2.8.3.
+- The CodeQL runner is now deprecated and no longer being released. For more information, see [CodeQL runner deprecation](https://github.blog/changelog/2021-09-21-codeql-runner-deprecation/).
+- Fix two bugs that cause action failures with GHES 3.3 or earlier. [#978](https://github.com/github/codeql-action/pull/978)
+- Fix `not a permitted key` invalid requests with GHES 3.1 or earlier
+- Fix `RUNNER_ARCH environment variable must be set` errors with GHES 3.3 or earlier
+
+## 1.1.4 - 07 Mar 2022
+
+- Update default CodeQL bundle version to 2.8.2. [#950](https://github.com/github/codeql-action/pull/950)
+- Fix a bug where old results can be uploaded if the languages in a repository change when using a non-ephemeral self-hosted runner. [#955](https://github.com/github/codeql-action/pull/955)

 ## 1.1.3 - 23 Feb 2022


@@ -63,7 +63,7 @@ Here are a few things you can do that will increase the likelihood of your pull
 1. The first step of releasing a new version of the `codeql-action` is running the "Update release branch" workflow.
 This workflow goes through the pull requests that have been merged to `main` since the last release, creates a changelog, then opens a pull request to merge the changes since the last release into the `v1` release branch.

-A release is automatically started every Monday via a scheduled run of this workflow, however you can start a release manually by triggering a run via [workflow dispatch](https://github.com/github/codeql-action/actions/workflows/update-release-branch.yml).
+A release is automatically started every Monday via a scheduled run of this workflow, however you can start a release manually by triggering a run via [workflow dispatch](https://github.com/github/codeql-action/actions/workflows/update-release-branch.yml).
 1. The workflow run will open a pull request titled "Merge main into v1". Mark the pull request as [ready for review](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request#marking-a-pull-request-as-ready-for-review) to trigger the PR checks.
 1. Review the checklist items in the pull request description.
 Once you've checked off all but the last of these, approve the PR and automerge it.
@@ -72,6 +72,25 @@ Here are a few things you can do that will increase the likelihood of your pull

 Approve the mergeback PR and automerge it. Once the mergeback has been merged into main, the release is complete.

+## Keeping the PR checks up to date (admin access required)
+
+Since the `codeql-action` runs most of its testing through individual Actions workflows, there are over two hundred jobs that need to pass in order for a PR to turn green. Managing these PR checks manually is time consuming and complex. Here is a semi-automated approach.
+
+To regenerate the PR jobs for the action:
+
+1. From a terminal, run the following commands (replace `SHA` with the sha of the commit whose checks you want to use, typically this should be the latest from `main`):
+
+```sh
+SHA= ####
+CHECKS="$(gh api repos/github/codeql-action/commits/${SHA}/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or . == "Update dependencies" or . == "Update Supported Enterprise Server Versions" | not)]')"
+echo "{\"contexts\": ${CHECKS}}" > checks.json
+gh api -X "PATCH" repos/github/codeql-action/branches/main/protection/required_status_checks --input checks.json
+gh api -X "PATCH" repos/github/codeql-action/branches/v1/protection/required_status_checks --input checks.json
+````
+
+2. Go to the [branch protection rules settings page](https://github.com/github/codeql-action/settings/branches) and validate that the rules have been updated.
+
+
 ## Resources

 - [How to Contribute to Open Source](https://opensource.guide/how-to-contribute/)

@@ -39,8 +39,7 @@ on:

 jobs:
 CodeQL-Build:
-# If you're only analyzing JavaScript or Python, CodeQL runs on ubuntu-latest, windows-latest, and macos-latest.
-# If you're analyzing C/C++, C#, Go, or Java, CodeQL runs on ubuntu-latest, windows-2019, and macos-latest.
+# CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
 runs-on: ubuntu-latest

 permissions:
@@ -146,4 +145,4 @@ The very first time code scanning is run and if it is on a pull request, you wil

 After code scanning has analyzed the code in a pull request, it needs to compare the analysis of the topic branch (the merge commit of the branch you used to create the pull request) with the analysis of the base branch (the branch into which you want to merge the pull request). This allows code scanning to compute which alerts are newly introduced by the pull request, which alerts were already present in the base branch, and whether any existing alerts are fixed by the changes in the pull request. Initially, if you use a pull request to add code scanning to a repository, the base branch has not yet been analyzed, so it's not possible to compute these details. In this case, when you click through from the results check on the pull request you will see the "Missing analysis for base commit SHA-HASH" message.

-For more information and other causes of this message, see [Reasons for the "missing analysis" message](https://docs.github.com/en/code-security/secure-coding/automatically-scanning-your-code-for-vulnerabilities-and-errors/setting-up-code-scanning-for-a-repository#reasons-for-the-missing-analysis-message)
+For more information and other causes of this message, see [Reasons for the "Analysis not found" message](https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/setting-up-code-scanning-for-a-repository#reasons-for-the-analysis-not-found-message)

lib/actions-util.js (generated): 27 changes

@@ -21,6 +21,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
@@ -29,6 +30,8 @@ const yaml = __importStar(require("js-yaml"));
const api = __importStar(require("./api-client"));
const sharedEnv = __importStar(require("./shared-environment"));
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json");
/**
* The utils in this module are meant to be run inside of the action only.
* Code paths from the runner should not enter this module.
@@ -503,6 +506,8 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
workflowStartedAt = actionStartedAt.toISOString();
core.exportVariable(sharedEnv.CODEQL_WORKFLOW_STARTED_AT, workflowStartedAt);
}
const runnerOs = (0, util_1.getRequiredEnvParam)("RUNNER_OS");
const codeQlCliVersion = (0, util_1.getCachedCodeQlVersion)();
// If running locally then the GITHUB_ACTION_REF cannot be trusted as it may be for the previous action
// See https://github.com/actions/runner/issues/803
const actionRef = isRunningLocalAction()
@@ -521,6 +526,8 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
started_at: workflowStartedAt,
action_started_at: actionStartedAt.toISOString(),
status,
runner_os: runnerOs,
action_version: pkg.version,
};
// Add optional parameters
if (cause) {
@@ -539,6 +546,17 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
if (matrix) {
statusReport.matrix_vars = matrix;
}
if ("RUNNER_ARCH" in process.env) {
// RUNNER_ARCH is available only in GHES 3.4 and later
// Values other than X86, X64, ARM, or ARM64 are discarded server side
statusReport.runner_arch = process.env["RUNNER_ARCH"];
}
if (runnerOs === "Windows" || runnerOs === "macOS") {
statusReport.runner_os_release = os.release();
}
if (codeQlCliVersion !== undefined) {
statusReport.codeql_version = codeQlCliVersion;
}
return statusReport;
}
exports.createStatusReportBase = createStatusReportBase;
@@ -556,6 +574,13 @@ const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code sc
* Returns whether sending the status report was successful of not.
*/
async function sendStatusReport(statusReport) {
const gitHubVersion = await api.getGitHubVersionActionsOnly();
if ((0, util_1.isGitHubGhesVersionBelow)(gitHubVersion, "3.2.0")) {
// GHES 3.1 and earlier versions reject unexpected properties, which means
// that they will reject status reports with newly added properties.
// Inhibiting status reporting for GHES < 3.2 avoids such failures.
return true;
}
const statusReportJSON = JSON.stringify(statusReport);
core.debug(`Sending status report: ${statusReportJSON}`);
// If in test mode we don't want to upload the results
@@ -653,7 +678,7 @@ async function isAnalyzingDefaultBranch() {
// Get the current ref and trim and refs/heads/ prefix
let currentRef = await getRef();
currentRef = currentRef.startsWith("refs/heads/")
? currentRef.substr("refs/heads/".length)
? currentRef.slice("refs/heads/".length)
: currentRef;
const event = getWorkflowEvent();
const defaultBranch = (_a = event === null || event === void 0 ? void 0 : event.repository) === null || _a === void 0 ? void 0 : _a.default_branch;

File diff suppressed because one or more lines are too long

lib/analyze.js (generated): 5 changes

@@ -131,11 +131,12 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
if (!hasBuiltinQueries && !hasCustomQueries && !hasPackWithCustomQueries) {
throw new Error(`Unable to analyse ${language} as no queries were selected for this language`);
}
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
try {
if (hasPackWithCustomQueries) {
if (hasPackWithCustomQueries &&
!(await util.codeQlVersionAbove(codeql, codeql_1.CODEQL_VERSION_CONFIG_FILES))) {
logger.info("Performing analysis with custom CodeQL Packs.");
logger.startGroup(`Downloading custom packs for ${language}`);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
const results = await codeql.packDownload(packsWithVersion);
logger.info(`Downloaded packs: ${results.packs
.map((r) => `${r.name}@${r.version || "latest"}`)

File diff suppressed because one or more lines are too long

lib/api-client.js (generated): 34 changes

@@ -22,12 +22,13 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getActionsApiClient = exports.getApiClient = exports.DisallowedAPIVersionReason = void 0;
exports.getGitHubVersionActionsOnly = exports.getActionsApiClient = exports.getApiClient = exports.DisallowedAPIVersionReason = void 0;
const path = __importStar(require("path"));
const githubUtils = __importStar(require("@actions/github/lib/utils"));
const retry = __importStar(require("@octokit/plugin-retry"));
const console_log_level_1 = __importDefault(require("console-log-level"));
const actions_util_1 = require("./actions-util");
const util = __importStar(require("./util"));
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json");
@@ -57,15 +58,36 @@ function getApiUrl(githubUrl) {
url.pathname = path.join(url.pathname, "api", "v3");
return url.toString();
}
function getApiDetails() {
return {
auth: (0, actions_util_1.getRequiredInput)("token"),
url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
};
}
// Temporary function to aid in the transition to running on and off of github actions.
// Once all code has been converted this function should be removed or made canonical
// and called only from the action entrypoints.
function getActionsApiClient() {
const apiDetails = {
auth: (0, actions_util_1.getRequiredInput)("token"),
url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
};
return (0, exports.getApiClient)(apiDetails);
return (0, exports.getApiClient)(getApiDetails());
}
exports.getActionsApiClient = getActionsApiClient;
let cachedGitHubVersion = undefined;
/**
* Report the GitHub server version. This is a wrapper around
* util.getGitHubVersion() that automatically supplies GitHub API details using
* GitHub Action inputs. If you need to get the GitHub server version from the
* Runner, please call util.getGitHubVersion() instead.
*
* @returns GitHub version
*/
async function getGitHubVersionActionsOnly() {
if (!util.isActions()) {
throw new Error("getGitHubVersionActionsOnly() works only in an action");
}
if (cachedGitHubVersion === undefined) {
cachedGitHubVersion = await util.getGitHubVersion(getApiDetails());
}
return cachedGitHubVersion;
}
exports.getGitHubVersionActionsOnly = getGitHubVersionActionsOnly;
//# sourceMappingURL=api-client.js.map

@@ -1 +1 @@
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,uEAAyD;AACzD,6DAA+C;AAC/C,0EAAgD;AAEhD,iDAAkD;AAClD,iCAAsD;AAEtD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAeM,MAAM,YAAY,GAAG,UAC1B,UAAoC,EACpC,EAAE,aAAa,GAAG,KAAK,EAAE,GAAG,EAAE;IAE9B,MAAM,IAAI,GACR,CAAC,aAAa,IAAI,UAAU,CAAC,gBAAgB,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC;IACpE,MAAM,eAAe,GAAG,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAC/D,OAAO,IAAI,eAAe,CACxB,WAAW,CAAC,iBAAiB,CAAC,IAAI,EAAE;QAClC,OAAO,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAClC,SAAS,EAAE,UAAU,IAAA,cAAO,GAAE,IAAI,GAAG,CAAC,OAAO,EAAE;QAC/C,GAAG,EAAE,IAAA,2BAAe,EAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC,CAAC;AAdW,QAAA,YAAY,gBAcvB;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,uDAAuD;IACvD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,uFAAuF;AACvF,qFAAqF;AACrF,+CAA+C;AAC/C,SAAgB,mBAAmB;IACjC,MAAM,UAAU,GAAG;QACjB,IAAI,EAAE,IAAA,+BAAgB,EAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;KAC9C,CAAC;IAEF,OAAO,IAAA,oBAAY,EAAC,UAAU,CAAC,CAAC;AAClC,CAAC;AAPD,kDAOC"}
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,uEAAyD;AACzD,6DAA+C;AAC/C,0EAAgD;AAEhD,iDAAkD;AAClD,6CAA+B;AAC/B,iCAAqE;AAErE,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAeM,MAAM,YAAY,GAAG,UAC1B,UAAoC,EACpC,EAAE,aAAa,GAAG,KAAK,EAAE,GAAG,EAAE;IAE9B,MAAM,IAAI,GACR,CAAC,aAAa,IAAI,UAAU,CAAC,gBAAgB,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC;IACpE,MAAM,eAAe,GAAG,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAC/D,OAAO,IAAI,eAAe,CACxB,WAAW,CAAC,iBAAiB,CAAC,IAAI,EAAE;QAClC,OAAO,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAClC,SAAS,EAAE,UAAU,IAAA,cAAO,GAAE,IAAI,GAAG,CAAC,OAAO,EAAE;QAC/C,GAAG,EAAE,IAAA,2BAAe,EAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC,CAAC;AAdW,QAAA,YAAY,gBAcvB;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,uDAAuD;IACvD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,SAAS,aAAa;IACpB,OAAO;QACL,IAAI,EAAE,IAAA,+BAAgB,EAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;KAC9C,CAAC;AACJ,CAAC;AAED,uFAAuF;AACvF,qFAAqF;AACrF,+CAA+C;AAC/C,SAAgB,mBAAmB;IACjC,OAAO,IAAA,oBAAY,EAAC,aAAa,EAAE,CAAC,CAAC;AACvC,CAAC;AAFD,kDAEC;AAED,IAAI,mBAAmB,GAA8B,SAAS,CAAC;AAE/D;;;;;;;GAOG;AACI,KAAK,UAAU,2BAA2B;IAC/C,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE;QACrB,MAAM,IAAI,KAAK,CAAC,uDAAuD,CAAC,CAAC;KAC1E;IACD,IAAI,mBAAmB,KAAK,SAAS,EAAE;QACrC,mBAAmB,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,aAAa,EAAE,CAAC,CAAC;KACpE;IACD,OAAO,mBAAmB,CAAC;AAC7B,CAAC;AARD,kEAQC"}

@@ -1 +1 @@
{ "maximumVersion": "3.4", "minimumVersion": "3.1" }
|
||||
{ "maximumVersion": "3.5", "minimumVersion": "3.1" }

lib/codeql.js (generated): 68 changes

@@ -22,11 +22,12 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_ML_POWERED_QUERIES = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_ML_POWERED_QUERIES = exports.CODEQL_VERSION_CONFIG_FILES = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const fast_deep_equal_1 = __importDefault(require("fast-deep-equal"));
const yaml = __importStar(require("js-yaml"));
const query_string_1 = __importDefault(require("query-string"));
const semver = __importStar(require("semver"));
const actions_util_1 = require("./actions-util");
@@ -75,6 +76,7 @@ const CODEQL_VERSION_GROUP_RULES = "2.5.5";
const CODEQL_VERSION_SARIF_GROUP = "2.5.3";
exports.CODEQL_VERSION_COUNTS_LINES = "2.6.2";
const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1";
exports.CODEQL_VERSION_CONFIG_FILES = "2.8.2"; // Versions before 2.8.2 weren't tolerant to unknown properties
exports.CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";
/**
* This variable controls using the new style of tracing from the CodeQL
@@ -194,6 +196,19 @@ async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
}
return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${codeQLBundleName}`;
}
/**
* Set up CodeQL CLI access.
*
* @param codeqlURL
* @param apiDetails
* @param tempDir
* @param toolCacheDir
* @param variant
* @param logger
* @param checkVersion Whether to check that CodeQL CLI meets the minimum
* version requirement. Must be set to true outside tests.
* @returns
*/
async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant, logger, checkVersion) {
try {
// We use the special value of 'latest' to prioritize the version in the
@@ -365,16 +380,26 @@ async function getCodeQLForTesting() {
return getCodeQLForCmd("codeql-for-testing", false);
}
exports.getCodeQLForTesting = getCodeQLForTesting;
/**
* Return a CodeQL object for CodeQL CLI access.
*
* @param cmd Path to CodeQL CLI
* @param checkVersion Whether to check that CodeQL CLI meets the minimum
* version requirement. Must be set to true outside tests.
* @returns A new CodeQL object
*/
async function getCodeQLForCmd(cmd, checkVersion) {
let cachedVersion = undefined;
const codeql = {
getPath() {
return cmd;
},
async getVersion() {
if (cachedVersion === undefined)
cachedVersion = runTool(cmd, ["version", "--format=terse"]);
return await cachedVersion;
let result = util.getCachedCodeQlVersion();
if (result === undefined) {
result = await runTool(cmd, ["version", "--format=terse"]);
util.cacheCodeQlVersion(result);
}
return result;
},
async printVersion() {
await runTool(cmd, ["version", "--format=json"]);
@@ -429,22 +454,30 @@ async function getCodeQLForCmd(cmd, checkVersion) {
...getExtraOptionsFromEnv(["database", "init"]),
]);
},
async databaseInitCluster(databasePath, languages, sourceRoot, processName, processLevel) {
const extraArgs = languages.map((language) => `--language=${language}`);
if (languages.filter(languages_1.isTracedLanguage).length > 0) {
async databaseInitCluster(config, sourceRoot, processName, processLevel) {
const extraArgs = config.languages.map((language) => `--language=${language}`);
if (config.languages.filter(languages_1.isTracedLanguage).length > 0) {
extraArgs.push("--begin-tracing");
if (processName !== undefined) {
extraArgs.push(`--trace-process-name=${processName}`);
}
else {
// We default to 3 if no other arguments are provided since this was the default
// behaviour of the Runner. Note this path never happens in the CodeQL Action
// because that always passes in a process name.
extraArgs.push(`--trace-process-level=${processLevel || 3}`);
}
}
if (await util.codeQlVersionAbove(codeql, exports.CODEQL_VERSION_CONFIG_FILES)) {
const configLocation = path.resolve(config.tempDir, "user-config.yaml");
fs.writeFileSync(configLocation, yaml.dump(config.originalUserInput));
extraArgs.push(`--codescanning-config=${configLocation}`);
}
await runTool(cmd, [
"database",
"init",
"--db-cluster",
databasePath,
config.dbLocation,
`--source-root=${sourceRoot}`,
...extraArgs,
...getExtraOptionsFromEnv(["database", "init"]),
@@ -560,7 +593,9 @@ async function getCodeQLForCmd(cmd, checkVersion) {
if (extraSearchPath !== undefined) {
codeqlArgs.push("--additional-packs", extraSearchPath);
}
codeqlArgs.push(querySuitePath);
if (!(await util.codeQlVersionAbove(this, exports.CODEQL_VERSION_CONFIG_FILES))) {
codeqlArgs.push(querySuitePath);
}
await runTool(cmd, codeqlArgs);
},
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, automationDetailsId) {
@@ -586,7 +621,10 @@ async function getCodeQLForCmd(cmd, checkVersion) {
(await util.codeQlVersionAbove(this, CODEQL_VERSION_SARIF_GROUP))) {
codeqlArgs.push("--sarif-category", automationDetailsId);
}
codeqlArgs.push(databasePath, ...querySuitePaths);
codeqlArgs.push(databasePath);
if (!(await util.codeQlVersionAbove(this, exports.CODEQL_VERSION_CONFIG_FILES))) {
codeqlArgs.push(...querySuitePaths);
}
// capture stdout, which contains analysis summaries
return await runTool(cmd, codeqlArgs);
},
@@ -657,6 +695,14 @@ async function getCodeQLForCmd(cmd, checkVersion) {
await new toolrunner.ToolRunner(cmd, args).exec();
},
};
// To ensure that status reports include the CodeQL CLI version whereever
// possbile, we want to call getVersion(), which populates the version value
// used by status reporting, at the earliest opportunity. But invoking
// getVersion() directly here breaks tests that only pretend to create a
// CodeQL object. So instead we rely on the assumption that all non-test
// callers would set checkVersion to true, and util.codeQlVersionAbove()
// would call getVersion(), so the CLI version would be cached as soon as the
// CodeQL object is created.
if (checkVersion &&
!(await util.codeQlVersionAbove(codeql, CODEQL_MINIMUM_VERSION))) {
throw new Error(`Expected a CodeQL CLI with version at least ${CODEQL_MINIMUM_VERSION} but got version ${await codeql.getVersion()}`);

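The codeql.js hunks above gate a new code path on CODEQL_VERSION_CONFIG_FILES ("2.8.2"): when the CLI is new enough, the action serializes the user's configuration to user-config.yaml and passes it to `codeql database init` via `--codescanning-config`, instead of passing query suites explicitly later on. As a hedged illustration only (these file contents are an assumption, not taken from this diff), the serialized file is an ordinary code scanning configuration, for example:

```yaml
# Illustrative user-config.yaml contents: the action dumps the user's original
# config input to a file and passes it with --codescanning-config=<path>.
name: "Example CodeQL config"   # hypothetical
queries:
  - uses: security-extended
paths-ignore:
  - "**/test/**"
```
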
File diff suppressed because one or more lines are too long

lib/config-utils.js (generated): 2 changes

@@ -435,7 +435,7 @@ async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, r
// should instead be added in addition
function shouldAddConfigFileQueries(queriesInput) {
if (queriesInput) {
return queriesInput.trimStart().substr(0, 1) === "+";
return queriesInput.trimStart().slice(0, 1) === "+";
}
return true;
}

File diff suppressed because one or more lines are too long

lib/config-utils.test.js (generated): 4 changes

@@ -914,7 +914,7 @@ const mlPoweredQueriesMacro = ava_1.default.macro({
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.4", true, undefined, "security-extended", undefined);
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", false, undefined, "security-extended", undefined);
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, undefined, undefined);
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-extended", "~0.0.2");
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-and-quality", "~0.0.2");
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-extended", "~0.1.0");
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-and-quality", "~0.1.0");
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, "codeql/javascript-experimental-atm-queries@0.0.1", "security-and-quality", "0.0.1");
//# sourceMappingURL=config-utils.test.js.map

@@ -1,3 +1,3 @@
 {
-"bundleVersion": "codeql-bundle-20220224"
+"bundleVersion": "codeql-bundle-20220311"
 }

lib/init-action.js (generated): 5 changes

@@ -22,6 +22,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const actions_util_1 = require("./actions-util");
const api_client_1 = require("./api-client");
const codeql_1 = require("./codeql");
const feature_flags_1 = require("./feature-flags");
const init_1 = require("./init");
@@ -48,7 +49,7 @@ async function sendSuccessStatusReport(startedAt, config, toolsVersion) {
}
if (queriesInput !== undefined) {
queriesInput = queriesInput.startsWith("+")
? queriesInput.substr(1)
? queriesInput.slice(1)
: queriesInput;
queries.push(...queriesInput.split(","));
}
@@ -78,7 +79,7 @@ async function run() {
externalRepoAuth: (0, actions_util_1.getOptionalInput)("external-repository-token"),
url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
};
const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
const gitHubVersion = await (0, api_client_1.getGitHubVersionActionsOnly)();
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger, util_1.Mode.actions);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
const featureFlags = new feature_flags_1.GitHubFeatureFlags(gitHubVersion, apiDetails, repositoryNwo, logger);

File diff suppressed because one or more lines are too long

lib/init.js (generated): 2 changes

@@ -52,7 +52,7 @@ async function runInit(codeql, config, sourceRoot, processName, processLevel) {
try {
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
// Init a database cluster
await codeql.databaseInitCluster(config.dbLocation, config.languages, sourceRoot, processName, processLevel);
await codeql.databaseInitCluster(config, sourceRoot, processName, processLevel);
}
else {
for (const language of config.languages) {

@@ -1 +1 @@
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAI9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,YAAoB,EACpB,OAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,YAAY,EACZ,OAAO,EACP,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAA8B,EAC9B,UAA8B,EAC9B,SAAkB,EAClB,iBAAyB,EACzB,iBAAyB,EACzB,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,YAA0B,EAC1B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,UAAU,EACV,SAAS,EACT,iBAAiB,EACjB,iBAAiB,EACjB,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,YAAY,EACZ,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA1CD,gCA0CC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,YAAgC;;IAEhC,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI;QACF,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;YAChE,0BAA0B;YAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,SAAS,EAChB,UAAU,EACV,WAAW,EACX,YAAY,CACb,CAAC;SACH;aAAM;YACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;gBACvC,yBAAyB;gBACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;aACH;SACF;KACF;IAAC,OAAO,CAAC,EAAE;QACV,kDAAkD;QAClD,yCAAyC;QACzC,IACE,CAAC,YAAY,KAAK;aAClB,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,8BAA8B,CAAC,CAAA;YACnD,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,uCAAuC,CAAC,EAC3D;YACA,MAAM,IAAI,IAAI,CAAC,SAAS,CACtB,sDAAsD,CAAC,CAAC,OAAO,EAAE,CAClE,CAAC;SACH;aAAM,IACL,CAAC,YAAY,KAAK;aAClB,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,wCAAwC,CAAC,CAAA,EAC7D;YACA,MAAM,IAAI,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACrC;aAAM;YACL,MAAM,CAAC,CAAC;SACT;KACF;IACD,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAlDD,0BAkDC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAA
C,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAtCD,8CAsCC"}
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAI9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,YAAoB,EACpB,OAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,YAAY,EACZ,OAAO,EACP,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAA8B,EAC9B,UAA8B,EAC9B,SAAkB,EAClB,iBAAyB,EACzB,iBAAyB,EACzB,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,YAA0B,EAC1B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,UAAU,EACV,SAAS,EACT,iBAAiB,EACjB,iBAAiB,EACjB,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,YAAY,EACZ,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA1CD,gCA0CC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,YAAgC;;IAEhC,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI;QACF,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;YAChE,0BAA0B;YAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,EACN,UAAU,EACV,WAAW,EACX,YAAY,CACb,CAAC;SACH;aAAM;YACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;gBACvC,yBAAyB;gBACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;aACH;SACF;KACF;IAAC,OAAO,CAAC,EAAE;QACV,kDAAkD;QAClD,yCAAyC;QACzC,IACE,CAAC,YAAY,KAAK;aAClB,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,8BAA8B,CAAC,CAAA;YACnD,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,uCAAuC,CAAC,EAC3D;YACA,MAAM,IAAI,IAAI,CAAC,SAAS,CACtB,sDAAsD,CAAC,CAAC,OAAO,EAAE,CAClE,CAAC;SACH;aAAM,IACL,CAAC,YAAY,KAAK;aAClB,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,wCAAwC,CAAC,CAAA,EAC7D;YACA,MAAM,IAAI,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACrC;aAAM;YACL,MAAM,CAAC,CAAC;SACT;KACF;IACD,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAjDD,0BAiDC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACv
E,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAtCD,8CAsCC"}

lib/upload-sarif-action.js (generated): 3 changes

@@ -21,6 +21,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const api_client_1 = require("./api-client");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
@@ -46,7 +47,7 @@ async function run() {
auth: actionsUtil.getRequiredInput("token"),
url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
};
const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
const gitHubVersion = await (0, api_client_1.getGitHubVersionActionsOnly)();
const uploadResult = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, (0, logging_1.getActionsLogger)());
core.setOutput("sarif-id", uploadResult.sarifID);
if (actionsUtil.getRequiredInput("wait-for-processing") === "true") {

@@ -1 +1 @@
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IACjD,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;SAC9C,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,IAAA,uBAAgB,EAAC,UAAU,CAAC,CAAC;QAEzD,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,aAAa,EACb,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QACjD,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YAClE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,EACnC,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAA2D;AAC3D,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAA0E;AAE1E,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IACjD,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;SAC9C,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,IAAA,wCAA2B,GAAE,CAAC;QAE1D,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,aAAa,EACb,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QACjD,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YAClE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,EACnC,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
21
lib/util.js
generated
@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getMlPoweredJsQueriesStatus = exports.ML_POWERED_JS_QUERIES_PACK = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
exports.getMlPoweredJsQueriesStatus = exports.ML_POWERED_JS_QUERIES_PACK = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isGitHubGhesVersionBelow = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
@ -500,6 +500,23 @@ function isHTTPError(arg) {
return (arg === null || arg === void 0 ? void 0 : arg.status) !== undefined && Number.isInteger(arg.status);
}
exports.isHTTPError = isHTTPError;
function isGitHubGhesVersionBelow(gitHubVersion, expectedVersion) {
return (gitHubVersion.type === GitHubVariant.GHES &&
semver.lt(gitHubVersion.version, expectedVersion));
}
exports.isGitHubGhesVersionBelow = isGitHubGhesVersionBelow;
let cachedCodeQlVersion = undefined;
function cacheCodeQlVersion(version) {
if (cachedCodeQlVersion !== undefined) {
throw new Error("cacheCodeQlVersion() should be called only once");
}
cachedCodeQlVersion = version;
}
exports.cacheCodeQlVersion = cacheCodeQlVersion;
function getCachedCodeQlVersion() {
return cachedCodeQlVersion;
}
exports.getCachedCodeQlVersion = getCachedCodeQlVersion;
async function codeQlVersionAbove(codeql, requiredVersion) {
return semver.gte(await codeql.getVersion(), requiredVersion);
}
@ -534,7 +551,7 @@ exports.isGoodVersion = isGoodVersion;
*/
exports.ML_POWERED_JS_QUERIES_PACK = {
packName: "codeql/javascript-experimental-atm-queries",
version: "~0.0.2",
version: "~0.1.0",
};
/**
* Get information about ML-powered JS queries to populate status reports with.
File diff suppressed because one or more lines are too long
7
lib/util.test.js
generated
@ -262,4 +262,11 @@ for (const [packs, expectedStatus] of ML_POWERED_JS_STATUS_TESTS) {
});
});
}
(0, ava_1.default)("isGitHubGhesVersionBelow", async (t) => {
t.falsy(util.isGitHubGhesVersionBelow({ type: util.GitHubVariant.DOTCOM }, "3.2.0"));
t.falsy(util.isGitHubGhesVersionBelow({ type: util.GitHubVariant.GHAE }, "3.2.0"));
t.falsy(util.isGitHubGhesVersionBelow({ type: util.GitHubVariant.GHES, version: "3.3.0" }, "3.2.0"));
t.falsy(util.isGitHubGhesVersionBelow({ type: util.GitHubVariant.GHES, version: "3.2.0" }, "3.2.0"));
t.true(util.isGitHubGhesVersionBelow({ type: util.GitHubVariant.GHES, version: "3.1.2" }, "3.2.0"));
});
//# sourceMappingURL=util.test.js.map
File diff suppressed because one or more lines are too long
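The util.js and util.test.js hunks above introduce a GHES version gate (isGitHubGhesVersionBelow) and a one-shot cache for the CodeQL CLI version (cacheCodeQlVersion / getCachedCodeQlVersion). A minimal TypeScript sketch of how these helpers fit together, reconstructed from the compiled output shown above; the GitHubVersion union and the cached value's string type are assumptions, not part of this diff:

    import * as semver from "semver";

    // Assumed shape of the version descriptor consumed by the compiled code above.
    enum GitHubVariant { DOTCOM, GHES, GHAE }
    type GitHubVersion =
      | { type: GitHubVariant.DOTCOM }
      | { type: GitHubVariant.GHAE }
      | { type: GitHubVariant.GHES; version: string };

    // True only for GHES instances older than the expected release.
    function isGitHubGhesVersionBelow(gitHubVersion: GitHubVersion, expectedVersion: string): boolean {
      return (
        gitHubVersion.type === GitHubVariant.GHES &&
        semver.lt(gitHubVersion.version, expectedVersion)
      );
    }

    // The CLI version is cached once per run; a second call is treated as a programming error.
    let cachedCodeQlVersion: string | undefined = undefined;

    function cacheCodeQlVersion(version: string): void {
      if (cachedCodeQlVersion !== undefined) {
        throw new Error("cacheCodeQlVersion() should be called only once");
      }
      cachedCodeQlVersion = version;
    }

    function getCachedCodeQlVersion(): string | undefined {
      return cachedCodeQlVersion;
    }

    // Example, matching the ava cases above:
    // isGitHubGhesVersionBelow({ type: GitHubVariant.GHES, version: "3.1.2" }, "3.2.0") === true
    // isGitHubGhesVersionBelow({ type: GitHubVariant.DOTCOM }, "3.2.0") === false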
48
node_modules/.package-lock.json
generated
vendored
@ -1,19 +1,18 @@
{
"name": "codeql",
"version": "1.1.4",
"version": "1.1.6",
"lockfileVersion": 2,
"requires": true,
"packages": {
"node_modules/@actions/artifact": {
"version": "0.5.2",
"resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-0.5.2.tgz",
"integrity": "sha512-q/r8WSqyxBJ0ffLCRrtjCBTGnAYqP+ID4yG7f7YSlhrQ4thNg/d+Tq9f1YkLPKX46ZR97OWtGDY+oU/nxcqvLw==",
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-1.0.0.tgz",
"integrity": "sha512-oje+cCiM2maVwoiN+LT9kh2C6UqiTcS1tDKins+nRfckX+C8JJD2kAmzpD5fn/p5Dibjrqk1mtwreAzgNxHrDg==",
"dependencies": {
"@actions/core": "^1.2.6",
"@actions/http-client": "^1.0.11",
"@types/tmp": "^0.1.0",
"tmp": "^0.1.0",
"tmp-promise": "^2.0.2"
"tmp": "^0.2.1",
"tmp-promise": "^3.0.2"
}
},
"node_modules/@actions/core": {
@ -501,10 +500,6 @@
"@sinonjs/fake-timers": "^7.1.0"
}
},
"node_modules/@types/tmp": {
"version": "0.1.0",
"integrity": "sha512-6IwZ9HzWbCq6XoQWhxLpDjuADodH/MKXRUIDFudvgjcVdjFknvmR+DNsoUeer4XPrEnrZs04Jj+kfV9pFsrhmA=="
},
"node_modules/@typescript-eslint/eslint-plugin": {
"version": "4.28.5",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-4.28.5.tgz",
@ -3598,8 +3593,9 @@
}
},
"node_modules/minimist": {
"version": "1.2.5",
"integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz",
"integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==",
"dev": true
},
"node_modules/ms": {
@ -4934,30 +4930,22 @@
}
},
"node_modules/tmp": {
"version": "0.1.0",
"integrity": "sha512-J7Z2K08jbGcdA1kkQpJSqLF6T0tdQqpR2pnSUXsIchbPdTI9v3e85cLW0d6WDhwuAleOV71j2xWs8qMPfK7nKw==",
"version": "0.2.1",
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz",
"integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==",
"dependencies": {
"rimraf": "^2.6.3"
"rimraf": "^3.0.0"
},
"engines": {
"node": ">=6"
"node": ">=8.17.0"
}
},
"node_modules/tmp-promise": {
"version": "2.1.1",
"integrity": "sha512-Z048AOz/w9b6lCbJUpevIJpRpUztENl8zdv1bmAKVHimfqRFl92ROkmT9rp7TVBnrEw2gtMTol/2Cp2S2kJa4Q==",
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/tmp-promise/-/tmp-promise-3.0.3.tgz",
"integrity": "sha512-RwM7MoPojPxsOBYnyd2hy0bxtIlVrihNs9pj5SUvY8Zz1sQcQG2tG1hSr8PDxfgEB8RNKDhqbIlroIarSNDNsQ==",
"dependencies": {
"tmp": "0.1.0"
}
},
"node_modules/tmp/node_modules/rimraf": {
"version": "2.7.1",
"integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==",
"dependencies": {
"glob": "^7.1.3"
},
"bin": {
"rimraf": "bin.js"
"tmp": "^0.2.0"
}
},
"node_modules/to-regex-range": {
1
node_modules/@actions/artifact/lib/artifact-client.js
generated
vendored
@ -1,5 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.create = void 0;
const artifact_client_1 = require("./internal/artifact-client");
/**
* Constructs an ArtifactClient
2
node_modules/@actions/artifact/lib/artifact-client.js.map
generated
vendored
@ -1 +1 @@
|
|||
{"version":3,"file":"artifact-client.js","sourceRoot":"","sources":["../src/artifact-client.ts"],"names":[],"mappings":";;AAIA,gEAAgF;AAUhF;;GAEG;AACH,SAAgB,MAAM;IACpB,OAAO,uCAAqB,CAAC,MAAM,EAAE,CAAA;AACvC,CAAC;AAFD,wBAEC"}
|
||||
{"version":3,"file":"artifact-client.js","sourceRoot":"","sources":["../src/artifact-client.ts"],"names":[],"mappings":";;;AAIA,gEAAgF;AAUhF;;GAEG;AACH,SAAgB,MAAM;IACpB,OAAO,uCAAqB,CAAC,MAAM,EAAE,CAAA;AACvC,CAAC;AAFD,wBAEC"}
|
||||
47
node_modules/@actions/artifact/lib/internal/artifact-client.js
generated
vendored
@ -1,4 +1,23 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
|
|
@ -8,18 +27,13 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.DefaultArtifactClient = void 0;
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const upload_specification_1 = require("./upload-specification");
|
||||
const upload_http_client_1 = require("./upload-http-client");
|
||||
const utils_1 = require("./utils");
|
||||
const path_and_artifact_name_validation_1 = require("./path-and-artifact-name-validation");
|
||||
const download_http_client_1 = require("./download-http-client");
|
||||
const download_specification_1 = require("./download-specification");
|
||||
const config_variables_1 = require("./config-variables");
|
||||
|
|
@ -36,7 +50,9 @@ class DefaultArtifactClient {
|
|||
*/
|
||||
uploadArtifact(name, files, rootDirectory, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
utils_1.checkArtifactName(name);
|
||||
core.info(`Starting artifact upload
|
||||
For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`);
|
||||
path_and_artifact_name_validation_1.checkArtifactName(name);
|
||||
// Get specification for the files being uploaded
|
||||
const uploadSpecification = upload_specification_1.getUploadSpecification(name, rootDirectory, files);
|
||||
const uploadResponse = {
|
||||
|
|
@ -57,12 +73,24 @@ class DefaultArtifactClient {
|
|||
throw new Error('No URL provided by the Artifact Service to upload an artifact to');
|
||||
}
|
||||
core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`);
|
||||
core.info(`Container for artifact "${name}" successfully created. Starting upload of file(s)`);
|
||||
// Upload each of the files that were found concurrently
|
||||
const uploadResult = yield uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options);
|
||||
// Update the size of the artifact to indicate we are done uploading
|
||||
// The uncompressed size is used for display when downloading a zip of the artifact from the UI
|
||||
core.info(`File upload process has finished. Finalizing the artifact upload`);
|
||||
yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name);
|
||||
core.info(`Finished uploading artifact ${name}. Reported size is ${uploadResult.uploadSize} bytes. There were ${uploadResult.failedItems.length} items that failed to upload`);
|
||||
if (uploadResult.failedItems.length > 0) {
|
||||
core.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`);
|
||||
}
|
||||
else {
|
||||
core.info(`Artifact has been finalized. All files have been successfully uploaded!`);
|
||||
}
|
||||
core.info(`
|
||||
The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes
|
||||
The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. This takes into account any gzip compression used to reduce the upload size, time and storage
|
||||
|
||||
Note: The size of downloaded zips can differ significantly from the reported size. For more information see: https://github.com/actions/upload-artifact#zipped-artifact-downloads \r\n`);
|
||||
uploadResponse.artifactItems = uploadSpecification.map(item => item.absoluteFilePath);
|
||||
uploadResponse.size = uploadResult.uploadSize;
|
||||
uploadResponse.failedItems = uploadResult.failedItems;
|
||||
|
|
@ -125,6 +153,7 @@ class DefaultArtifactClient {
|
|||
while (downloadedArtifacts < artifacts.count) {
|
||||
const currentArtifactToDownload = artifacts.value[downloadedArtifacts];
|
||||
downloadedArtifacts += 1;
|
||||
core.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`);
|
||||
// Get container entries for the specific artifact
|
||||
const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl);
|
||||
const downloadSpecification = download_specification_1.getDownloadSpecification(currentArtifactToDownload.name, items.value, path, true);
|
||||
|
|
|
|||
2
node_modules/@actions/artifact/lib/internal/artifact-client.js.map
generated
vendored
@ -1 +1 @@
|
|||
{"version":3,"file":"artifact-client.js","sourceRoot":"","sources":["../../src/internal/artifact-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,iEAG+B;AAC/B,6DAAqD;AAKrD,mCAIgB;AAChB,iEAAyD;AACzD,qEAAiE;AACjE,yDAAwD;AACxD,+BAAuC;AAuCvC,MAAa,qBAAqB;IAChC;;OAEG;IACH,MAAM,CAAC,MAAM;QACX,OAAO,IAAI,qBAAqB,EAAE,CAAA;IACpC,CAAC;IAED;;OAEG;IACG,cAAc,CAClB,IAAY,EACZ,KAAe,EACf,aAAqB,EACrB,OAAmC;;YAEnC,yBAAiB,CAAC,IAAI,CAAC,CAAA;YAEvB,iDAAiD;YACjD,MAAM,mBAAmB,GAA0B,6CAAsB,CACvE,IAAI,EACJ,aAAa,EACb,KAAK,CACN,CAAA;YACD,MAAM,cAAc,GAAmB;gBACrC,YAAY,EAAE,IAAI;gBAClB,aAAa,EAAE,EAAE;gBACjB,IAAI,EAAE,CAAC;gBACP,WAAW,EAAE,EAAE;aAChB,CAAA;YAED,MAAM,gBAAgB,GAAG,IAAI,qCAAgB,EAAE,CAAA;YAE/C,IAAI,mBAAmB,CAAC,MAAM,KAAK,CAAC,EAAE;gBACpC,IAAI,CAAC,OAAO,CAAC,qCAAqC,CAAC,CAAA;aACpD;iBAAM;gBACL,yDAAyD;gBACzD,MAAM,QAAQ,GAAG,MAAM,gBAAgB,CAAC,6BAA6B,CACnE,IAAI,EACJ,OAAO,CACR,CAAA;gBACD,IAAI,CAAC,QAAQ,CAAC,wBAAwB,EAAE;oBACtC,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE,CAAC,CAAA;oBAC/B,MAAM,IAAI,KAAK,CACb,kEAAkE,CACnE,CAAA;iBACF;gBACD,IAAI,CAAC,KAAK,CAAC,wBAAwB,QAAQ,CAAC,wBAAwB,EAAE,CAAC,CAAA;gBAEvE,wDAAwD;gBACxD,MAAM,YAAY,GAAG,MAAM,gBAAgB,CAAC,6BAA6B,CACvE,QAAQ,CAAC,wBAAwB,EACjC,mBAAmB,EACnB,OAAO,CACR,CAAA;gBAED,oEAAoE;gBACpE,+FAA+F;gBAC/F,MAAM,gBAAgB,CAAC,iBAAiB,CAAC,YAAY,CAAC,SAAS,EAAE,IAAI,CAAC,CAAA;gBAEtE,IAAI,CAAC,IAAI,CACP,+BAA+B,IAAI,sBAAsB,YAAY,CAAC,UAAU,sBAAsB,YAAY,CAAC,WAAW,CAAC,MAAM,8BAA8B,CACpK,CAAA;gBAED,cAAc,CAAC,aAAa,GAAG,mBAAmB,CAAC,GAAG,CACpD,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,gBAAgB,CAC9B,CAAA;gBACD,cAAc,CAAC,IAAI,GAAG,YAAY,CAAC,UAAU,CAAA;gBAC7C,cAAc,CAAC,WAAW,GAAG,YAAY,CAAC,WAAW,CAAA;aACtD;YACD,OAAO,cAAc,CAAA;QACvB,CAAC;KAAA;IAEK,gBAAgB,CACpB,IAAY,EACZ,IAAyB,EACzB,OAAqC;;YAErC,MAAM,kBAAkB,GAAG,IAAI,yCAAkB,EAAE,CAAA;YAEnD,MAAM,SAAS,GAAG,MAAM,kBAAkB,CAAC,aAAa,EAAE,CAAA;YAC1D,IAAI,SAAS,CAAC,KAAK,KAAK,CAAC,EAAE;gBACzB,MAAM,IAAI,KAAK,CACb,0DAA0D,CAC3D,CAAA;aACF;YAED,MAAM,kBAAkB,GAAG,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE;gBACzD,OAAO,QAAQ,CAAC,IAAI,KAAK,IAAI,CAAA;YAC/B,CAAC,CAAC,CAAA;YACF,IAAI,CAAC,kBAAkB,EAAE;gBACvB,MAAM,IAAI,KAAK,CAAC,6CAA6C,IAAI,EAAE,CAAC,CAAA;aACrE;YAED,MAAM,KAAK,GAAG,MAAM,kBAAkB,CAAC,iBAAiB,CACtD,kBAAkB,CAAC,IAAI,EACvB,kBAAkB,CAAC,wBAAwB,CAC5C,CAAA;YAED,IAAI,CAAC,IAAI,EAAE;gBACT,IAAI,GAAG,wCAAqB,EAAE,CAAA;aAC/B;YACD,IAAI,GAAG,gBAAS,CAAC,IAAI,CAAC,CAAA;YACtB,IAAI,GAAG,cAAO,CAAC,IAAI,CAAC,CAAA;YAEpB,4IAA4I;YAC5I,MAAM,qBAAqB,GAAG,iDAAwB,CACpD,IAAI,EACJ,KAAK,CAAC,KAAK,EACX,IAAI,EACJ,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,oBAAoB,KAAI,KAAK,CACvC,CAAA;YAED,IAAI,qBAAqB,CAAC,eAAe,CAAC,MAAM,KAAK,CAAC,EAAE;gBACtD,IAAI,CAAC,IAAI,CACP,sDAAsD,kBAAkB,CAAC,IAAI,EAAE,CAChF,CAAA;aACF;iBAAM;gBACL,4EAA4E;gBAC5E,MAAM,oCAA4B,CAChC,qBAAqB,CAAC,kBAAkB,CACzC,CAAA;gBACD,IAAI,CAAC,IAAI,CAAC,qDAAqD,CAAC,CAAA;gBAChE,MAAM,mCAA2B,CAC/B,qBAAqB,CAAC,kBAAkB,CACzC,CAAA;gBACD,MAAM,kBAAkB,CAAC,sBAAsB,CAC7C,qBAAqB,CAAC,eAAe,CACtC,CAAA;aACF;YAED,OAAO;gBACL,YAAY,EAAE,IAAI;gBAClB,YAAY,EAAE,qBAAqB,CAAC,oBAAoB;aACzD,CAAA;QACH,CAAC;KAAA;IAEK,oBAAoB,CACxB,IAAyB;;YAEzB,MAAM,kBAAkB,GAAG,IAAI,yCAAkB,EAAE,CAAA;YAEnD,MAAM,QAAQ,GAAuB,EAAE,CAAA;YACvC,MAAM,SAAS,GAAG,MAAM,kBAAkB,CAAC,aAAa,EAAE,CAAA;YAC1D,IAAI,SAAS,CAAC,KAAK,KAAK,CAAC,EAAE;gBACzB,IAAI,CAAC,IAAI,CAAC,0DAA0D,CAAC,CAAA;gBACrE,OAAO,QAAQ,CAAA;aAChB;YAED,IAAI,CAAC,IAAI,EAAE;gBACT,IAAI,GAAG,wCAAqB,EAAE,CAAA;aAC/B;YACD,IAAI,GAAG,gBAAS,CAAC,IAAI,CAAC,CAAA;YACtB,IAAI,GAAG,cAAO,CAAC,IAAI,CAAC,CAAA;YAEpB,IAAI,mBAAmB,GAAG,CAAC,CAAA;YAC3B,OAAO,mBAAmB,GAAG,SAAS,CAAC,KAAK,EAAE;gBAC5C,MAAM,yBAAyB,GAAG,SAAS,CAAC,KAAK,CAAC,mBAAmB,CAAC,CAAA;gBACtE,mBAAmB,IAAI,CAAC,CAAA;gBAExB,kDA
AkD;gBAClD,MAAM,KAAK,GAAG,MAAM,kBAAkB,CAAC,iBAAiB,CACtD,yBAAyB,CAAC,IAAI,EAC9B,yBAAyB,CAAC,wBAAwB,CACnD,CAAA;gBAED,MAAM,qBAAqB,GAAG,iDAAwB,CACpD,yBAAyB,CAAC,IAAI,EAC9B,KAAK,CAAC,KAAK,EACX,IAAI,EACJ,IAAI,CACL,CAAA;gBACD,IAAI,qBAAqB,CAAC,eAAe,CAAC,MAAM,KAAK,CAAC,EAAE;oBACtD,IAAI,CAAC,IAAI,CACP,qDAAqD,yBAAyB,CAAC,IAAI,EAAE,CACtF,CAAA;iBACF;qBAAM;oBACL,MAAM,oCAA4B,CAChC,qBAAqB,CAAC,kBAAkB,CACzC,CAAA;oBACD,MAAM,mCAA2B,CAC/B,qBAAqB,CAAC,kBAAkB,CACzC,CAAA;oBACD,MAAM,kBAAkB,CAAC,sBAAsB,CAC7C,qBAAqB,CAAC,eAAe,CACtC,CAAA;iBACF;gBAED,QAAQ,CAAC,IAAI,CAAC;oBACZ,YAAY,EAAE,yBAAyB,CAAC,IAAI;oBAC5C,YAAY,EAAE,qBAAqB,CAAC,oBAAoB;iBACzD,CAAC,CAAA;aACH;YACD,OAAO,QAAQ,CAAA;QACjB,CAAC;KAAA;CACF;AApMD,sDAoMC"}
|
||||
{"version":3,"file":"artifact-client.js","sourceRoot":"","sources":["../../src/internal/artifact-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,iEAG+B;AAC/B,6DAAqD;AAKrD,mCAGgB;AAChB,2FAAqE;AACrE,iEAAyD;AACzD,qEAAiE;AACjE,yDAAwD;AACxD,+BAAuC;AAuCvC,MAAa,qBAAqB;IAChC;;OAEG;IACH,MAAM,CAAC,MAAM;QACX,OAAO,IAAI,qBAAqB,EAAE,CAAA;IACpC,CAAC;IAED;;OAEG;IACG,cAAc,CAClB,IAAY,EACZ,KAAe,EACf,aAAqB,EACrB,OAAmC;;YAEnC,IAAI,CAAC,IAAI,CACP;8MACwM,CACzM,CAAA;YACD,qDAAiB,CAAC,IAAI,CAAC,CAAA;YAEvB,iDAAiD;YACjD,MAAM,mBAAmB,GAA0B,6CAAsB,CACvE,IAAI,EACJ,aAAa,EACb,KAAK,CACN,CAAA;YACD,MAAM,cAAc,GAAmB;gBACrC,YAAY,EAAE,IAAI;gBAClB,aAAa,EAAE,EAAE;gBACjB,IAAI,EAAE,CAAC;gBACP,WAAW,EAAE,EAAE;aAChB,CAAA;YAED,MAAM,gBAAgB,GAAG,IAAI,qCAAgB,EAAE,CAAA;YAE/C,IAAI,mBAAmB,CAAC,MAAM,KAAK,CAAC,EAAE;gBACpC,IAAI,CAAC,OAAO,CAAC,qCAAqC,CAAC,CAAA;aACpD;iBAAM;gBACL,yDAAyD;gBACzD,MAAM,QAAQ,GAAG,MAAM,gBAAgB,CAAC,6BAA6B,CACnE,IAAI,EACJ,OAAO,CACR,CAAA;gBACD,IAAI,CAAC,QAAQ,CAAC,wBAAwB,EAAE;oBACtC,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE,CAAC,CAAA;oBAC/B,MAAM,IAAI,KAAK,CACb,kEAAkE,CACnE,CAAA;iBACF;gBAED,IAAI,CAAC,KAAK,CAAC,wBAAwB,QAAQ,CAAC,wBAAwB,EAAE,CAAC,CAAA;gBACvE,IAAI,CAAC,IAAI,CACP,2BAA2B,IAAI,oDAAoD,CACpF,CAAA;gBAED,wDAAwD;gBACxD,MAAM,YAAY,GAAG,MAAM,gBAAgB,CAAC,6BAA6B,CACvE,QAAQ,CAAC,wBAAwB,EACjC,mBAAmB,EACnB,OAAO,CACR,CAAA;gBAED,oEAAoE;gBACpE,+FAA+F;gBAC/F,IAAI,CAAC,IAAI,CACP,kEAAkE,CACnE,CAAA;gBACD,MAAM,gBAAgB,CAAC,iBAAiB,CAAC,YAAY,CAAC,SAAS,EAAE,IAAI,CAAC,CAAA;gBAEtE,IAAI,YAAY,CAAC,WAAW,CAAC,MAAM,GAAG,CAAC,EAAE;oBACvC,IAAI,CAAC,IAAI,CACP,+BAA+B,YAAY,CAAC,WAAW,CAAC,MAAM,8BAA8B,CAC7F,CAAA;iBACF;qBAAM;oBACL,IAAI,CAAC,IAAI,CACP,yEAAyE,CAC1E,CAAA;iBACF;gBAED,IAAI,CAAC,IAAI,CACP;kEAC0D,YAAY,CAAC,SAAS;kDACtC,YAAY,CAAC,UAAU;;uLAE8G,CAChL,CAAA;gBAED,cAAc,CAAC,aAAa,GAAG,mBAAmB,CAAC,GAAG,CACpD,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,gBAAgB,CAC9B,CAAA;gBACD,cAAc,CAAC,IAAI,GAAG,YAAY,CAAC,UAAU,CAAA;gBAC7C,cAAc,CAAC,WAAW,GAAG,YAAY,CAAC,WAAW,CAAA;aACtD;YACD,OAAO,cAAc,CAAA;QACvB,CAAC;KAAA;IAEK,gBAAgB,CACpB,IAAY,EACZ,IAAyB,EACzB,OAAqC;;YAErC,MAAM,kBAAkB,GAAG,IAAI,yCAAkB,EAAE,CAAA;YAEnD,MAAM,SAAS,GAAG,MAAM,kBAAkB,CAAC,aAAa,EAAE,CAAA;YAC1D,IAAI,SAAS,CAAC,KAAK,KAAK,CAAC,EAAE;gBACzB,MAAM,IAAI,KAAK,CACb,0DAA0D,CAC3D,CAAA;aACF;YAED,MAAM,kBAAkB,GAAG,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE;gBACzD,OAAO,QAAQ,CAAC,IAAI,KAAK,IAAI,CAAA;YAC/B,CAAC,CAAC,CAAA;YACF,IAAI,CAAC,kBAAkB,EAAE;gBACvB,MAAM,IAAI,KAAK,CAAC,6CAA6C,IAAI,EAAE,CAAC,CAAA;aACrE;YAED,MAAM,KAAK,GAAG,MAAM,kBAAkB,CAAC,iBAAiB,CACtD,kBAAkB,CAAC,IAAI,EACvB,kBAAkB,CAAC,wBAAwB,CAC5C,CAAA;YAED,IAAI,CAAC,IAAI,EAAE;gBACT,IAAI,GAAG,wCAAqB,EAAE,CAAA;aAC/B;YACD,IAAI,GAAG,gBAAS,CAAC,IAAI,CAAC,CAAA;YACtB,IAAI,GAAG,cAAO,CAAC,IAAI,CAAC,CAAA;YAEpB,4IAA4I;YAC5I,MAAM,qBAAqB,GAAG,iDAAwB,CACpD,IAAI,EACJ,KAAK,CAAC,KAAK,EACX,IAAI,EACJ,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,oBAAoB,KAAI,KAAK,CACvC,CAAA;YAED,IAAI,qBAAqB,CAAC,eAAe,CAAC,MAAM,KAAK,CAAC,EAAE;gBACtD,IAAI,CAAC,IAAI,CACP,sDAAsD,kBAAkB,CAAC,IAAI,EAAE,CAChF,CAAA;aACF;iBAAM;gBACL,4EAA4E;gBAC5E,MAAM,oCAA4B,CAChC,qBAAqB,CAAC,kBAAkB,CACzC,CAAA;gBACD,IAAI,CAAC,IAAI,CAAC,qDAAqD,CAAC,CAAA;gBAChE,MAAM,mCAA2B,CAC/B,qBAAqB,CAAC,kBAAkB,CACzC,CAAA;gBACD,MAAM,kBAAkB,CAAC,sBAAsB,CAC7C,qBAAqB,CAAC,eAAe,CACtC,CAAA;aACF;YAED,OAAO;gBACL,YAAY,EAAE,IAAI;gBAClB,YAAY,EAAE,qBAAqB,CAAC,oBAAoB;aACzD,CAAA;QACH,CAAC;KAAA;IAEK,oBAAoB,CACxB,IAAyB;;YAEzB,MAAM,kBAAkB,GAAG,IAAI,yCAAkB,EAAE,CAAA;YAEnD,MAAM,QAAQ,GAAuB,EAAE,CAAA;YACvC,MAAM,SAAS,GAAG,MAAM,kBAAkB,CAAC,aAAa,EAAE,CAAA;YAC1D,IAAI,SAAS,CAAC,KAAK,KAAK,CAAC,EAAE;gBACzB,IAAI,CAAC,IAAI,CAAC,0DAA0D,CAAC,CAAA;gBACrE,OAAO,
QAAQ,CAAA;aAChB;YAED,IAAI,CAAC,IAAI,EAAE;gBACT,IAAI,GAAG,wCAAqB,EAAE,CAAA;aAC/B;YACD,IAAI,GAAG,gBAAS,CAAC,IAAI,CAAC,CAAA;YACtB,IAAI,GAAG,cAAO,CAAC,IAAI,CAAC,CAAA;YAEpB,IAAI,mBAAmB,GAAG,CAAC,CAAA;YAC3B,OAAO,mBAAmB,GAAG,SAAS,CAAC,KAAK,EAAE;gBAC5C,MAAM,yBAAyB,GAAG,SAAS,CAAC,KAAK,CAAC,mBAAmB,CAAC,CAAA;gBACtE,mBAAmB,IAAI,CAAC,CAAA;gBACxB,IAAI,CAAC,IAAI,CACP,iCAAiC,yBAAyB,CAAC,IAAI,MAAM,mBAAmB,IAAI,SAAS,CAAC,KAAK,EAAE,CAC9G,CAAA;gBAED,kDAAkD;gBAClD,MAAM,KAAK,GAAG,MAAM,kBAAkB,CAAC,iBAAiB,CACtD,yBAAyB,CAAC,IAAI,EAC9B,yBAAyB,CAAC,wBAAwB,CACnD,CAAA;gBAED,MAAM,qBAAqB,GAAG,iDAAwB,CACpD,yBAAyB,CAAC,IAAI,EAC9B,KAAK,CAAC,KAAK,EACX,IAAI,EACJ,IAAI,CACL,CAAA;gBACD,IAAI,qBAAqB,CAAC,eAAe,CAAC,MAAM,KAAK,CAAC,EAAE;oBACtD,IAAI,CAAC,IAAI,CACP,qDAAqD,yBAAyB,CAAC,IAAI,EAAE,CACtF,CAAA;iBACF;qBAAM;oBACL,MAAM,oCAA4B,CAChC,qBAAqB,CAAC,kBAAkB,CACzC,CAAA;oBACD,MAAM,mCAA2B,CAC/B,qBAAqB,CAAC,kBAAkB,CACzC,CAAA;oBACD,MAAM,kBAAkB,CAAC,sBAAsB,CAC7C,qBAAqB,CAAC,eAAe,CACtC,CAAA;iBACF;gBAED,QAAQ,CAAC,IAAI,CAAC;oBACZ,YAAY,EAAE,yBAAyB,CAAC,IAAI;oBAC5C,YAAY,EAAE,qBAAqB,CAAC,oBAAoB;iBACzD,CAAC,CAAA;aACH;YACD,OAAO,QAAQ,CAAA;QACjB,CAAC;KAAA;CACF;AAhOD,sDAgOC"}
|
||||
1
node_modules/@actions/artifact/lib/internal/config-variables.js
generated
vendored
@ -1,5 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getRetentionDays = exports.getWorkSpaceDirectory = exports.getWorkFlowRunId = exports.getRuntimeUrl = exports.getRuntimeToken = exports.getDownloadFileConcurrency = exports.getInitialRetryIntervalInMilliseconds = exports.getRetryMultiplier = exports.getRetryLimit = exports.getUploadChunkSize = exports.getUploadFileConcurrency = void 0;
// The number of concurrent uploads that happens at the same time
function getUploadFileConcurrency() {
return 2;
2
node_modules/@actions/artifact/lib/internal/config-variables.js.map
generated
vendored
@ -1 +1 @@
|
|||
{"version":3,"file":"config-variables.js","sourceRoot":"","sources":["../../src/internal/config-variables.ts"],"names":[],"mappings":";;AAAA,iEAAiE;AACjE,SAAgB,wBAAwB;IACtC,OAAO,CAAC,CAAA;AACV,CAAC;AAFD,4DAEC;AAED,2FAA2F;AAC3F,4CAA4C;AAC5C,SAAgB,kBAAkB;IAChC,OAAO,CAAC,GAAG,IAAI,GAAG,IAAI,CAAA,CAAC,cAAc;AACvC,CAAC;AAFD,gDAEC;AAED,yFAAyF;AACzF,SAAgB,aAAa;IAC3B,OAAO,CAAC,CAAA;AACV,CAAC;AAFD,sCAEC;AAED,wGAAwG;AACxG,0GAA0G;AAC1G,SAAgB,kBAAkB;IAChC,OAAO,GAAG,CAAA;AACZ,CAAC;AAFD,gDAEC;AAED,yGAAyG;AACzG,SAAgB,qCAAqC;IACnD,OAAO,IAAI,CAAA;AACb,CAAC;AAFD,sFAEC;AAED,mEAAmE;AACnE,SAAgB,0BAA0B;IACxC,OAAO,CAAC,CAAA;AACV,CAAC;AAFD,gEAEC;AAED,SAAgB,eAAe;IAC7B,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,EAAE;QACV,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;KACpE;IACD,OAAO,KAAK,CAAA;AACd,CAAC;AAND,0CAMC;AAED,SAAgB,aAAa;IAC3B,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,qBAAqB,CAAC,CAAA;IACrD,IAAI,CAAC,UAAU,EAAE;QACf,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAA;KAClE;IACD,OAAO,UAAU,CAAA;AACnB,CAAC;AAND,sCAMC;AAED,SAAgB,gBAAgB;IAC9B,MAAM,aAAa,GAAG,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,CAAA;IAClD,IAAI,CAAC,aAAa,EAAE;QAClB,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAA;KAC5D;IACD,OAAO,aAAa,CAAA;AACtB,CAAC;AAND,4CAMC;AAED,SAAgB,qBAAqB;IACnC,MAAM,kBAAkB,GAAG,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAA;IAC1D,IAAI,CAAC,kBAAkB,EAAE;QACvB,MAAM,IAAI,KAAK,CAAC,6CAA6C,CAAC,CAAA;KAC/D;IACD,OAAO,kBAAkB,CAAA;AAC3B,CAAC;AAND,sDAMC;AAED,SAAgB,gBAAgB;IAC9B,OAAO,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAA;AAC7C,CAAC;AAFD,4CAEC"}
|
||||
{"version":3,"file":"config-variables.js","sourceRoot":"","sources":["../../src/internal/config-variables.ts"],"names":[],"mappings":";;;AAAA,iEAAiE;AACjE,SAAgB,wBAAwB;IACtC,OAAO,CAAC,CAAA;AACV,CAAC;AAFD,4DAEC;AAED,2FAA2F;AAC3F,4CAA4C;AAC5C,SAAgB,kBAAkB;IAChC,OAAO,CAAC,GAAG,IAAI,GAAG,IAAI,CAAA,CAAC,cAAc;AACvC,CAAC;AAFD,gDAEC;AAED,yFAAyF;AACzF,SAAgB,aAAa;IAC3B,OAAO,CAAC,CAAA;AACV,CAAC;AAFD,sCAEC;AAED,wGAAwG;AACxG,0GAA0G;AAC1G,SAAgB,kBAAkB;IAChC,OAAO,GAAG,CAAA;AACZ,CAAC;AAFD,gDAEC;AAED,yGAAyG;AACzG,SAAgB,qCAAqC;IACnD,OAAO,IAAI,CAAA;AACb,CAAC;AAFD,sFAEC;AAED,mEAAmE;AACnE,SAAgB,0BAA0B;IACxC,OAAO,CAAC,CAAA;AACV,CAAC;AAFD,gEAEC;AAED,SAAgB,eAAe;IAC7B,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,EAAE;QACV,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;KACpE;IACD,OAAO,KAAK,CAAA;AACd,CAAC;AAND,0CAMC;AAED,SAAgB,aAAa;IAC3B,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,qBAAqB,CAAC,CAAA;IACrD,IAAI,CAAC,UAAU,EAAE;QACf,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAA;KAClE;IACD,OAAO,UAAU,CAAA;AACnB,CAAC;AAND,sCAMC;AAED,SAAgB,gBAAgB;IAC9B,MAAM,aAAa,GAAG,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,CAAA;IAClD,IAAI,CAAC,aAAa,EAAE;QAClB,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAA;KAC5D;IACD,OAAO,aAAa,CAAA;AACtB,CAAC;AAND,4CAMC;AAED,SAAgB,qBAAqB;IACnC,MAAM,kBAAkB,GAAG,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAA;IAC1D,IAAI,CAAC,kBAAkB,EAAE;QACvB,MAAM,IAAI,KAAK,CAAC,6CAA6C,CAAC,CAAA;KAC/D;IACD,OAAO,kBAAkB,CAAA;AAC3B,CAAC;AAND,sDAMC;AAED,SAAgB,gBAAgB;IAC9B,OAAO,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAA;AAC7C,CAAC;AAFD,4CAEC"}
|
||||
10
node_modules/@actions/artifact/lib/internal/contracts.d.ts
generated
vendored
@ -25,8 +25,18 @@ export interface PatchArtifactSizeSuccessResponse {
uploadUrl: string;
}
export interface UploadResults {
/**
* The size in bytes of data that was transferred during the upload process to the actions backend service. This takes into account possible
* gzip compression to reduce the amount of data that needs to be transferred
*/
uploadSize: number;
/**
* The raw size of the files that were specified for upload
*/
totalSize: number;
/**
* An array of files that failed to upload
*/
failedItems: string[];
}
export interface ListArtifactsResponse {
30
node_modules/@actions/artifact/lib/internal/download-http-client.js
generated
vendored
@ -1,4 +1,23 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
|
|
@ -8,14 +27,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.DownloadHttpClient = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const zlib = __importStar(require("zlib"));
|
||||
|
|
@ -167,9 +180,6 @@ class DownloadHttpClient {
|
|||
let response;
|
||||
try {
|
||||
response = yield makeDownloadRequest();
|
||||
if (core.isDebug()) {
|
||||
utils_1.displayHttpDiagnostics(response);
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
// if an error is caught, it is usually indicative of a timeout so retry the download
|
||||
|
|
|
|||
2
node_modules/@actions/artifact/lib/internal/download-http-client.js.map
generated
vendored
File diff suppressed because one or more lines are too long
17
node_modules/@actions/artifact/lib/internal/download-specification.js
generated
vendored
@ -1,12 +1,25 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getDownloadSpecification = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
/**
|
||||
* Creates a specification for a set of files that will be downloaded
|
||||
|
|
|
|||
2
node_modules/@actions/artifact/lib/internal/download-specification.js.map
generated
vendored
@ -1 +1 @@
|
|||
{"version":3,"file":"download-specification.js","sourceRoot":"","sources":["../../src/internal/download-specification.ts"],"names":[],"mappings":";;;;;;;;;AAAA,2CAA4B;AAyB5B;;;;;;GAMG;AACH,SAAgB,wBAAwB,CACtC,YAAoB,EACpB,eAAiC,EACjC,YAAoB,EACpB,oBAA6B;IAE7B,oEAAoE;IACpE,MAAM,WAAW,GAAG,IAAI,GAAG,EAAU,CAAA;IAErC,MAAM,cAAc,GAA0B;QAC5C,oBAAoB,EAAE,oBAAoB;YACxC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,YAAY,CAAC;YACvC,CAAC,CAAC,YAAY;QAChB,kBAAkB,EAAE,EAAE;QACtB,kBAAkB,EAAE,EAAE;QACtB,eAAe,EAAE,EAAE;KACpB,CAAA;IAED,KAAK,MAAM,KAAK,IAAI,eAAe,EAAE;QACnC,wEAAwE;QACxE,IACE,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,YAAY,GAAG,CAAC;YACzC,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,YAAY,IAAI,CAAC,EAC1C;YACA,2CAA2C;YAC3C,MAAM,mBAAmB,GAAG,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;YACtD,oIAAoI;YACpI,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CACxB,YAAY,EACZ,oBAAoB;gBAClB,CAAC,CAAC,mBAAmB;gBACrB,CAAC,CAAC,mBAAmB,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE,CAAC,CAClD,CAAA;YAED,2GAA2G;YAC3G,6FAA6F;YAC7F,IAAI,KAAK,CAAC,QAAQ,KAAK,MAAM,EAAE;gBAC7B,wFAAwF;gBACxF,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAA;gBACvC,IAAI,KAAK,CAAC,UAAU,KAAK,CAAC,EAAE;oBAC1B,kGAAkG;oBAClG,cAAc,CAAC,kBAAkB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;iBACjD;qBAAM;oBACL,cAAc,CAAC,eAAe,CAAC,IAAI,CAAC;wBAClC,cAAc,EAAE,KAAK,CAAC,eAAe;wBACrC,UAAU,EAAE,QAAQ;qBACrB,CAAC,CAAA;iBACH;aACF;SACF;KACF;IAED,cAAc,CAAC,kBAAkB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,CAAA;IAC3D,OAAO,cAAc,CAAA;AACvB,CAAC;AAtDD,4DAsDC"}
|
||||
{"version":3,"file":"download-specification.js","sourceRoot":"","sources":["../../src/internal/download-specification.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA4B;AAyB5B;;;;;;GAMG;AACH,SAAgB,wBAAwB,CACtC,YAAoB,EACpB,eAAiC,EACjC,YAAoB,EACpB,oBAA6B;IAE7B,oEAAoE;IACpE,MAAM,WAAW,GAAG,IAAI,GAAG,EAAU,CAAA;IAErC,MAAM,cAAc,GAA0B;QAC5C,oBAAoB,EAAE,oBAAoB;YACxC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,YAAY,CAAC;YACvC,CAAC,CAAC,YAAY;QAChB,kBAAkB,EAAE,EAAE;QACtB,kBAAkB,EAAE,EAAE;QACtB,eAAe,EAAE,EAAE;KACpB,CAAA;IAED,KAAK,MAAM,KAAK,IAAI,eAAe,EAAE;QACnC,wEAAwE;QACxE,IACE,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,YAAY,GAAG,CAAC;YACzC,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,YAAY,IAAI,CAAC,EAC1C;YACA,2CAA2C;YAC3C,MAAM,mBAAmB,GAAG,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;YACtD,oIAAoI;YACpI,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CACxB,YAAY,EACZ,oBAAoB;gBAClB,CAAC,CAAC,mBAAmB;gBACrB,CAAC,CAAC,mBAAmB,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE,CAAC,CAClD,CAAA;YAED,2GAA2G;YAC3G,6FAA6F;YAC7F,IAAI,KAAK,CAAC,QAAQ,KAAK,MAAM,EAAE;gBAC7B,wFAAwF;gBACxF,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAA;gBACvC,IAAI,KAAK,CAAC,UAAU,KAAK,CAAC,EAAE;oBAC1B,kGAAkG;oBAClG,cAAc,CAAC,kBAAkB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;iBACjD;qBAAM;oBACL,cAAc,CAAC,eAAe,CAAC,IAAI,CAAC;wBAClC,cAAc,EAAE,KAAK,CAAC,eAAe;wBACrC,UAAU,EAAE,QAAQ;qBACrB,CAAC,CAAA;iBACH;aACF;SACF;KACF;IAED,cAAc,CAAC,kBAAkB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,CAAA;IAC3D,OAAO,cAAc,CAAA;AACvB,CAAC;AAtDD,4DAsDC"}
|
||||
1
node_modules/@actions/artifact/lib/internal/http-manager.js
generated
vendored
@ -1,5 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.HttpManager = void 0;
const utils_1 = require("./utils");
/**
* Used for managing http clients during either upload or download
2
node_modules/@actions/artifact/lib/internal/http-manager.js.map
generated
vendored
@ -1 +1 @@
|
|||
{"version":3,"file":"http-manager.js","sourceRoot":"","sources":["../../src/internal/http-manager.ts"],"names":[],"mappings":";;AACA,mCAAwC;AAExC;;GAEG;AACH,MAAa,WAAW;IAItB,YAAY,WAAmB,EAAE,SAAiB;QAChD,IAAI,WAAW,GAAG,CAAC,EAAE;YACnB,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAA;SACrD;QACD,IAAI,CAAC,SAAS,GAAG,SAAS,CAAA;QAC1B,IAAI,CAAC,OAAO,GAAG,IAAI,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,wBAAgB,CAAC,SAAS,CAAC,CAAC,CAAA;IACzE,CAAC;IAED,SAAS,CAAC,KAAa;QACrB,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAA;IAC5B,CAAC;IAED,mGAAmG;IACnG,+HAA+H;IAC/H,uBAAuB,CAAC,KAAa;QACnC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CAAA;QAC7B,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,GAAG,wBAAgB,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;IACxD,CAAC;IAED,2BAA2B;QACzB,KAAK,MAAM,CAAC,KAAK,CAAC,IAAI,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE;YAC5C,IAAI,CAAC,uBAAuB,CAAC,KAAK,CAAC,CAAA;SACpC;IACH,CAAC;CACF;AA5BD,kCA4BC"}
|
||||
{"version":3,"file":"http-manager.js","sourceRoot":"","sources":["../../src/internal/http-manager.ts"],"names":[],"mappings":";;;AACA,mCAAwC;AAExC;;GAEG;AACH,MAAa,WAAW;IAItB,YAAY,WAAmB,EAAE,SAAiB;QAChD,IAAI,WAAW,GAAG,CAAC,EAAE;YACnB,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAA;SACrD;QACD,IAAI,CAAC,SAAS,GAAG,SAAS,CAAA;QAC1B,IAAI,CAAC,OAAO,GAAG,IAAI,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,wBAAgB,CAAC,SAAS,CAAC,CAAC,CAAA;IACzE,CAAC;IAED,SAAS,CAAC,KAAa;QACrB,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAA;IAC5B,CAAC;IAED,mGAAmG;IACnG,+HAA+H;IAC/H,uBAAuB,CAAC,KAAa;QACnC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CAAA;QAC7B,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,GAAG,wBAAgB,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;IACxD,CAAC;IAED,2BAA2B;QACzB,KAAK,MAAM,CAAC,KAAK,CAAC,IAAI,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE;YAC5C,IAAI,CAAC,uBAAuB,CAAC,KAAK,CAAC,CAAA;SACpC;IACH,CAAC;CACF;AA5BD,kCA4BC"}
|
||||
8
node_modules/@actions/artifact/lib/internal/path-and-artifact-name-validation.d.ts
generated
vendored
Normal file
@ -0,0 +1,8 @@
/**
* Scans the name of the artifact to make sure there are no illegal characters
*/
export declare function checkArtifactName(name: string): void;
/**
* Scans the name of the filePath used to make sure there are no illegal characters
*/
export declare function checkArtifactFilePath(path: string): void;
67
node_modules/@actions/artifact/lib/internal/path-and-artifact-name-validation.js
generated
vendored
Normal file
@ -0,0 +1,67 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.checkArtifactFilePath = exports.checkArtifactName = void 0;
|
||||
const core_1 = require("@actions/core");
|
||||
/**
|
||||
* Invalid characters that cannot be in the artifact name or an uploaded file. Will be rejected
|
||||
* from the server if attempted to be sent over. These characters are not allowed due to limitations with certain
|
||||
* file systems such as NTFS. To maintain platform-agnostic behavior, all characters that are not supported by an
|
||||
* individual filesystem/platform will not be supported on all fileSystems/platforms
|
||||
*
|
||||
* FilePaths can include characters such as \ and / which are not permitted in the artifact name alone
|
||||
*/
|
||||
const invalidArtifactFilePathCharacters = new Map([
|
||||
['"', ' Double quote "'],
|
||||
[':', ' Colon :'],
|
||||
['<', ' Less than <'],
|
||||
['>', ' Greater than >'],
|
||||
['|', ' Vertical bar |'],
|
||||
['*', ' Asterisk *'],
|
||||
['?', ' Question mark ?'],
|
||||
['\r', ' Carriage return \\r'],
|
||||
['\n', ' Line feed \\n']
|
||||
]);
|
||||
const invalidArtifactNameCharacters = new Map([
|
||||
...invalidArtifactFilePathCharacters,
|
||||
['\\', ' Backslash \\'],
|
||||
['/', ' Forward slash /']
|
||||
]);
|
||||
/**
|
||||
* Scans the name of the artifact to make sure there are no illegal characters
|
||||
*/
|
||||
function checkArtifactName(name) {
|
||||
if (!name) {
|
||||
throw new Error(`Artifact name: ${name}, is incorrectly provided`);
|
||||
}
|
||||
for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactNameCharacters) {
|
||||
if (name.includes(invalidCharacterKey)) {
|
||||
throw new Error(`Artifact name is not valid: ${name}. Contains the following character: ${errorMessageForCharacter}
|
||||
|
||||
Invalid characters include: ${Array.from(invalidArtifactNameCharacters.values()).toString()}
|
||||
|
||||
These characters are not allowed in the artifact name due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.`);
|
||||
}
|
||||
}
|
||||
core_1.info(`Artifact name is valid!`);
|
||||
}
|
||||
exports.checkArtifactName = checkArtifactName;
|
||||
/**
|
||||
* Scans the name of the filePath used to make sure there are no illegal characters
|
||||
*/
|
||||
function checkArtifactFilePath(path) {
|
||||
if (!path) {
|
||||
throw new Error(`Artifact path: ${path}, is incorrectly provided`);
|
||||
}
|
||||
for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) {
|
||||
if (path.includes(invalidCharacterKey)) {
|
||||
throw new Error(`Artifact path is not valid: ${path}. Contains the following character: ${errorMessageForCharacter}
|
||||
|
||||
Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()}
|
||||
|
||||
The following characters are not allowed in files that are uploaded due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.
|
||||
`);
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.checkArtifactFilePath = checkArtifactFilePath;
|
||||
//# sourceMappingURL=path-and-artifact-name-validation.js.map
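The new path-and-artifact-name-validation module above (added by the @actions/artifact 1.0.0 bump recorded in .package-lock.json) rejects characters that NTFS and similar file systems cannot store. A hedged usage sketch of the two exported checks; this is an internal helper of the vendored package, so the deep import path and the example names are for illustration only:

    import {
      checkArtifactName,
      checkArtifactFilePath,
    } from "@actions/artifact/lib/internal/path-and-artifact-name-validation";

    checkArtifactName("my-artifact");            // passes and logs "Artifact name is valid!"
    checkArtifactFilePath("db-cluster/db.zip");  // '/' is allowed in paths, but not in bare artifact names

    try {
      checkArtifactName("bad:name");             // ':' is on the invalid-character list
    } catch (e) {
      console.log((e as Error).message);         // message explains which character was rejected
    }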
1
node_modules/@actions/artifact/lib/internal/path-and-artifact-name-validation.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
|||
{"version":3,"file":"path-and-artifact-name-validation.js","sourceRoot":"","sources":["../../src/internal/path-and-artifact-name-validation.ts"],"names":[],"mappings":";;;AAAA,wCAAkC;AAElC;;;;;;;GAOG;AACH,MAAM,iCAAiC,GAAG,IAAI,GAAG,CAAiB;IAChE,CAAC,GAAG,EAAE,iBAAiB,CAAC;IACxB,CAAC,GAAG,EAAE,UAAU,CAAC;IACjB,CAAC,GAAG,EAAE,cAAc,CAAC;IACrB,CAAC,GAAG,EAAE,iBAAiB,CAAC;IACxB,CAAC,GAAG,EAAE,iBAAiB,CAAC;IACxB,CAAC,GAAG,EAAE,aAAa,CAAC;IACpB,CAAC,GAAG,EAAE,kBAAkB,CAAC;IACzB,CAAC,IAAI,EAAE,sBAAsB,CAAC;IAC9B,CAAC,IAAI,EAAE,gBAAgB,CAAC;CACzB,CAAC,CAAA;AAEF,MAAM,6BAA6B,GAAG,IAAI,GAAG,CAAiB;IAC5D,GAAG,iCAAiC;IACpC,CAAC,IAAI,EAAE,eAAe,CAAC;IACvB,CAAC,GAAG,EAAE,kBAAkB,CAAC;CAC1B,CAAC,CAAA;AAEF;;GAEG;AACH,SAAgB,iBAAiB,CAAC,IAAY;IAC5C,IAAI,CAAC,IAAI,EAAE;QACT,MAAM,IAAI,KAAK,CAAC,kBAAkB,IAAI,2BAA2B,CAAC,CAAA;KACnE;IAED,KAAK,MAAM,CACT,mBAAmB,EACnB,wBAAwB,CACzB,IAAI,6BAA6B,EAAE;QAClC,IAAI,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC,EAAE;YACtC,MAAM,IAAI,KAAK,CACb,+BAA+B,IAAI,uCAAuC,wBAAwB;;8BAE5E,KAAK,CAAC,IAAI,CAC9B,6BAA6B,CAAC,MAAM,EAAE,CACvC,CAAC,QAAQ,EAAE;;mRAE+P,CAC5Q,CAAA;SACF;KACF;IAED,WAAI,CAAC,yBAAyB,CAAC,CAAA;AACjC,CAAC;AAvBD,8CAuBC;AAED;;GAEG;AACH,SAAgB,qBAAqB,CAAC,IAAY;IAChD,IAAI,CAAC,IAAI,EAAE;QACT,MAAM,IAAI,KAAK,CAAC,kBAAkB,IAAI,2BAA2B,CAAC,CAAA;KACnE;IAED,KAAK,MAAM,CACT,mBAAmB,EACnB,wBAAwB,CACzB,IAAI,iCAAiC,EAAE;QACtC,IAAI,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC,EAAE;YACtC,MAAM,IAAI,KAAK,CACb,+BAA+B,IAAI,uCAAuC,wBAAwB;;8BAE5E,KAAK,CAAC,IAAI,CAC9B,iCAAiC,CAAC,MAAM,EAAE,CAC3C,CAAC,QAAQ,EAAE;;;WAGT,CACJ,CAAA;SACF;KACF;AACH,CAAC;AAtBD,sDAsBC"}
|
||||
27
node_modules/@actions/artifact/lib/internal/requestUtils.js
generated
vendored
@ -1,4 +1,23 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
|
|
@ -8,14 +27,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.retryHttpClientRequest = exports.retry = void 0;
|
||||
const utils_1 = require("./utils");
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const config_variables_1 = require("./config-variables");
|
||||
|
|
|
|||
2
node_modules/@actions/artifact/lib/internal/requestUtils.js.map
generated
vendored
@ -1 +1 @@
|
|||
{"version":3,"file":"requestUtils.js","sourceRoot":"","sources":["../../src/internal/requestUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AACA,mCAMgB;AAChB,oDAAqC;AACrC,yDAAgD;AAEhD,SAAsB,KAAK,CACzB,IAAY,EACZ,SAA6C,EAC7C,mBAAwC,EACxC,WAAmB;;QAEnB,IAAI,QAAQ,GAAoC,SAAS,CAAA;QACzD,IAAI,UAAU,GAAuB,SAAS,CAAA;QAC9C,IAAI,WAAW,GAAG,KAAK,CAAA;QACvB,IAAI,YAAY,GAAG,EAAE,CAAA;QACrB,IAAI,sBAAsB,GAAuB,SAAS,CAAA;QAC1D,IAAI,OAAO,GAAG,CAAC,CAAA;QAEf,OAAO,OAAO,IAAI,WAAW,EAAE;YAC7B,IAAI;gBACF,QAAQ,GAAG,MAAM,SAAS,EAAE,CAAA;gBAC5B,UAAU,GAAG,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAA;gBAExC,IAAI,2BAAmB,CAAC,UAAU,CAAC,EAAE;oBACnC,OAAO,QAAQ,CAAA;iBAChB;gBAED,uFAAuF;gBACvF,IAAI,UAAU,EAAE;oBACd,sBAAsB,GAAG,mBAAmB,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;iBAC7D;gBAED,WAAW,GAAG,6BAAqB,CAAC,UAAU,CAAC,CAAA;gBAC/C,YAAY,GAAG,mCAAmC,UAAU,EAAE,CAAA;aAC/D;YAAC,OAAO,KAAK,EAAE;gBACd,WAAW,GAAG,IAAI,CAAA;gBAClB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAA;aAC7B;YAED,IAAI,CAAC,WAAW,EAAE;gBAChB,IAAI,CAAC,IAAI,CAAC,GAAG,IAAI,2BAA2B,CAAC,CAAA;gBAC7C,IAAI,QAAQ,EAAE;oBACZ,8BAAsB,CAAC,QAAQ,CAAC,CAAA;iBACjC;gBACD,MAAK;aACN;YAED,IAAI,CAAC,IAAI,CACP,GAAG,IAAI,cAAc,OAAO,OAAO,WAAW,uBAAuB,YAAY,EAAE,CACpF,CAAA;YAED,MAAM,aAAK,CAAC,6CAAqC,CAAC,OAAO,CAAC,CAAC,CAAA;YAC3D,OAAO,EAAE,CAAA;SACV;QAED,IAAI,QAAQ,EAAE;YACZ,8BAAsB,CAAC,QAAQ,CAAC,CAAA;SACjC;QAED,IAAI,sBAAsB,EAAE;YAC1B,MAAM,KAAK,CAAC,GAAG,IAAI,YAAY,sBAAsB,EAAE,CAAC,CAAA;SACzD;QACD,MAAM,KAAK,CAAC,GAAG,IAAI,YAAY,YAAY,EAAE,CAAC,CAAA;IAChD,CAAC;CAAA;AA1DD,sBA0DC;AAED,SAAsB,sBAAsB,CAC1C,IAAY,EACZ,MAA0C,EAC1C,sBAA2C,IAAI,GAAG,EAAE,EACpD,WAAW,GAAG,gCAAa,EAAE;;QAE7B,OAAO,MAAM,KAAK,CAAC,IAAI,EAAE,MAAM,EAAE,mBAAmB,EAAE,WAAW,CAAC,CAAA;IACpE,CAAC;CAAA;AAPD,wDAOC"}
|
||||
{"version":3,"file":"requestUtils.js","sourceRoot":"","sources":["../../src/internal/requestUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AACA,mCAMgB;AAChB,oDAAqC;AACrC,yDAAgD;AAEhD,SAAsB,KAAK,CACzB,IAAY,EACZ,SAA6C,EAC7C,mBAAwC,EACxC,WAAmB;;QAEnB,IAAI,QAAQ,GAAoC,SAAS,CAAA;QACzD,IAAI,UAAU,GAAuB,SAAS,CAAA;QAC9C,IAAI,WAAW,GAAG,KAAK,CAAA;QACvB,IAAI,YAAY,GAAG,EAAE,CAAA;QACrB,IAAI,sBAAsB,GAAuB,SAAS,CAAA;QAC1D,IAAI,OAAO,GAAG,CAAC,CAAA;QAEf,OAAO,OAAO,IAAI,WAAW,EAAE;YAC7B,IAAI;gBACF,QAAQ,GAAG,MAAM,SAAS,EAAE,CAAA;gBAC5B,UAAU,GAAG,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAA;gBAExC,IAAI,2BAAmB,CAAC,UAAU,CAAC,EAAE;oBACnC,OAAO,QAAQ,CAAA;iBAChB;gBAED,uFAAuF;gBACvF,IAAI,UAAU,EAAE;oBACd,sBAAsB,GAAG,mBAAmB,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;iBAC7D;gBAED,WAAW,GAAG,6BAAqB,CAAC,UAAU,CAAC,CAAA;gBAC/C,YAAY,GAAG,mCAAmC,UAAU,EAAE,CAAA;aAC/D;YAAC,OAAO,KAAK,EAAE;gBACd,WAAW,GAAG,IAAI,CAAA;gBAClB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAA;aAC7B;YAED,IAAI,CAAC,WAAW,EAAE;gBAChB,IAAI,CAAC,IAAI,CAAC,GAAG,IAAI,2BAA2B,CAAC,CAAA;gBAC7C,IAAI,QAAQ,EAAE;oBACZ,8BAAsB,CAAC,QAAQ,CAAC,CAAA;iBACjC;gBACD,MAAK;aACN;YAED,IAAI,CAAC,IAAI,CACP,GAAG,IAAI,cAAc,OAAO,OAAO,WAAW,uBAAuB,YAAY,EAAE,CACpF,CAAA;YAED,MAAM,aAAK,CAAC,6CAAqC,CAAC,OAAO,CAAC,CAAC,CAAA;YAC3D,OAAO,EAAE,CAAA;SACV;QAED,IAAI,QAAQ,EAAE;YACZ,8BAAsB,CAAC,QAAQ,CAAC,CAAA;SACjC;QAED,IAAI,sBAAsB,EAAE;YAC1B,MAAM,KAAK,CAAC,GAAG,IAAI,YAAY,sBAAsB,EAAE,CAAC,CAAA;SACzD;QACD,MAAM,KAAK,CAAC,GAAG,IAAI,YAAY,YAAY,EAAE,CAAC,CAAA;IAChD,CAAC;CAAA;AA1DD,sBA0DC;AAED,SAAsB,sBAAsB,CAC1C,IAAY,EACZ,MAA0C,EAC1C,sBAA2C,IAAI,GAAG,EAAE,EACpD,WAAW,GAAG,gCAAa,EAAE;;QAE7B,OAAO,MAAM,KAAK,CAAC,IAAI,EAAE,MAAM,EAAE,mBAAmB,EAAE,WAAW,CAAC,CAAA;IACpE,CAAC;CAAA;AAPD,wDAOC"}
|
||||
3
node_modules/@actions/artifact/lib/internal/status-reporter.d.ts
generated
vendored
@ -11,11 +11,10 @@ export declare class StatusReporter {
private displayFrequencyInMilliseconds;
private largeFiles;
private totalFileStatus;
private largeFileStatus;
constructor(displayFrequencyInMilliseconds: number);
setTotalNumberOfFilesToProcess(fileTotal: number): void;
start(): void;
updateLargeFileStatus(fileName: string, numerator: number, denominator: number): void;
updateLargeFileStatus(fileName: string, chunkStartIndex: number, chunkEndIndex: number, totalUploadFileSize: number): void;
stop(): void;
incrementProcessedCount(): void;
private formatPercentage;
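The declaration change above (also part of the @actions/artifact 1.0.0 bump) replaces the percentage-style progress call with one that reports the byte range of the chunk that just finished plus the total file size. An illustrative call sequence against the new signature; the numbers and file name are made up, and StatusReporter is an internal class of the vendored package:

    import { StatusReporter } from "@actions/artifact/lib/internal/status-reporter";

    const reporter = new StatusReporter(10000);   // display frequency in milliseconds
    reporter.setTotalNumberOfFilesToProcess(3);   // now also resets the processed counter
    reporter.start();

    // Old signature: reporter.updateLargeFileStatus("big.bin", uploadedChunks, totalChunks);
    // New signature: start/end byte of the finished chunk, then the total upload size.
    reporter.updateLargeFileStatus("big.bin", 0, 8_388_608, 104_857_600);

    reporter.incrementProcessedCount();
    reporter.stop();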
22
node_modules/@actions/artifact/lib/internal/status-reporter.js
generated
vendored
@ -1,5 +1,6 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.StatusReporter = void 0;
|
||||
const core_1 = require("@actions/core");
|
||||
/**
|
||||
* Status Reporter that displays information about the progress/status of an artifact that is being uploaded or downloaded
|
||||
|
|
@ -14,11 +15,11 @@ class StatusReporter {
|
|||
this.processedCount = 0;
|
||||
this.largeFiles = new Map();
|
||||
this.totalFileStatus = undefined;
|
||||
this.largeFileStatus = undefined;
|
||||
this.displayFrequencyInMilliseconds = displayFrequencyInMilliseconds;
|
||||
}
|
||||
setTotalNumberOfFilesToProcess(fileTotal) {
|
||||
this.totalNumberOfFilesToProcess = fileTotal;
|
||||
this.processedCount = 0;
|
||||
}
|
||||
start() {
|
||||
// displays information about the total upload/download status
|
||||
|
|
@ -27,30 +28,17 @@ class StatusReporter {
|
|||
const percentage = this.formatPercentage(this.processedCount, this.totalNumberOfFilesToProcess);
|
||||
core_1.info(`Total file count: ${this.totalNumberOfFilesToProcess} ---- Processed file #${this.processedCount} (${percentage.slice(0, percentage.indexOf('.') + 2)}%)`);
|
||||
}, this.displayFrequencyInMilliseconds);
|
||||
// displays extra information about any large files that take a significant amount of time to upload or download every 1 second
|
||||
this.largeFileStatus = setInterval(() => {
|
||||
for (const value of Array.from(this.largeFiles.values())) {
|
||||
core_1.info(value);
|
||||
}
|
||||
// delete all entries in the map after displaying the information so it will not be displayed again unless explicitly added
|
||||
this.largeFiles.clear();
|
||||
}, 1000);
|
||||
}
|
||||
// if there is a large file that is being uploaded in chunks, this is used to display extra information about the status of the upload
|
||||
updateLargeFileStatus(fileName, numerator, denominator) {
|
||||
updateLargeFileStatus(fileName, chunkStartIndex, chunkEndIndex, totalUploadFileSize) {
|
||||
// display 1 decimal place without any rounding
|
||||
const percentage = this.formatPercentage(numerator, denominator);
|
||||
const displayInformation = `Uploading ${fileName} (${percentage.slice(0, percentage.indexOf('.') + 2)}%)`;
|
||||
// any previously added display information should be overwritten for the specific large file because a map is being used
|
||||
this.largeFiles.set(fileName, displayInformation);
|
||||
const percentage = this.formatPercentage(chunkEndIndex, totalUploadFileSize);
|
||||
core_1.info(`Uploaded ${fileName} (${percentage.slice(0, percentage.indexOf('.') + 2)}%) bytes ${chunkStartIndex}:${chunkEndIndex}`);
|
||||
}
|
||||
stop() {
|
||||
if (this.totalFileStatus) {
|
||||
clearInterval(this.totalFileStatus);
|
||||
}
|
||||
if (this.largeFileStatus) {
|
||||
clearInterval(this.largeFileStatus);
|
||||
}
|
||||
}
|
||||
incrementProcessedCount() {
|
||||
this.processedCount++;
2 node_modules/@actions/artifact/lib/internal/status-reporter.js.map generated vendored
@ -1 +1 @@
{"version":3,"file":"status-reporter.js","sourceRoot":"","sources":["../../src/internal/status-reporter.ts"],"names":[],"mappings":";;AAAA,wCAAkC;AAElC;;;;;;GAMG;AAEH,MAAa,cAAc;IAQzB,YAAY,8BAAsC;QAP1C,gCAA2B,GAAG,CAAC,CAAA;QAC/B,mBAAc,GAAG,CAAC,CAAA;QAElB,eAAU,GAAG,IAAI,GAAG,EAAkB,CAAA;QAK5C,IAAI,CAAC,eAAe,GAAG,SAAS,CAAA;QAChC,IAAI,CAAC,eAAe,GAAG,SAAS,CAAA;QAChC,IAAI,CAAC,8BAA8B,GAAG,8BAA8B,CAAA;IACtE,CAAC;IAED,8BAA8B,CAAC,SAAiB;QAC9C,IAAI,CAAC,2BAA2B,GAAG,SAAS,CAAA;IAC9C,CAAC;IAED,KAAK;QACH,8DAA8D;QAC9D,IAAI,CAAC,eAAe,GAAG,WAAW,CAAC,GAAG,EAAE;YACtC,+CAA+C;YAC/C,MAAM,UAAU,GAAG,IAAI,CAAC,gBAAgB,CACtC,IAAI,CAAC,cAAc,EACnB,IAAI,CAAC,2BAA2B,CACjC,CAAA;YACD,WAAI,CACF,qBACE,IAAI,CAAC,2BACP,yBAAyB,IAAI,CAAC,cAAc,KAAK,UAAU,CAAC,KAAK,CAC/D,CAAC,EACD,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAC5B,IAAI,CACN,CAAA;QACH,CAAC,EAAE,IAAI,CAAC,8BAA8B,CAAC,CAAA;QAEvC,+HAA+H;QAC/H,IAAI,CAAC,eAAe,GAAG,WAAW,CAAC,GAAG,EAAE;YACtC,KAAK,MAAM,KAAK,IAAI,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE;gBACxD,WAAI,CAAC,KAAK,CAAC,CAAA;aACZ;YACD,2HAA2H;YAC3H,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAA;QACzB,CAAC,EAAE,IAAI,CAAC,CAAA;IACV,CAAC;IAED,sIAAsI;IACtI,qBAAqB,CACnB,QAAgB,EAChB,SAAiB,EACjB,WAAmB;QAEnB,+CAA+C;QAC/C,MAAM,UAAU,GAAG,IAAI,CAAC,gBAAgB,CAAC,SAAS,EAAE,WAAW,CAAC,CAAA;QAChE,MAAM,kBAAkB,GAAG,aAAa,QAAQ,KAAK,UAAU,CAAC,KAAK,CACnE,CAAC,EACD,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAC5B,IAAI,CAAA;QAEL,yHAAyH;QACzH,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,EAAE,kBAAkB,CAAC,CAAA;IACnD,CAAC;IAED,IAAI;QACF,IAAI,IAAI,CAAC,eAAe,EAAE;YACxB,aAAa,CAAC,IAAI,CAAC,eAAe,CAAC,CAAA;SACpC;QAED,IAAI,IAAI,CAAC,eAAe,EAAE;YACxB,aAAa,CAAC,IAAI,CAAC,eAAe,CAAC,CAAA;SACpC;IACH,CAAC;IAED,uBAAuB;QACrB,IAAI,CAAC,cAAc,EAAE,CAAA;IACvB,CAAC;IAEO,gBAAgB,CAAC,SAAiB,EAAE,WAAmB;QAC7D,0HAA0H;QAC1H,OAAO,CAAC,CAAC,SAAS,GAAG,WAAW,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAA;IAChE,CAAC;CACF;AAjFD,wCAiFC"}
{"version":3,"file":"status-reporter.js","sourceRoot":"","sources":["../../src/internal/status-reporter.ts"],"names":[],"mappings":";;;AAAA,wCAAkC;AAElC;;;;;;GAMG;AAEH,MAAa,cAAc;IAOzB,YAAY,8BAAsC;QAN1C,gCAA2B,GAAG,CAAC,CAAA;QAC/B,mBAAc,GAAG,CAAC,CAAA;QAElB,eAAU,GAAG,IAAI,GAAG,EAAkB,CAAA;QAI5C,IAAI,CAAC,eAAe,GAAG,SAAS,CAAA;QAChC,IAAI,CAAC,8BAA8B,GAAG,8BAA8B,CAAA;IACtE,CAAC;IAED,8BAA8B,CAAC,SAAiB;QAC9C,IAAI,CAAC,2BAA2B,GAAG,SAAS,CAAA;QAC5C,IAAI,CAAC,cAAc,GAAG,CAAC,CAAA;IACzB,CAAC;IAED,KAAK;QACH,8DAA8D;QAC9D,IAAI,CAAC,eAAe,GAAG,WAAW,CAAC,GAAG,EAAE;YACtC,+CAA+C;YAC/C,MAAM,UAAU,GAAG,IAAI,CAAC,gBAAgB,CACtC,IAAI,CAAC,cAAc,EACnB,IAAI,CAAC,2BAA2B,CACjC,CAAA;YACD,WAAI,CACF,qBACE,IAAI,CAAC,2BACP,yBAAyB,IAAI,CAAC,cAAc,KAAK,UAAU,CAAC,KAAK,CAC/D,CAAC,EACD,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAC5B,IAAI,CACN,CAAA;QACH,CAAC,EAAE,IAAI,CAAC,8BAA8B,CAAC,CAAA;IACzC,CAAC;IAED,sIAAsI;IACtI,qBAAqB,CACnB,QAAgB,EAChB,eAAuB,EACvB,aAAqB,EACrB,mBAA2B;QAE3B,+CAA+C;QAC/C,MAAM,UAAU,GAAG,IAAI,CAAC,gBAAgB,CAAC,aAAa,EAAE,mBAAmB,CAAC,CAAA;QAC5E,WAAI,CACF,YAAY,QAAQ,KAAK,UAAU,CAAC,KAAK,CACvC,CAAC,EACD,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAC5B,YAAY,eAAe,IAAI,aAAa,EAAE,CAChD,CAAA;IACH,CAAC;IAED,IAAI;QACF,IAAI,IAAI,CAAC,eAAe,EAAE;YACxB,aAAa,CAAC,IAAI,CAAC,eAAe,CAAC,CAAA;SACpC;IACH,CAAC;IAED,uBAAuB;QACrB,IAAI,CAAC,cAAc,EAAE,CAAA;IACvB,CAAC;IAEO,gBAAgB,CAAC,SAAiB,EAAE,WAAmB;QAC7D,0HAA0H;QAC1H,OAAO,CAAC,CAAC,SAAS,GAAG,WAAW,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAA;IAChE,CAAC;CACF;AAnED,wCAmEC"}
46 node_modules/@actions/artifact/lib/internal/upload-gzip.js generated vendored
@ -1,4 +1,23 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
|
|
@ -15,18 +34,25 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
|
|||
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
|
||||
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createGZipFileInBuffer = exports.createGZipFileOnDisk = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const zlib = __importStar(require("zlib"));
|
||||
const util_1 = require("util");
|
||||
const stat = util_1.promisify(fs.stat);
|
||||
/**
|
||||
* GZipping certain files that are already compressed will likely not yield further size reductions. Creating large temporary gzip
|
||||
* files then will just waste a lot of time before ultimately being discarded (especially for very large files).
|
||||
* If any of these types of files are encountered then on-disk gzip creation will be skipped and the original file will be uploaded as-is
|
||||
*/
|
||||
const gzipExemptFileExtensions = [
|
||||
'.gzip',
|
||||
'.zip',
|
||||
'.tar.lz',
|
||||
'.tar.gz',
|
||||
'.tar.bz2',
|
||||
'.7z'
|
||||
];
|
||||
/**
|
||||
* Creates a Gzip compressed file of an original file at the provided temporary filepath location
|
||||
* @param {string} originalFilePath filepath of whatever will be compressed. The original file will be unmodified
|
||||
|
|
@ -35,6 +61,12 @@ const stat = util_1.promisify(fs.stat);
|
|||
*/
|
||||
function createGZipFileOnDisk(originalFilePath, tempFilePath) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
for (const gzipExemptExtension of gzipExemptFileExtensions) {
|
||||
if (originalFilePath.endsWith(gzipExemptExtension)) {
|
||||
// return a really large number so that the original file gets uploaded
|
||||
return Number.MAX_SAFE_INTEGER;
|
||||
}
|
||||
}
|
||||
return new Promise((resolve, reject) => {
|
||||
const inputStream = fs.createReadStream(originalFilePath);
|
||||
const gzip = zlib.createGzip();
2 node_modules/@actions/artifact/lib/internal/upload-gzip.js.map generated vendored
@ -1 +1 @@
{"version":3,"file":"upload-gzip.js","sourceRoot":"","sources":["../../src/internal/upload-gzip.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,2CAA4B;AAC5B,+BAA8B;AAC9B,MAAM,IAAI,GAAG,gBAAS,CAAC,EAAE,CAAC,IAAI,CAAC,CAAA;AAE/B;;;;;GAKG;AACH,SAAsB,oBAAoB,CACxC,gBAAwB,EACxB,YAAoB;;QAEpB,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACrC,MAAM,WAAW,GAAG,EAAE,CAAC,gBAAgB,CAAC,gBAAgB,CAAC,CAAA;YACzD,MAAM,IAAI,GAAG,IAAI,CAAC,UAAU,EAAE,CAAA;YAC9B,MAAM,YAAY,GAAG,EAAE,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAA;YACvD,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC,CAAA;YACzC,YAAY,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAS,EAAE;gBACnC,qIAAqI;gBACrI,MAAM,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI,CAAA;gBAC5C,OAAO,CAAC,IAAI,CAAC,CAAA;YACf,CAAC,CAAA,CAAC,CAAA;YACF,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE;gBAC/B,sCAAsC;gBACtC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAA;gBAClB,MAAM,CAAA;YACR,CAAC,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;IACJ,CAAC;CAAA;AApBD,oDAoBC;AAED;;;;GAIG;AACH,SAAsB,sBAAsB,CAC1C,gBAAwB;;QAExB,OAAO,IAAI,OAAO,CAAC,CAAM,OAAO,EAAC,EAAE;;YACjC,MAAM,WAAW,GAAG,EAAE,CAAC,gBAAgB,CAAC,gBAAgB,CAAC,CAAA;YACzD,MAAM,IAAI,GAAG,IAAI,CAAC,UAAU,EAAE,CAAA;YAC9B,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;YACtB,8IAA8I;YAC9I,MAAM,MAAM,GAAG,EAAE,CAAA;;gBACjB,KAA0B,IAAA,SAAA,cAAA,IAAI,CAAA,UAAA;oBAAnB,MAAM,KAAK,iBAAA,CAAA;oBACpB,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;iBACnB;;;;;;;;;YACD,OAAO,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAA;QAChC,CAAC,CAAA,CAAC,CAAA;IACJ,CAAC;CAAA;AAdD,wDAcC"}
{"version":3,"file":"upload-gzip.js","sourceRoot":"","sources":["../../src/internal/upload-gzip.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,2CAA4B;AAC5B,+BAA8B;AAC9B,MAAM,IAAI,GAAG,gBAAS,CAAC,EAAE,CAAC,IAAI,CAAC,CAAA;AAE/B;;;;GAIG;AACH,MAAM,wBAAwB,GAAG;IAC/B,OAAO;IACP,MAAM;IACN,SAAS;IACT,SAAS;IACT,UAAU;IACV,KAAK;CACN,CAAA;AAED;;;;;GAKG;AACH,SAAsB,oBAAoB,CACxC,gBAAwB,EACxB,YAAoB;;QAEpB,KAAK,MAAM,mBAAmB,IAAI,wBAAwB,EAAE;YAC1D,IAAI,gBAAgB,CAAC,QAAQ,CAAC,mBAAmB,CAAC,EAAE;gBAClD,uEAAuE;gBACvE,OAAO,MAAM,CAAC,gBAAgB,CAAA;aAC/B;SACF;QAED,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACrC,MAAM,WAAW,GAAG,EAAE,CAAC,gBAAgB,CAAC,gBAAgB,CAAC,CAAA;YACzD,MAAM,IAAI,GAAG,IAAI,CAAC,UAAU,EAAE,CAAA;YAC9B,MAAM,YAAY,GAAG,EAAE,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAA;YACvD,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC,CAAA;YACzC,YAAY,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAS,EAAE;gBACnC,qIAAqI;gBACrI,MAAM,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI,CAAA;gBAC5C,OAAO,CAAC,IAAI,CAAC,CAAA;YACf,CAAC,CAAA,CAAC,CAAA;YACF,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE;gBAC/B,sCAAsC;gBACtC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAA;gBAClB,MAAM,CAAA;YACR,CAAC,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;IACJ,CAAC;CAAA;AA3BD,oDA2BC;AAED;;;;GAIG;AACH,SAAsB,sBAAsB,CAC1C,gBAAwB;;QAExB,OAAO,IAAI,OAAO,CAAC,CAAM,OAAO,EAAC,EAAE;;YACjC,MAAM,WAAW,GAAG,EAAE,CAAC,gBAAgB,CAAC,gBAAgB,CAAC,CAAA;YACzD,MAAM,IAAI,GAAG,IAAI,CAAC,UAAU,EAAE,CAAA;YAC9B,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;YACtB,8IAA8I;YAC9I,MAAM,MAAM,GAAG,EAAE,CAAA;;gBACjB,KAA0B,IAAA,SAAA,cAAA,IAAI,CAAA,UAAA;oBAAnB,MAAM,KAAK,iBAAA,CAAA;oBACpB,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;iBACnB;;;;;;;;;YACD,OAAO,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAA;QAChC,CAAC,CAAA,CAAC,CAAA;IACJ,CAAC;CAAA;AAdD,wDAcC"}
70 node_modules/@actions/artifact/lib/internal/upload-http-client.js generated vendored
@ -1,4 +1,23 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
|
|
@ -8,14 +27,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.UploadHttpClient = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const tmp = __importStar(require("tmp-promise"));
|
||||
|
|
@ -161,27 +174,33 @@ class UploadHttpClient {
|
|||
*/
|
||||
uploadFileAsync(httpClientIndex, parameters) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const totalFileSize = (yield stat(parameters.file)).size;
|
||||
const fileStat = yield stat(parameters.file);
|
||||
const totalFileSize = fileStat.size;
|
||||
const isFIFO = fileStat.isFIFO();
|
||||
let offset = 0;
|
||||
let isUploadSuccessful = true;
|
||||
let failedChunkSizes = 0;
|
||||
let uploadFileSize = 0;
|
||||
let isGzip = true;
|
||||
// the file that is being uploaded is less than 64k in size, to increase throughput and to minimize disk I/O
|
||||
// the file that is being uploaded is less than 64k in size to increase throughput and to minimize disk I/O
|
||||
// for creating a new GZip file, an in-memory buffer is used for compression
|
||||
if (totalFileSize < 65536) {
|
||||
// with named pipes the file size is reported as zero in that case don't read the file in memory
|
||||
if (!isFIFO && totalFileSize < 65536) {
|
||||
core.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`);
|
||||
const buffer = yield upload_gzip_1.createGZipFileInBuffer(parameters.file);
|
||||
//An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in,
|
||||
// An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in,
|
||||
// it will not properly get reset to the start of the stream if a chunk upload needs to be retried
|
||||
let openUploadStream;
|
||||
if (totalFileSize < buffer.byteLength) {
|
||||
// compression did not help with reducing the size, use a readable stream from the original file for upload
|
||||
core.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`);
|
||||
openUploadStream = () => fs.createReadStream(parameters.file);
|
||||
isGzip = false;
|
||||
uploadFileSize = totalFileSize;
|
||||
}
|
||||
else {
|
||||
// create a readable stream using a PassThrough stream that is both readable and writable
|
||||
core.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`);
|
||||
openUploadStream = () => {
|
||||
const passThrough = new stream.PassThrough();
|
||||
passThrough.end(buffer);
|
||||
|
|
@ -206,25 +225,27 @@ class UploadHttpClient {
|
|||
// the file that is being uploaded is greater than 64k in size, a temporary file gets created on disk using the
|
||||
// npm tmp-promise package and this file gets used to create a GZipped file
|
||||
const tempFile = yield tmp.file();
|
||||
core.debug(`${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`);
|
||||
// create a GZip file of the original file being uploaded, the original file should not be modified in any way
|
||||
uploadFileSize = yield upload_gzip_1.createGZipFileOnDisk(parameters.file, tempFile.path);
|
||||
let uploadFilePath = tempFile.path;
|
||||
// compression did not help with size reduction, use the original file for upload and delete the temp GZip file
|
||||
if (totalFileSize < uploadFileSize) {
|
||||
// for named pipes totalFileSize is zero, this assumes compression did help
|
||||
if (!isFIFO && totalFileSize < uploadFileSize) {
|
||||
core.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`);
|
||||
uploadFileSize = totalFileSize;
|
||||
uploadFilePath = parameters.file;
|
||||
isGzip = false;
|
||||
}
|
||||
else {
|
||||
core.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`);
|
||||
}
|
||||
let abortFileUpload = false;
|
||||
// upload only a single chunk at a time
|
||||
while (offset < uploadFileSize) {
|
||||
const chunkSize = Math.min(uploadFileSize - offset, parameters.maxChunkSize);
|
||||
// if an individual file is greater than 100MB (1024*1024*100) in size, display extra information about the upload status
|
||||
if (uploadFileSize > 104857600) {
|
||||
this.statusReporter.updateLargeFileStatus(parameters.file, offset, uploadFileSize);
|
||||
}
|
||||
const start = offset;
|
||||
const end = offset + chunkSize - 1;
|
||||
const startChunkIndex = offset;
|
||||
const endChunkIndex = offset + chunkSize - 1;
|
||||
offset += parameters.maxChunkSize;
|
||||
if (abortFileUpload) {
|
||||
// if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed
|
||||
|
|
@ -232,10 +253,10 @@ class UploadHttpClient {
|
|||
continue;
|
||||
}
|
||||
const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs.createReadStream(uploadFilePath, {
|
||||
start,
|
||||
end,
|
||||
start: startChunkIndex,
|
||||
end: endChunkIndex,
|
||||
autoClose: false
|
||||
}), start, end, uploadFileSize, isGzip, totalFileSize);
|
||||
}), startChunkIndex, endChunkIndex, uploadFileSize, isGzip, totalFileSize);
|
||||
if (!result) {
|
||||
// Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was
|
||||
// successfully uploaded so the server may report a different size for what was uploaded
|
||||
|
|
@ -244,9 +265,16 @@ class UploadHttpClient {
|
|||
core.warning(`Aborting upload for ${parameters.file} due to failure`);
|
||||
abortFileUpload = true;
|
||||
}
|
||||
else {
|
||||
// if an individual file is greater than 8MB (1024*1024*8) in size, display extra information about the upload status
|
||||
if (uploadFileSize > 8388608) {
|
||||
this.statusReporter.updateLargeFileStatus(parameters.file, startChunkIndex, endChunkIndex, uploadFileSize);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Delete the temporary file that was created as part of the upload. If the temp file does not get manually deleted by
|
||||
// calling cleanup, it gets removed when the node process exits. For more info see: https://www.npmjs.com/package/tmp-promise#about
|
||||
core.debug(`deleting temporary gzip file ${tempFile.path}`);
|
||||
yield tempFile.cleanup();
|
||||
return {
|
||||
isSuccess: isUploadSuccessful,
2 node_modules/@actions/artifact/lib/internal/upload-http-client.js.map generated vendored
File diff suppressed because one or more lines are too long
23 node_modules/@actions/artifact/lib/internal/upload-specification.js generated vendored
@ -1,16 +1,29 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getUploadSpecification = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const core_1 = require("@actions/core");
|
||||
const path_1 = require("path");
|
||||
const utils_1 = require("./utils");
|
||||
const path_and_artifact_name_validation_1 = require("./path-and-artifact-name-validation");
|
||||
/**
|
||||
* Creates a specification that describes how each file that is part of the artifact will be uploaded
|
||||
* @param artifactName the name of the artifact being uploaded. Used during upload to denote where the artifact is stored on the server
|
||||
|
|
@ -18,7 +31,7 @@ const utils_1 = require("./utils");
|
|||
* @param artifactFiles a list of absolute file paths that denote what should be uploaded as part of the artifact
|
||||
*/
|
||||
function getUploadSpecification(artifactName, rootDirectory, artifactFiles) {
|
||||
utils_1.checkArtifactName(artifactName);
|
||||
// artifact name was checked earlier on, no need to check again
|
||||
const specifications = [];
|
||||
if (!fs.existsSync(rootDirectory)) {
|
||||
throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`);
|
||||
|
|
@ -61,7 +74,7 @@ function getUploadSpecification(artifactName, rootDirectory, artifactFiles) {
|
|||
}
|
||||
// Check for forbidden characters in file paths that will be rejected during upload
|
||||
const uploadPath = file.replace(rootDirectory, '');
|
||||
utils_1.checkArtifactFilePath(uploadPath);
|
||||
path_and_artifact_name_validation_1.checkArtifactFilePath(uploadPath);
|
||||
/*
|
||||
uploadFilePath denotes where the file will be uploaded in the file container on the server. During a run, if multiple artifacts are uploaded, they will all
|
||||
be saved in the same container. The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts
2 node_modules/@actions/artifact/lib/internal/upload-specification.js.map generated vendored
@ -1 +1 @@
{"version":3,"file":"upload-specification.js","sourceRoot":"","sources":["../../src/internal/upload-specification.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAwB;AACxB,wCAAmC;AACnC,+BAA6C;AAC7C,mCAAgE;AAOhE;;;;;GAKG;AACH,SAAgB,sBAAsB,CACpC,YAAoB,EACpB,aAAqB,EACrB,aAAuB;IAEvB,yBAAiB,CAAC,YAAY,CAAC,CAAA;IAE/B,MAAM,cAAc,GAA0B,EAAE,CAAA;IAEhD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,aAAa,CAAC,EAAE;QACjC,MAAM,IAAI,KAAK,CAAC,0BAA0B,aAAa,iBAAiB,CAAC,CAAA;KAC1E;IACD,IAAI,CAAC,EAAE,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC,WAAW,EAAE,EAAE;QAC9C,MAAM,IAAI,KAAK,CACb,0BAA0B,aAAa,2BAA2B,CACnE,CAAA;KACF;IACD,sFAAsF;IACtF,aAAa,GAAG,gBAAS,CAAC,aAAa,CAAC,CAAA;IACxC,aAAa,GAAG,cAAO,CAAC,aAAa,CAAC,CAAA;IAEtC;;;;;;;;;;;;;;;;;;MAkBE;IACF,KAAK,IAAI,IAAI,IAAI,aAAa,EAAE;QAC9B,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;YACxB,MAAM,IAAI,KAAK,CAAC,QAAQ,IAAI,iBAAiB,CAAC,CAAA;SAC/C;QACD,IAAI,CAAC,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE,EAAE;YACrC,sFAAsF;YACtF,IAAI,GAAG,gBAAS,CAAC,IAAI,CAAC,CAAA;YACtB,IAAI,GAAG,cAAO,CAAC,IAAI,CAAC,CAAA;YACpB,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,aAAa,CAAC,EAAE;gBACnC,MAAM,IAAI,KAAK,CACb,sBAAsB,aAAa,2CAA2C,IAAI,EAAE,CACrF,CAAA;aACF;YAED,mFAAmF;YACnF,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE,EAAE,CAAC,CAAA;YAClD,6BAAqB,CAAC,UAAU,CAAC,CAAA;YAEjC;;;;;;;;;cASE;YACF,cAAc,CAAC,IAAI,CAAC;gBAClB,gBAAgB,EAAE,IAAI;gBACtB,cAAc,EAAE,WAAI,CAAC,YAAY,EAAE,UAAU,CAAC;aAC/C,CAAC,CAAA;SACH;aAAM;YACL,uDAAuD;YACvD,YAAK,CAAC,YAAY,IAAI,kDAAkD,CAAC,CAAA;SAC1E;KACF;IACD,OAAO,cAAc,CAAA;AACvB,CAAC;AA9ED,wDA8EC"}
{"version":3,"file":"upload-specification.js","sourceRoot":"","sources":["../../src/internal/upload-specification.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,wCAAmC;AACnC,+BAA6C;AAC7C,2FAAyE;AAOzE;;;;;GAKG;AACH,SAAgB,sBAAsB,CACpC,YAAoB,EACpB,aAAqB,EACrB,aAAuB;IAEvB,+DAA+D;IAC/D,MAAM,cAAc,GAA0B,EAAE,CAAA;IAEhD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,aAAa,CAAC,EAAE;QACjC,MAAM,IAAI,KAAK,CAAC,0BAA0B,aAAa,iBAAiB,CAAC,CAAA;KAC1E;IACD,IAAI,CAAC,EAAE,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC,WAAW,EAAE,EAAE;QAC9C,MAAM,IAAI,KAAK,CACb,0BAA0B,aAAa,2BAA2B,CACnE,CAAA;KACF;IACD,sFAAsF;IACtF,aAAa,GAAG,gBAAS,CAAC,aAAa,CAAC,CAAA;IACxC,aAAa,GAAG,cAAO,CAAC,aAAa,CAAC,CAAA;IAEtC;;;;;;;;;;;;;;;;;;MAkBE;IACF,KAAK,IAAI,IAAI,IAAI,aAAa,EAAE;QAC9B,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;YACxB,MAAM,IAAI,KAAK,CAAC,QAAQ,IAAI,iBAAiB,CAAC,CAAA;SAC/C;QACD,IAAI,CAAC,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE,EAAE;YACrC,sFAAsF;YACtF,IAAI,GAAG,gBAAS,CAAC,IAAI,CAAC,CAAA;YACtB,IAAI,GAAG,cAAO,CAAC,IAAI,CAAC,CAAA;YACpB,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,aAAa,CAAC,EAAE;gBACnC,MAAM,IAAI,KAAK,CACb,sBAAsB,aAAa,2CAA2C,IAAI,EAAE,CACrF,CAAA;aACF;YAED,mFAAmF;YACnF,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE,EAAE,CAAC,CAAA;YAClD,yDAAqB,CAAC,UAAU,CAAC,CAAA;YAEjC;;;;;;;;;cASE;YACF,cAAc,CAAC,IAAI,CAAC;gBAClB,gBAAgB,EAAE,IAAI;gBACtB,cAAc,EAAE,WAAI,CAAC,YAAY,EAAE,UAAU,CAAC;aAC/C,CAAC,CAAA;SACH;aAAM;YACL,uDAAuD;YACvD,YAAK,CAAC,YAAY,IAAI,kDAAkD,CAAC,CAAA;SAC1E;KACF;IACD,OAAO,cAAc,CAAA;AACvB,CAAC;AA7ED,wDA6EC"}
8 node_modules/@actions/artifact/lib/internal/utils.d.ts generated vendored
@ -58,14 +58,6 @@ export declare function getArtifactUrl(): string;
* Other information such as the headers, the response code and message might be useful, so this is displayed.
*/
export declare function displayHttpDiagnostics(response: IHttpClientResponse): void;
/**
* Scans the name of the artifact to make sure there are no illegal characters
*/
export declare function checkArtifactName(name: string): void;
/**
* Scans the name of the filePath used to make sure there are no illegal characters
*/
export declare function checkArtifactFilePath(path: string): void;
export declare function createDirectoriesForArtifact(directories: string[]): Promise<void>;
export declare function createEmptyFilesForArtifact(emptyFilesToCreate: string[]): Promise<void>;
export declare function getFileSize(filePath: string): Promise<number>;
45 node_modules/@actions/artifact/lib/internal/utils.js generated vendored
@ -9,6 +9,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.sleep = exports.getProperRetention = exports.rmFile = exports.getFileSize = exports.createEmptyFilesForArtifact = exports.createDirectoriesForArtifact = exports.displayHttpDiagnostics = exports.getArtifactUrl = exports.createHttpClient = exports.getUploadHeaders = exports.getDownloadHeaders = exports.getContentRange = exports.tryGetRetryAfterValueTimeInMilliseconds = exports.isThrottledStatusCode = exports.isRetryableStatusCode = exports.isForbiddenStatusCode = exports.isSuccessStatusCode = exports.getApiVersion = exports.parseEnvNumber = exports.getExponentialRetryTimeInMilliseconds = void 0;
|
||||
const core_1 = require("@actions/core");
|
||||
const fs_1 = require("fs");
|
||||
const http_client_1 = require("@actions/http-client");
|
||||
|
|
@ -28,7 +29,7 @@ function getExponentialRetryTimeInMilliseconds(retryCount) {
|
|||
const minTime = config_variables_1.getInitialRetryIntervalInMilliseconds() * config_variables_1.getRetryMultiplier() * retryCount;
|
||||
const maxTime = minTime * config_variables_1.getRetryMultiplier();
|
||||
// returns a random number between the minTime (inclusive) and the maxTime (exclusive)
|
||||
return Math.random() * (maxTime - minTime) + minTime;
|
||||
return Math.trunc(Math.random() * (maxTime - minTime) + minTime);
|
||||
}
|
||||
exports.getExponentialRetryTimeInMilliseconds = getExponentialRetryTimeInMilliseconds;
|
||||
/**
|
||||
|
|
@ -207,48 +208,6 @@ Header Information: ${JSON.stringify(response.message.headers, undefined, 2)}
|
|||
###### End Diagnostic HTTP information ######`);
|
||||
}
|
||||
exports.displayHttpDiagnostics = displayHttpDiagnostics;
|
||||
/**
|
||||
* Invalid characters that cannot be in the artifact name or an uploaded file. Will be rejected
|
||||
* from the server if attempted to be sent over. These characters are not allowed due to limitations with certain
|
||||
* file systems such as NTFS. To maintain platform-agnostic behavior, all characters that are not supported by an
|
||||
* individual filesystem/platform will not be supported on all fileSystems/platforms
|
||||
*
|
||||
* FilePaths can include characters such as \ and / which are not permitted in the artifact name alone
|
||||
*/
|
||||
const invalidArtifactFilePathCharacters = ['"', ':', '<', '>', '|', '*', '?'];
|
||||
const invalidArtifactNameCharacters = [
|
||||
...invalidArtifactFilePathCharacters,
|
||||
'\\',
|
||||
'/'
|
||||
];
|
||||
/**
|
||||
* Scans the name of the artifact to make sure there are no illegal characters
|
||||
*/
|
||||
function checkArtifactName(name) {
|
||||
if (!name) {
|
||||
throw new Error(`Artifact name: ${name}, is incorrectly provided`);
|
||||
}
|
||||
for (const invalidChar of invalidArtifactNameCharacters) {
|
||||
if (name.includes(invalidChar)) {
|
||||
throw new Error(`Artifact name is not valid: ${name}. Contains character: "${invalidChar}". Invalid artifact name characters include: ${invalidArtifactNameCharacters.toString()}.`);
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.checkArtifactName = checkArtifactName;
|
||||
/**
|
||||
* Scans the name of the filePath used to make sure there are no illegal characters
|
||||
*/
|
||||
function checkArtifactFilePath(path) {
|
||||
if (!path) {
|
||||
throw new Error(`Artifact path: ${path}, is incorrectly provided`);
|
||||
}
|
||||
for (const invalidChar of invalidArtifactFilePathCharacters) {
|
||||
if (path.includes(invalidChar)) {
|
||||
throw new Error(`Artifact path is not valid: ${path}. Contains character: "${invalidChar}". Invalid characters include: ${invalidArtifactFilePathCharacters.toString()}.`);
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.checkArtifactFilePath = checkArtifactFilePath;
|
||||
function createDirectoriesForArtifact(directories) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
for (const directory of directories) {
2 node_modules/@actions/artifact/lib/internal/utils.js.map generated vendored
File diff suppressed because one or more lines are too long
8 node_modules/@actions/artifact/package.json generated vendored
@ -1,6 +1,6 @@
{
"name": "@actions/artifact",
"version": "0.5.2",
"version": "1.0.0",
"preview": true,
"description": "Actions artifact lib",
"keywords": [
@ -39,11 +39,11 @@
"dependencies": {
"@actions/core": "^1.2.6",
"@actions/http-client": "^1.0.11",
"@types/tmp": "^0.1.0",
"tmp": "^0.1.0",
"tmp-promise": "^2.0.2"
"tmp": "^0.2.1",
"tmp-promise": "^3.0.2"
},
"devDependencies": {
"@types/tmp": "^0.2.1",
"typescript": "^3.8.3"
}
}
21 node_modules/@types/tmp/LICENSE generated vendored
@ -1,21 +0,0 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE
16 node_modules/@types/tmp/README.md generated vendored
@ -1,16 +0,0 @@
|
|||
# Installation
|
||||
> `npm install --save @types/tmp`
|
||||
|
||||
# Summary
|
||||
This package contains type definitions for tmp ( http://github.com/raszi/node-tmp ).
|
||||
|
||||
# Details
|
||||
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/tmp
|
||||
|
||||
Additional Details
|
||||
* Last updated: Mon, 01 Apr 2019 20:55:52 GMT
|
||||
* Dependencies: none
|
||||
* Global values: none
|
||||
|
||||
# Credits
|
||||
These definitions were written by Jared Klopper <https://github.com/optical>, Gyusun Yeom <https://github.com/Perlmint>, Alan Plum <https://github.com/pluma>.
70 node_modules/@types/tmp/index.d.ts generated vendored
@ -1,70 +0,0 @@
|
|||
// Type definitions for tmp 0.1
|
||||
// Project: http://github.com/raszi/node-tmp
|
||||
// Definitions by: Jared Klopper <https://github.com/optical>
|
||||
// Gyusun Yeom <https://github.com/Perlmint>
|
||||
// Alan Plum <https://github.com/pluma>
|
||||
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
||||
|
||||
export interface TmpNameOptions {
|
||||
prefix?: string;
|
||||
postfix?: string;
|
||||
template?: string;
|
||||
dir?: string;
|
||||
tries?: number;
|
||||
}
|
||||
|
||||
export interface FileOptions extends TmpNameOptions {
|
||||
mode?: number;
|
||||
keep?: boolean;
|
||||
discardDescriptor?: boolean;
|
||||
detachDescriptor?: boolean;
|
||||
}
|
||||
|
||||
export interface DirOptions extends TmpNameOptions {
|
||||
mode?: number;
|
||||
keep?: boolean;
|
||||
unsafeCleanup?: boolean;
|
||||
}
|
||||
|
||||
export interface FileResult {
|
||||
name: string;
|
||||
fd: number;
|
||||
removeCallback: () => void;
|
||||
}
|
||||
|
||||
export interface DirResult {
|
||||
name: string;
|
||||
removeCallback: () => void;
|
||||
}
|
||||
|
||||
export type FileCallback = (
|
||||
err: any,
|
||||
name: string,
|
||||
fd: number,
|
||||
removeCallback: () => void
|
||||
) => void;
|
||||
|
||||
export type DirCallback = (
|
||||
err: any,
|
||||
name: string,
|
||||
removeCallback: () => void
|
||||
) => void;
|
||||
|
||||
export type TmpNameCallback = (err: any, name: string) => void;
|
||||
|
||||
export function file(options: FileOptions, cb: FileCallback): void;
|
||||
export function file(cb: FileCallback): void;
|
||||
|
||||
export function fileSync(options?: FileOptions): FileResult;
|
||||
|
||||
export function dir(options: DirOptions, cb: DirCallback): void;
|
||||
export function dir(cb: DirCallback): void;
|
||||
|
||||
export function dirSync(options?: DirOptions): DirResult;
|
||||
|
||||
export function tmpName(options: TmpNameOptions, cb: TmpNameCallback): void;
|
||||
export function tmpName(cb: TmpNameCallback): void;
|
||||
|
||||
export function tmpNameSync(options?: TmpNameOptions): string;
|
||||
|
||||
export function setGracefulCleanup(): void;
34 node_modules/@types/tmp/package.json generated vendored
@ -1,34 +0,0 @@
|
|||
{
|
||||
"name": "@types/tmp",
|
||||
"version": "0.1.0",
|
||||
"description": "TypeScript definitions for tmp",
|
||||
"license": "MIT",
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Jared Klopper",
|
||||
"url": "https://github.com/optical",
|
||||
"githubUsername": "optical"
|
||||
},
|
||||
{
|
||||
"name": "Gyusun Yeom",
|
||||
"url": "https://github.com/Perlmint",
|
||||
"githubUsername": "Perlmint"
|
||||
},
|
||||
{
|
||||
"name": "Alan Plum",
|
||||
"url": "https://github.com/pluma",
|
||||
"githubUsername": "pluma"
|
||||
}
|
||||
],
|
||||
"main": "",
|
||||
"types": "index",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git",
|
||||
"directory": "types/tmp"
|
||||
},
|
||||
"scripts": {},
|
||||
"dependencies": {},
|
||||
"typesPublisherContentHash": "d148a6fe31794f2a5b222ae9d0f38d575acbb5e0fed205877913f4e52e33a357",
|
||||
"typeScriptVersion": "2.0"
|
||||
}
8 node_modules/minimist/index.js generated vendored
@ -70,7 +70,7 @@ module.exports = function (args, opts) {
var o = obj;
for (var i = 0; i < keys.length-1; i++) {
var key = keys[i];
if (key === '__proto__') return;
if (isConstructorOrProto(o, key)) return;
if (o[key] === undefined) o[key] = {};
if (o[key] === Object.prototype || o[key] === Number.prototype
|| o[key] === String.prototype) o[key] = {};
@ -79,7 +79,7 @@ module.exports = function (args, opts) {
}

var key = keys[keys.length - 1];
if (key === '__proto__') return;
if (isConstructorOrProto(o, key)) return;
if (o === Object.prototype || o === Number.prototype
|| o === String.prototype) o = {};
if (o === Array.prototype) o = [];
@ -243,3 +243,7 @@ function isNumber (x) {
return /^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x);
}


function isConstructorOrProto (obj, key) {
return key === 'constructor' && typeof obj[key] === 'function' || key === '__proto__';
}
2 node_modules/minimist/package.json generated vendored
@ -1,6 +1,6 @@
{
"name": "minimist",
"version": "1.2.5",
"version": "1.2.6",
"description": "parse argument options",
"main": "index.js",
"devDependencies": {
5 node_modules/minimist/readme.markdown generated vendored
@ -34,7 +34,10 @@ $ node example/parse.js -x 3 -y 4 -n5 -abc --beep=boop foo bar baz
Previous versions had a prototype pollution bug that could cause privilege
escalation in some circumstances when handling untrusted user input.

Please use version 1.2.3 or later: https://snyk.io/vuln/SNYK-JS-MINIMIST-559764
Please use version 1.2.6 or later:

* https://security.snyk.io/vuln/SNYK-JS-MINIMIST-2429795 (version <=1.2.5)
* https://snyk.io/vuln/SNYK-JS-MINIMIST-559764 (version <=1.2.3)

# methods
16 node_modules/minimist/test/proto.js generated vendored
@ -42,3 +42,19 @@ test('proto pollution (constructor)', function (t) {
t.equal(argv.y, undefined);
t.end();
});

test('proto pollution (constructor function)', function (t) {
var argv = parse(['--_.concat.constructor.prototype.y', '123']);
function fnToBeTested() {}
t.equal(fnToBeTested.y, undefined);
t.equal(argv.y, undefined);
t.end();
});

// powered by snyk - https://github.com/backstage/backstage/issues/10343
test('proto pollution (constructor function) snyk', function (t) {
var argv = parse('--_.constructor.constructor.prototype.foo bar'.split(' '));
t.equal((function(){}).foo, undefined);
t.equal(argv.y, undefined);
t.end();
})
106 node_modules/tmp-promise/.circleci/config.yml generated vendored
@ -1,53 +1,53 @@
|
|||
version: 2
|
||||
|
||||
common_steps: &common_steps
|
||||
steps:
|
||||
- checkout
|
||||
|
||||
- run:
|
||||
name: Install dependencies
|
||||
command: npm install
|
||||
|
||||
- run:
|
||||
name: Run tests
|
||||
command: npm run mocha
|
||||
|
||||
- run:
|
||||
name: Check Typescript types
|
||||
command: npm run check-types
|
||||
when: always
|
||||
|
||||
jobs:
|
||||
node-8:
|
||||
docker:
|
||||
- image: circleci/node:8
|
||||
|
||||
<<: *common_steps
|
||||
|
||||
node-10:
|
||||
docker:
|
||||
- image: circleci/node:10
|
||||
|
||||
<<: *common_steps
|
||||
|
||||
node-11:
|
||||
docker:
|
||||
- image: circleci/node:11
|
||||
|
||||
<<: *common_steps
|
||||
|
||||
node-12:
|
||||
docker:
|
||||
- image: circleci/node:12
|
||||
|
||||
<<: *common_steps
|
||||
|
||||
workflows:
|
||||
version: 2
|
||||
|
||||
on-commit:
|
||||
jobs:
|
||||
- node-8
|
||||
- node-10
|
||||
- node-11
|
||||
- node-12
|
||||
version: 2
|
||||
|
||||
common_steps: &common_steps
|
||||
steps:
|
||||
- checkout
|
||||
|
||||
- run:
|
||||
name: Install dependencies
|
||||
command: npm install
|
||||
|
||||
- run:
|
||||
name: Run tests
|
||||
command: npm run mocha
|
||||
|
||||
- run:
|
||||
name: Check Typescript types
|
||||
command: npm run check-types
|
||||
when: always
|
||||
|
||||
jobs:
|
||||
node-8:
|
||||
docker:
|
||||
- image: circleci/node:8
|
||||
|
||||
<<: *common_steps
|
||||
|
||||
node-10:
|
||||
docker:
|
||||
- image: circleci/node:10
|
||||
|
||||
<<: *common_steps
|
||||
|
||||
node-11:
|
||||
docker:
|
||||
- image: circleci/node:11
|
||||
|
||||
<<: *common_steps
|
||||
|
||||
node-12:
|
||||
docker:
|
||||
- image: circleci/node:12
|
||||
|
||||
<<: *common_steps
|
||||
|
||||
workflows:
|
||||
version: 2
|
||||
|
||||
on-commit:
|
||||
jobs:
|
||||
- node-8
|
||||
- node-10
|
||||
- node-11
|
||||
- node-12
|
||||
|
|
|
|||
632 node_modules/tmp-promise/README.md generated vendored
@ -1,316 +1,316 @@
|
|||
# tmp-promise
|
||||
|
||||
[](https://circleci.com/gh/benjamingr/tmp-promise)
|
||||
[](https://badge.fury.io/js/tmp-promise)
|
||||
|
||||
A simple utility for creating temporary files or directories.
|
||||
|
||||
The [tmp](https://github.com/raszi/node-tmp) package with promises support. If you want to use `tmp` with `async/await` then this helper might be for you.
|
||||
|
||||
This documentation is mostly copied from that package's - but with promise usage instead of callback usage adapted.
|
||||
|
||||
## Installation
|
||||
|
||||
npm i tmp-promise
|
||||
|
||||
**Note:** Node.js 8+ is supported - older versions of Node.js are not supported by the Node.js foundation. If you need to use an older version of Node.js install tmp-promise@1.10
|
||||
|
||||
npm i tmp-promise@1.1.0
|
||||
|
||||
## About
|
||||
|
||||
This adds promises support to a [widely used library][2]. This package is used to create temporary files and directories in a [Node.js][1] environment.
|
||||
|
||||
|
||||
tmp-promise offers both an asynchronous and a synchronous API. For all API calls, all
|
||||
the parameters are optional.
|
||||
|
||||
Internally, tmp uses crypto for determining random file names, or, when using templates, a six letter random identifier. And just in case that you do not have that much entropy left on your system, tmp will fall back to pseudo random numbers.
|
||||
|
||||
You can set whether you want to remove the temporary file on process exit or not, and the destination directory can also be set.
|
||||
|
||||
tmp-promise also uses promise [disposers](http://stackoverflow.com/questions/28915677/what-is-the-promise-disposer-pattern) to provide a nice way to perform cleanup when you're done working with the files.
|
||||
|
||||
## Usage (API Reference)
|
||||
|
||||
### Asynchronous file creation
|
||||
|
||||
Simple temporary file creation, the file will be closed and unlinked on process exit.
|
||||
|
||||
With Node.js 10 and es - modules:
|
||||
|
||||
```js
|
||||
import { file } from 'tmp-promise'
|
||||
|
||||
(async () => {
|
||||
const {fd, path, cleanup} = await file();
|
||||
// work with file here in fd
|
||||
cleanup();
|
||||
})();
|
||||
```
|
||||
|
||||
Or the older way:
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
tmp.file().then(o => {
|
||||
console.log("File: ", o.path);
|
||||
console.log("Filedescriptor: ", o.fd);
|
||||
|
||||
// If we don't need the file anymore we could manually call cleanup
|
||||
// But that is not necessary if we didn't pass the keep option because the library
|
||||
// will clean after itself.
|
||||
o.cleanup();
|
||||
});
|
||||
```
|
||||
|
||||
Simple temporary file creation with a [disposer](http://stackoverflow.com/questions/28915677/what-is-the-promise-disposer-pattern):
|
||||
|
||||
With Node.js 10 and es - modules:
|
||||
|
||||
```js
|
||||
import { withFile } from 'tmp-promise'
|
||||
|
||||
withFile(async ({path, fd}) => {
|
||||
// when this function returns or throws - release the file
|
||||
await doSomethingWithFile(db);
|
||||
});
|
||||
```
|
||||
|
||||
Or the older way:
|
||||
|
||||
```js
|
||||
tmp.withFile(o => {
|
||||
console.log("File: ", o.path);
|
||||
console.log("Filedescriptor: ", o.fd);
|
||||
// the file remains opens until the below promise resolves
|
||||
return somePromiseReturningFn();
|
||||
}).then(v => {
|
||||
// file is closed here automatically, v is the value of somePromiseReturningFn
|
||||
});
|
||||
```
|
||||
|
||||
|
||||
### Synchronous file creation
|
||||
|
||||
A synchronous version of the above.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
var tmpobj = tmp.fileSync();
|
||||
console.log("File: ", tmpobj.name);
|
||||
console.log("Filedescriptor: ", tmpobj.fd);
|
||||
|
||||
// If we don't need the file anymore we could manually call the removeCallback
|
||||
// But that is not necessary if we didn't pass the keep option because the library
|
||||
// will clean after itself.
|
||||
tmpobj.removeCallback();
|
||||
```
|
||||
|
||||
Note that this might throw an exception if either the maximum limit of retries
|
||||
for creating a temporary name fails, or, in case that you do not have the permission
|
||||
to write to the directory where the temporary file should be created in.
|
||||
|
||||
### Asynchronous directory creation
|
||||
|
||||
Simple temporary directory creation, it will be removed on process exit.
|
||||
|
||||
If the directory still contains items on process exit, then it won't be removed.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
tmp.dir().then(o => {
|
||||
console.log("Dir: ", o.path);
|
||||
|
||||
// Manual cleanup
|
||||
o.cleanup();
|
||||
});
|
||||
```
|
||||
|
||||
If you want to cleanup the directory even when there are entries in it, then
|
||||
you can pass the `unsafeCleanup` option when creating it.
|
||||
|
||||
You can also use a [disposer](http://stackoverflow.com/questions/28915677/what-is-the-promise-disposer-pattern) here which takes care of cleanup automatically:
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
tmp.withDir(o => {
|
||||
console.log("Dir: ", o.path);
|
||||
|
||||
// automatic cleanup when the below promise resolves
|
||||
return somePromiseReturningFn();
|
||||
}).then(v => {
|
||||
// the directory has been cleaned here
|
||||
});
|
||||
```
|
||||
|
||||
### Synchronous directory creation
|
||||
|
||||
A synchronous version of the above.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
var tmpobj = tmp.dirSync();
|
||||
console.log("Dir: ", tmpobj.name);
|
||||
// Manual cleanup
|
||||
tmpobj.removeCallback();
|
||||
```
|
||||
|
||||
Note that this might throw an exception if either the maximum limit of retries
|
||||
for creating a temporary name fails, or, in case that you do not have the permission
|
||||
to write to the directory where the temporary directory should be created in.
|
||||
|
||||
### Asynchronous filename generation
|
||||
|
||||
It is possible with this library to generate a unique filename in the specified
|
||||
directory.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
tmp.tmpName().then(path => {
|
||||
console.log("Created temporary filename: ", path);
|
||||
});
|
||||
```
|
||||
|
||||
### Synchronous filename generation
|
||||
|
||||
A synchronous version of the above.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
var name = tmp.tmpNameSync();
|
||||
console.log("Created temporary filename: ", name);
|
||||
```
|
||||
|
||||
## Advanced usage
|
||||
|
||||
### Asynchronous file creation
|
||||
|
||||
Creates a file with mode `0644`, prefix will be `prefix-` and postfix will be `.txt`.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
tmp.file({ mode: 0644, prefix: 'prefix-', postfix: '.txt' }).then(o => {
|
||||
console.log("File: ", o.path);
|
||||
console.log("Filedescriptor: ", o.fd);
|
||||
});
|
||||
```
|
||||
|
||||
### Synchronous file creation
|
||||
|
||||
A synchronous version of the above.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
var tmpobj = tmp.fileSync({ mode: 0644, prefix: 'prefix-', postfix: '.txt' });
|
||||
console.log("File: ", tmpobj.name);
|
||||
console.log("Filedescriptor: ", tmpobj.fd);
|
||||
```
|
||||
|
||||
### Asynchronous directory creation
|
||||
|
||||
Creates a directory with mode `0755`, prefix will be `myTmpDir_`.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
tmp.dir({ mode: 0750, prefix: 'myTmpDir_' }).then(o => {
|
||||
console.log("Dir: ", o.path);
|
||||
});
|
||||
```
|
||||
|
||||
### Synchronous directory creation
|
||||
|
||||
Again, a synchronous version of the above.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
var tmpobj = tmp.dirSync({ mode: 0750, prefix: 'myTmpDir_' });
|
||||
console.log("Dir: ", tmpobj.name);
|
||||
```
|
||||
|
||||
|
||||
### mkstemp like, asynchronously
|
||||
|
||||
Creates a new temporary directory with mode `0700` and filename like `/tmp/tmp-nk2J1u`.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
tmp.dir({ template: '/tmp/tmp-XXXXXX' }).then(console.log);
|
||||
```
|
||||
|
||||
|
||||
### mkstemp like, synchronously
|
||||
|
||||
This will behave similarly to the asynchronous version.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
var tmpobj = tmp.dirSync({ template: '/tmp/tmp-XXXXXX' });
|
||||
console.log("Dir: ", tmpobj.name);
|
||||
```
|
||||
|
||||
### Asynchronous filename generation
|
||||
|
||||
The `tmpName()` function accepts the `prefix`, `postfix`, `dir`, etc. parameters also:
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
tmp.tmpName({ template: '/tmp/tmp-XXXXXX' }).then(path =>
|
||||
console.log("Created temporary filename: ", path);
|
||||
);
|
||||
```
|
||||
|
||||
### Synchronous filename generation
|
||||
|
||||
The `tmpNameSync()` function works similarly to `tmpName()`.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
var tmpname = tmp.tmpNameSync({ template: '/tmp/tmp-XXXXXX' });
|
||||
console.log("Created temporary filename: ", tmpname);
|
||||
```
|
||||
|
||||
|
||||
## Graceful cleanup
|
||||
|
||||
One may want to cleanup the temporary files even when an uncaught exception
|
||||
occurs. To enforce this, you can call the `setGracefulCleanup()` method:
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
tmp.setGracefulCleanup();
|
||||
```
|
||||
|
||||
## Options
|
||||
|
||||
All options are optional :)
|
||||
|
||||
* `mode`: the file mode to create with, it fallbacks to `0600` on file creation and `0700` on directory creation
|
||||
* `prefix`: the optional prefix, fallbacks to `tmp-` if not provided
|
||||
* `postfix`: the optional postfix, fallbacks to `.tmp` on file creation
|
||||
* `template`: [`mkstemp`][3] like filename template, no default
|
||||
* `dir`: the optional temporary directory, fallbacks to system default (guesses from environment)
|
||||
* `tries`: how many times should the function try to get a unique filename before giving up, default `3`
|
||||
* `keep`: signals that the temporary file or directory should not be deleted on exit, default is `false`, means delete
|
||||
* Please keep in mind that it is recommended in this case to call the provided `cleanupCallback` function manually.
|
||||
* `unsafeCleanup`: recursively removes the created temporary directory, even when it's not empty. default is `false`
|
||||
|
||||
|
||||
|
||||
[1]: http://nodejs.org/
|
||||
[2]: https://www.npmjs.com/browse/depended/tmp
|
||||
[3]: http://www.kernel.org/doc/man-pages/online/pages/man3/mkstemp.3.html
|
||||
# tmp-promise
|
||||
|
||||
[](https://circleci.com/gh/benjamingr/tmp-promise)
|
||||
[](https://badge.fury.io/js/tmp-promise)
|
||||
|
||||
A simple utility for creating temporary files or directories.
|
||||
|
||||
The [tmp](https://github.com/raszi/node-tmp) package with promises support. If you want to use `tmp` with `async/await` then this helper might be for you.
|
||||
|
||||
This documentation is mostly copied from that package's - but with promise usage instead of callback usage adapted.
|
||||
|
||||
## Installation
|
||||
|
||||
npm i tmp-promise
|
||||
|
||||
**Note:** Node.js 8+ is supported - older versions of Node.js are not supported by the Node.js foundation. If you need to use an older version of Node.js install tmp-promise@1.10
|
||||
|
||||
npm i tmp-promise@1.1.0
|
||||
|
||||
## About
|
||||
|
||||
This adds promises support to a [widely used library][2]. This package is used to create temporary files and directories in a [Node.js][1] environment.
|
||||
|
||||
|
||||
tmp-promise offers both an asynchronous and a synchronous API. For all API calls, all
|
||||
the parameters are optional.
|
||||
|
||||
Internally, tmp uses crypto for determining random file names, or, when using templates, a six letter random identifier. And just in case that you do not have that much entropy left on your system, tmp will fall back to pseudo random numbers.
|
||||
|
||||
You can set whether you want to remove the temporary file on process exit or not, and the destination directory can also be set.
|
||||
|
||||
tmp-promise also uses promise [disposers](http://stackoverflow.com/questions/28915677/what-is-the-promise-disposer-pattern) to provide a nice way to perform cleanup when you're done working with the files.
|
||||
|
||||
## Usage (API Reference)
|
||||
|
||||
### Asynchronous file creation
|
||||
|
||||
Simple temporary file creation; the file will be closed and unlinked on process exit.
|
||||
|
||||
With Node.js 10 and ES modules:
|
||||
|
||||
```js
|
||||
import { file } from 'tmp-promise'
|
||||
|
||||
(async () => {
|
||||
const {fd, path, cleanup} = await file();
|
||||
// work with the file here, using fd or path
await cleanup();
|
||||
})();
|
||||
```
|
||||
|
||||
Or the older way:
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
tmp.file().then(o => {
|
||||
console.log("File: ", o.path);
|
||||
console.log("Filedescriptor: ", o.fd);
|
||||
|
||||
// If we don't need the file anymore we could manually call cleanup
|
||||
// But that is not necessary if we didn't pass the keep option because the library
|
||||
// will clean up after itself.
|
||||
o.cleanup();
|
||||
});
|
||||
```
|
||||
|
||||
Simple temporary file creation with a [disposer](http://stackoverflow.com/questions/28915677/what-is-the-promise-disposer-pattern):
|
||||
|
||||
With Node.js 10 and ES modules:
|
||||
|
||||
```js
|
||||
import { withFile } from 'tmp-promise'
|
||||
|
||||
withFile(async ({path, fd}) => {
|
||||
// when this function returns or throws, the file is released
await doSomethingWithFile(path, fd);
|
||||
});
|
||||
```
|
||||
|
||||
Or the older way:
|
||||
|
||||
```js
|
||||
tmp.withFile(o => {
|
||||
console.log("File: ", o.path);
|
||||
console.log("Filedescriptor: ", o.fd);
|
||||
// the file remains open until the promise below resolves
|
||||
return somePromiseReturningFn();
|
||||
}).then(v => {
|
||||
// file is closed here automatically, v is the value of somePromiseReturningFn
|
||||
});
|
||||
```
|
||||
|
||||
|
||||
### Synchronous file creation
|
||||
|
||||
A synchronous version of the above.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
var tmpobj = tmp.fileSync();
|
||||
console.log("File: ", tmpobj.name);
|
||||
console.log("Filedescriptor: ", tmpobj.fd);
|
||||
|
||||
// If we don't need the file anymore we could manually call the removeCallback
|
||||
// But that is not necessary if we didn't pass the keep option because the library
|
||||
// will clean up after itself.
|
||||
tmpobj.removeCallback();
|
||||
```
|
||||
|
||||
Note that this might throw an exception if the maximum number of retries for creating a temporary name is exceeded, or if you do not have permission to write to the directory where the temporary file should be created.
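A small sketch of handling that case with the synchronous API (the `tries` value is only illustrative):

```javascript
var tmp = require('tmp-promise');

try {
  var tmpobj = tmp.fileSync({ tries: 3 });
  console.log("File: ", tmpobj.name);
} catch (err) {
  // thrown when no unique name could be found or the target directory is not writable
  console.error("Could not create a temporary file: ", err.message);
}
```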
|
||||
|
||||
### Asynchronous directory creation
|
||||
|
||||
Simple temporary directory creation; it will be removed on process exit.
|
||||
|
||||
If the directory still contains items on process exit, then it won't be removed.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
tmp.dir().then(o => {
|
||||
console.log("Dir: ", o.path);
|
||||
|
||||
// Manual cleanup
|
||||
o.cleanup();
|
||||
});
|
||||
```
|
||||
|
||||
If you want to clean up the directory even when there are entries in it, pass the `unsafeCleanup` option when creating it.
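A short sketch (the file written into the directory is only illustrative):

```javascript
var tmp = require('tmp-promise');
var fs = require('fs');
var path = require('path');

tmp.dir({ unsafeCleanup: true }).then(o => {
  // the directory is not empty, but cleanup() still removes it recursively
  fs.writeFileSync(path.join(o.path, 'scratch.txt'), 'temporary data');
  return o.cleanup();
});
```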
|
||||
|
||||
You can also use a [disposer](http://stackoverflow.com/questions/28915677/what-is-the-promise-disposer-pattern) here which takes care of cleanup automatically:
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
tmp.withDir(o => {
|
||||
console.log("Dir: ", o.path);
|
||||
|
||||
// automatic cleanup when the below promise resolves
|
||||
return somePromiseReturningFn();
|
||||
}).then(v => {
|
||||
// the directory has been cleaned here
|
||||
});
|
||||
```
|
||||
|
||||
### Synchronous directory creation
|
||||
|
||||
A synchronous version of the above.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
var tmpobj = tmp.dirSync();
|
||||
console.log("Dir: ", tmpobj.name);
|
||||
// Manual cleanup
|
||||
tmpobj.removeCallback();
|
||||
```
|
||||
|
||||
Note that this might throw an exception if the maximum number of retries for creating a temporary name is exceeded, or if you do not have permission to write to the directory where the temporary directory should be created.
|
||||
|
||||
### Asynchronous filename generation
|
||||
|
||||
It is possible with this library to generate a unique filename in the specified
|
||||
directory.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
tmp.tmpName().then(path => {
|
||||
console.log("Created temporary filename: ", path);
|
||||
});
|
||||
```
|
||||
|
||||
### Synchronous filename generation
|
||||
|
||||
A synchronous version of the above.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
var name = tmp.tmpNameSync();
|
||||
console.log("Created temporary filename: ", name);
|
||||
```
|
||||
|
||||
## Advanced usage
|
||||
|
||||
### Asynchronous file creation
|
||||
|
||||
Creates a file with mode `0644`; the prefix will be `prefix-` and the postfix `.txt`.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
tmp.file({ mode: 0644, prefix: 'prefix-', postfix: '.txt' }).then(o => {
|
||||
console.log("File: ", o.path);
|
||||
console.log("Filedescriptor: ", o.fd);
|
||||
});
|
||||
```
|
||||
|
||||
### Synchronous file creation
|
||||
|
||||
A synchronous version of the above.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
var tmpobj = tmp.fileSync({ mode: 0644, prefix: 'prefix-', postfix: '.txt' });
|
||||
console.log("File: ", tmpobj.name);
|
||||
console.log("Filedescriptor: ", tmpobj.fd);
|
||||
```
|
||||
|
||||
### Asynchronous directory creation
|
||||
|
||||
Creates a directory with mode `0750`; the prefix will be `myTmpDir_`.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
tmp.dir({ mode: 0750, prefix: 'myTmpDir_' }).then(o => {
|
||||
console.log("Dir: ", o.path);
|
||||
});
|
||||
```
|
||||
|
||||
### Synchronous directory creation
|
||||
|
||||
Again, a synchronous version of the above.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
var tmpobj = tmp.dirSync({ mode: 0750, prefix: 'myTmpDir_' });
|
||||
console.log("Dir: ", tmpobj.name);
|
||||
```
|
||||
|
||||
|
||||
### mkstemp-like, asynchronously
|
||||
|
||||
Creates a new temporary directory with mode `0700` and filename like `/tmp/tmp-nk2J1u`.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
tmp.dir({ template: '/tmp/tmp-XXXXXX' }).then(console.log);
|
||||
```
|
||||
|
||||
|
||||
### mkstemp-like, synchronously
|
||||
|
||||
This will behave similarly to the asynchronous version.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
var tmpobj = tmp.dirSync({ template: '/tmp/tmp-XXXXXX' });
|
||||
console.log("Dir: ", tmpobj.name);
|
||||
```
|
||||
|
||||
### Asynchronous filename generation
|
||||
|
||||
The `tmpName()` function also accepts the `prefix`, `postfix`, `dir`, and other standard parameters:
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
tmp.tmpName({ template: '/tmp/tmp-XXXXXX' }).then(path => {
console.log("Created temporary filename: ", path);
});
|
||||
```
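The same call accepts the other naming options as well; a small sketch (the `prefix` and `postfix` values here are only illustrative):

```javascript
var tmp = require('tmp-promise');

tmp.tmpName({ prefix: 'upload-', postfix: '.json' }).then(path => {
  console.log("Created temporary filename: ", path);
});
```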
|
||||
|
||||
### Synchronous filename generation
|
||||
|
||||
The `tmpNameSync()` function works similarly to `tmpName()`.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
var tmpname = tmp.tmpNameSync({ template: '/tmp/tmp-XXXXXX' });
|
||||
console.log("Created temporary filename: ", tmpname);
|
||||
```
|
||||
|
||||
|
||||
## Graceful cleanup
|
||||
|
||||
You may want to clean up the temporary files even when an uncaught exception occurs. To enforce this, you can call the `setGracefulCleanup()` method:
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp-promise');
|
||||
|
||||
tmp.setGracefulCleanup();
|
||||
```
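A brief sketch of the intended effect, assuming the `setGracefulCleanup` re-exported from tmp:

```javascript
var tmp = require('tmp-promise');

tmp.setGracefulCleanup();

tmp.file().then(o => {
  console.log("File: ", o.path);
  // no explicit o.cleanup() here: the file is removed on process exit,
  // including exits triggered by an uncaught exception
});
```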
|
||||
|
||||
## Options
|
||||
|
||||
All options are optional :)
|
||||
|
||||
* `mode`: the file mode to create with; falls back to `0600` on file creation and `0700` on directory creation
* `prefix`: the optional prefix; falls back to `tmp-` if not provided
* `postfix`: the optional postfix; falls back to `.tmp` on file creation
* `template`: [`mkstemp`][3]-like filename template; no default
* `dir`: the optional temporary directory; falls back to the system default (guessed from the environment)
* `tries`: how many times the function should try to get a unique filename before giving up; default `3`
* `keep`: signals that the temporary file or directory should not be deleted on exit; default is `false`, meaning delete
  * If you set `keep`, it is recommended to call the provided `cleanupCallback` function manually (see the sketch after this list).
* `unsafeCleanup`: recursively removes the created temporary directory, even when it's not empty; default is `false`
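A minimal sketch of the `keep` option with the synchronous API (the `postfix` value is only illustrative):

```javascript
var tmp = require('tmp-promise');

var tmpobj = tmp.fileSync({ keep: true, postfix: '.log' });
console.log("File: ", tmpobj.name);

// `keep: true` disables removal on exit, so call the provided callback
// explicitly once the file is no longer needed.
tmpobj.removeCallback();
```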
|
||||
|
||||
|
||||
|
||||
[1]: http://nodejs.org/
|
||||
[2]: https://www.npmjs.com/browse/depended/tmp
|
||||
[3]: http://www.kernel.org/doc/man-pages/online/pages/man3/mkstemp.3.html
|
||||
|
|
|
|||
16
node_modules/tmp-promise/example-usage.js
generated
vendored
16
node_modules/tmp-promise/example-usage.js
generated
vendored
|
|
@@ -1,9 +1,9 @@
|
|||
var tmp = require("./index.js");
|
||||
var Promise = require("bluebird"); // just for delay, this works with native promises
|
||||
// disposer
|
||||
tmp.withFile((path) => {
|
||||
console.log("Created at path", path);
|
||||
return Promise.delay(1000);
|
||||
}).then(() => {
|
||||
console.log("File automatically disposed");
|
||||
var tmp = require("./index.js");
|
||||
var Promise = require("bluebird"); // just for delay, this works with native promises
|
||||
// disposer
|
||||
tmp.withFile((path) => {
|
||||
console.log("Created at path", path);
|
||||
return Promise.delay(1000);
|
||||
}).then(() => {
|
||||
console.log("File automatically disposed");
|
||||
});
|
||||
54
node_modules/tmp-promise/index.d.ts
generated
vendored
54
node_modules/tmp-promise/index.d.ts
generated
vendored
|
|
@@ -1,27 +1,27 @@
|
|||
import { fileSync, dirSync, tmpNameSync, setGracefulCleanup } from "tmp";
|
||||
import { FileOptions, DirOptions, TmpNameOptions } from "tmp";
|
||||
|
||||
export interface DirectoryResult {
|
||||
path: string;
|
||||
cleanup(): Promise<void>;
|
||||
}
|
||||
|
||||
export interface FileResult extends DirectoryResult {
|
||||
fd: number;
|
||||
}
|
||||
|
||||
export function file(options?: FileOptions): Promise<FileResult>;
|
||||
export function withFile<T>(
|
||||
fn: (result: FileResult) => Promise<T>,
|
||||
options?: FileOptions
|
||||
): Promise<T>;
|
||||
|
||||
export function dir(options?: DirOptions): Promise<DirectoryResult>;
|
||||
export function withDir<T>(
|
||||
fn: (results: DirectoryResult) => Promise<T>,
|
||||
options?: DirOptions
|
||||
): Promise<T>;
|
||||
|
||||
export function tmpName(options?: TmpNameOptions): Promise<string>;
|
||||
|
||||
export { fileSync, dirSync, tmpNameSync, setGracefulCleanup };
|
||||
import { fileSync, dirSync, tmpNameSync, setGracefulCleanup } from "tmp";
|
||||
import { FileOptions, DirOptions, TmpNameOptions } from "tmp";
|
||||
|
||||
export interface DirectoryResult {
|
||||
path: string;
|
||||
cleanup: () => Promise<void>;
|
||||
}
|
||||
|
||||
export interface FileResult extends DirectoryResult {
|
||||
fd: number;
|
||||
}
|
||||
|
||||
export function file(options?: FileOptions): Promise<FileResult>;
|
||||
export function withFile<T>(
|
||||
fn: (result: FileResult) => Promise<T>,
|
||||
options?: FileOptions
|
||||
): Promise<T>;
|
||||
|
||||
export function dir(options?: DirOptions): Promise<DirectoryResult>;
|
||||
export function withDir<T>(
|
||||
fn: (results: DirectoryResult) => Promise<T>,
|
||||
options?: DirOptions
|
||||
): Promise<T>;
|
||||
|
||||
export function tmpName(options?: TmpNameOptions): Promise<string>;
|
||||
|
||||
export { fileSync, dirSync, tmpNameSync, setGracefulCleanup };
|
||||
|
|
|
|||
98
node_modules/tmp-promise/index.js
generated
vendored
98
node_modules/tmp-promise/index.js
generated
vendored
|
|
@@ -1,48 +1,50 @@
|
|||
const {promisify} = require("util");
|
||||
const tmp = require("tmp");
|
||||
|
||||
// file
|
||||
module.exports.fileSync = tmp.fileSync;
|
||||
const fileWithOptions = promisify((options, cb) =>
|
||||
tmp.file(options, (err, path, fd, cleanup) =>
|
||||
err ? cb(err) : cb(undefined, { path, fd, cleanup: promisify(cleanup) })
|
||||
)
|
||||
);
|
||||
module.exports.file = async (options) => fileWithOptions(options);
|
||||
|
||||
module.exports.withFile = async function withFile(fn, options) {
|
||||
const { path, fd, cleanup } = await module.exports.file(options);
|
||||
try {
|
||||
return await fn({ path, fd });
|
||||
} finally {
|
||||
await cleanup();
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
// directory
|
||||
module.exports.dirSync = tmp.dirSync;
|
||||
const dirWithOptions = promisify((options, cb) =>
|
||||
tmp.dir(options, (err, path, cleanup) =>
|
||||
err ? cb(err) : cb(undefined, { path, cleanup: promisify(cleanup) })
|
||||
)
|
||||
);
|
||||
module.exports.dir = async (options) => dirWithOptions(options);
|
||||
|
||||
module.exports.withDir = async function withDir(fn, options) {
|
||||
const { path, cleanup } = await module.exports.dir(options);
|
||||
try {
|
||||
return await fn({ path });
|
||||
} finally {
|
||||
await cleanup();
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
// name generation
|
||||
module.exports.tmpNameSync = tmp.tmpNameSync;
|
||||
module.exports.tmpName = promisify(tmp.tmpName);
|
||||
|
||||
module.exports.tmpdir = tmp.tmpdir;
|
||||
|
||||
module.exports.setGracefulCleanup = tmp.setGracefulCleanup;
|
||||
'use strict';
|
||||
|
||||
const { promisify } = require("util");
|
||||
const tmp = require("tmp");
|
||||
|
||||
// file
|
||||
module.exports.fileSync = tmp.fileSync;
|
||||
const fileWithOptions = promisify((options, cb) =>
|
||||
tmp.file(options, (err, path, fd, cleanup) =>
|
||||
err ? cb(err) : cb(undefined, { path, fd, cleanup: promisify(cleanup) })
|
||||
)
|
||||
);
|
||||
module.exports.file = async (options) => fileWithOptions(options);
|
||||
|
||||
module.exports.withFile = async function withFile(fn, options) {
|
||||
const { path, fd, cleanup } = await module.exports.file(options);
|
||||
try {
|
||||
return await fn({ path, fd });
|
||||
} finally {
|
||||
await cleanup();
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
// directory
|
||||
module.exports.dirSync = tmp.dirSync;
|
||||
const dirWithOptions = promisify((options, cb) =>
|
||||
tmp.dir(options, (err, path, cleanup) =>
|
||||
err ? cb(err) : cb(undefined, { path, cleanup: promisify(cleanup) })
|
||||
)
|
||||
);
|
||||
module.exports.dir = async (options) => dirWithOptions(options);
|
||||
|
||||
module.exports.withDir = async function withDir(fn, options) {
|
||||
const { path, cleanup } = await module.exports.dir(options);
|
||||
try {
|
||||
return await fn({ path });
|
||||
} finally {
|
||||
await cleanup();
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
// name generation
|
||||
module.exports.tmpNameSync = tmp.tmpNameSync;
|
||||
module.exports.tmpName = promisify(tmp.tmpName);
|
||||
|
||||
module.exports.tmpdir = tmp.tmpdir;
|
||||
|
||||
module.exports.setGracefulCleanup = tmp.setGracefulCleanup;
|
||||
|
|
|
|||
62
node_modules/tmp-promise/index.test-d.ts
generated
vendored
62
node_modules/tmp-promise/index.test-d.ts
generated
vendored
|
|
@@ -1,31 +1,31 @@
|
|||
import { file, withFile, dir, withDir, tmpName } from ".";
|
||||
|
||||
async function fileExample() {
|
||||
const { path, fd, cleanup } = await file({ discardDescriptor: true });
|
||||
await cleanup();
|
||||
|
||||
await withFile(
|
||||
async ({ path, fd, cleanup }) => {
|
||||
console.log(fd);
|
||||
await cleanup();
|
||||
},
|
||||
{ discardDescriptor: true }
|
||||
);
|
||||
}
|
||||
|
||||
async function dirExample() {
|
||||
const { path, cleanup } = await dir({ unsafeCleanup: true });
|
||||
await cleanup();
|
||||
|
||||
await withDir(
|
||||
async ({ path, cleanup }) => {
|
||||
console.log(path);
|
||||
await cleanup();
|
||||
},
|
||||
{ unsafeCleanup: true }
|
||||
);
|
||||
}
|
||||
|
||||
async function tmpNameExample() {
|
||||
const name = await tmpName({ tries: 3 });
|
||||
}
|
||||
import { file, withFile, dir, withDir, tmpName } from ".";
|
||||
|
||||
async function fileExample() {
|
||||
const { path, fd, cleanup } = await file({ discardDescriptor: true });
|
||||
await cleanup();
|
||||
|
||||
await withFile(
|
||||
async ({ path, fd, cleanup }) => {
|
||||
console.log(fd);
|
||||
await cleanup();
|
||||
},
|
||||
{ discardDescriptor: true }
|
||||
);
|
||||
}
|
||||
|
||||
async function dirExample() {
|
||||
const { path, cleanup } = await dir({ unsafeCleanup: true });
|
||||
await cleanup();
|
||||
|
||||
await withDir(
|
||||
async ({ path, cleanup }) => {
|
||||
console.log(path);
|
||||
await cleanup();
|
||||
},
|
||||
{ unsafeCleanup: true }
|
||||
);
|
||||
}
|
||||
|
||||
async function tmpNameExample() {
|
||||
const name = await tmpName({ tries: 3 });
|
||||
}
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff.