Compare commits

..

No commits in common. "main" and "v8.0.0" have entirely different histories.
main ... v8.0.0

6 changed files with 375 additions and 436 deletions

View File

@ -154,169 +154,3 @@ jobs:
} }
Write-Host "Successfully downloaded artifact without decompressing: $rawFile (size: $($fileInfo.Length) bytes)" Write-Host "Successfully downloaded artifact without decompressing: $rawFile (size: $($fileInfo.Length) bytes)"
shell: pwsh shell: pwsh
# Regression test for artifact filename vs content-type mismatch
# When an archived artifact has a name with a file extension that doesn't
# match the blob type (e.g. "report.txt" but blob is zip), the server
# should append .zip to the content-disposition filename.
# NOTE(review): the upload below uses upload-artifact@v4, which presumably
# stores the content as a zip-typed blob even though the artifact name ends
# in .json — confirm against upload-artifact@v4 behavior.
- name: Create and upload archived artifact with misleading extension
shell: bash
run: |
mkdir -p path/to/extension-test
echo '{"key": "value"}' > path/to/extension-test/data.json
- uses: actions/upload-artifact@v4 # V4 is important here to ensure we're supporting older versions correctly
with:
name: report.txt-${{ matrix.runs-on }}.json
path: path/to/extension-test/data.json
# skip-decompress leaves the downloaded blob as-is instead of extracting it.
- name: Download misleading-extension artifact without decompressing
uses: ./
with:
name: report.txt-${{ matrix.runs-on }}.json
path: ext-test/raw
skip-decompress: true
- name: Verify downloaded file has .zip extension appended
shell: bash
run: |
# The server is expected to have appended .zip to the misleadingly-named file.
expected="ext-test/raw/report.txt-${{ matrix.runs-on }}.json.zip"
if [ -f "$expected" ]; then
echo "PASS: Downloaded file has .zip appended: $expected"
else
echo "FAIL: Expected $expected but got:"
ls -al ext-test/raw/
exit 1
fi
# Test uploading and downloading artifacts with CJK (Chinese, Japanese, Korean) characters
# Regression test: certain non-ASCII chars (e.g. U+571F 土) caused 400 errors from
# Azure Blob Storage due to encoding issues in the Content-Disposition / rscd parameter
# NOTE(review): the upload steps below set no `name:` input; with archive: false
# the artifact name appears to default to the uploaded file's basename (the
# download steps reference file-*.txt names) — confirm against the
# upload-artifact@v7 docs.
- name: Create artifacts with CJK names
shell: bash
run: |
mkdir -p path/to/cjk-artifacts
# Chinese - 土 (U+571F) known to fail, 日 (U+65E5) known to work
echo "Content for 土" > "path/to/cjk-artifacts/file-土-${{ matrix.runs-on }}.txt"
echo "Content for 中文测试" > "path/to/cjk-artifacts/file-中文测试-${{ matrix.runs-on }}.txt"
# Japanese - katakana and kanji
echo "Content for テスト" > "path/to/cjk-artifacts/file-テスト-${{ matrix.runs-on }}.txt"
echo "Content for 東京タワー" > "path/to/cjk-artifacts/file-東京タワー-${{ matrix.runs-on }}.txt"
# Korean - Hangul
echo "Content for 테스트" > "path/to/cjk-artifacts/file-테스트-${{ matrix.runs-on }}.txt"
echo "Content for 서울시" > "path/to/cjk-artifacts/file-서울시-${{ matrix.runs-on }}.txt"
- name: Upload CJK artifact - Chinese 土
uses: actions/upload-artifact@v7
with:
path: path/to/cjk-artifacts/file-土-${{ matrix.runs-on }}.txt
archive: false
- name: Upload CJK artifact - Chinese 中文测试
uses: actions/upload-artifact@v7
with:
path: path/to/cjk-artifacts/file-中文测试-${{ matrix.runs-on }}.txt
archive: false
- name: Upload CJK artifact - Japanese テスト
uses: actions/upload-artifact@v7
with:
path: path/to/cjk-artifacts/file-テスト-${{ matrix.runs-on }}.txt
archive: false
- name: Upload CJK artifact - Japanese 東京タワー
uses: actions/upload-artifact@v7
with:
path: path/to/cjk-artifacts/file-東京タワー-${{ matrix.runs-on }}.txt
archive: false
- name: Upload CJK artifact - Korean 테스트
uses: actions/upload-artifact@v7
with:
path: path/to/cjk-artifacts/file-테스트-${{ matrix.runs-on }}.txt
archive: false
- name: Upload CJK artifact - Korean 서울시
uses: actions/upload-artifact@v7
with:
path: path/to/cjk-artifacts/file-서울시-${{ matrix.runs-on }}.txt
archive: false
# Each artifact is downloaded into its own directory, then all are verified
# together in the final step below.
- name: Download CJK artifact - Chinese 土
uses: ./
with:
name: file-土-${{ matrix.runs-on }}.txt
path: cjk-download/土
- name: Download CJK artifact - Chinese 中文测试
uses: ./
with:
name: file-中文测试-${{ matrix.runs-on }}.txt
path: cjk-download/中文测试
- name: Download CJK artifact - Japanese テスト
uses: ./
with:
name: file-テスト-${{ matrix.runs-on }}.txt
path: cjk-download/テスト
- name: Download CJK artifact - Japanese 東京タワー
uses: ./
with:
name: file-東京タワー-${{ matrix.runs-on }}.txt
path: cjk-download/東京タワー
- name: Download CJK artifact - Korean 테스트
uses: ./
with:
name: file-테스트-${{ matrix.runs-on }}.txt
path: cjk-download/테스트
- name: Download CJK artifact - Korean 서울시
uses: ./
with:
name: file-서울시-${{ matrix.runs-on }}.txt
path: cjk-download/서울시
- name: Verify CJK artifact downloads
shell: bash
run: |
set -e
fail=0
# check_file verifies existence and exact content; failures are recorded in
# $fail (instead of exiting immediately) so every file is reported before
# the step fails at the end.
check_file() {
local file="$1"
local expected="$2"
if [ ! -f "$file" ]; then
echo "FAIL: Missing file: $file"
fail=1
return
fi
actual=$(cat "$file")
if [ "$actual" != "$expected" ]; then
echo "FAIL: Content mismatch in $file"
echo " Expected: '$expected'"
echo " Got: '$actual'"
fail=1
return
fi
echo "PASS: $file"
}
echo "=== Chinese ==="
check_file "cjk-download/土/file-土-${{ matrix.runs-on }}.txt" "Content for 土"
check_file "cjk-download/中文测试/file-中文测试-${{ matrix.runs-on }}.txt" "Content for 中文测试"
echo "=== Japanese ==="
check_file "cjk-download/テスト/file-テスト-${{ matrix.runs-on }}.txt" "Content for テスト"
check_file "cjk-download/東京タワー/file-東京タワー-${{ matrix.runs-on }}.txt" "Content for 東京タワー"
echo "=== Korean ==="
check_file "cjk-download/테스트/file-테스트-${{ matrix.runs-on }}.txt" "Content for 테스트"
check_file "cjk-download/서울시/file-서울시-${{ matrix.runs-on }}.txt" "Content for 서울시"
if [ "$fail" -ne 0 ]; then
echo "Some CJK artifact checks failed"
ls -alR cjk-download/ || true
exit 1
fi
echo "All CJK artifact downloads verified successfully"

View File

@ -1,6 +1,6 @@
--- ---
name: "@actions/artifact" name: "@actions/artifact"
version: 6.2.1 version: 6.1.0
type: npm type: npm
summary: Actions artifact lib summary: Actions artifact lib
homepage: https://github.com/actions/toolkit/tree/main/packages/artifact homepage: https://github.com/actions/toolkit/tree/main/packages/artifact

165
README.md
View File

@ -4,23 +4,83 @@ Download [Actions Artifacts](https://docs.github.com/en/actions/using-workflows/
See also [upload-artifact](https://github.com/actions/upload-artifact). See also [upload-artifact](https://github.com/actions/upload-artifact).
- [What's new](#whats-new) - [`@actions/download-artifact`](#actionsdownload-artifact)
- [Note](#note) - [v7 - What's new](#v7---whats-new)
- [GHES Support](#ghes-support) - [v5 - What's new](#v5---whats-new)
- [Usage](#usage) - [v4 - What's new](#v4---whats-new)
- [Inputs](#inputs) - [Improvements](#improvements)
- [Outputs](#outputs) - [Breaking Changes](#breaking-changes)
- [Examples](#examples) - [Note](#note)
- [Download Single Artifact](#download-single-artifact) - [Usage](#usage)
- [Download Artifacts by ID](#download-artifacts-by-id) - [Inputs](#inputs)
- [Download All Artifacts](#download-all-artifacts) - [Outputs](#outputs)
- [Download multiple (filtered) Artifacts to the same directory](#download-multiple-filtered-artifacts-to-the-same-directory) - [Examples](#examples)
- [Download Artifacts from other Workflow Runs or Repositories](#download-artifacts-from-other-workflow-runs-or-repositories) - [Download Single Artifact](#download-single-artifact)
- [Maintaining File Permissions](#maintaining-file-permissions) - [Download Artifacts by ID](#download-artifacts-by-id)
- [Download All Artifacts](#download-all-artifacts)
- [Download multiple (filtered) Artifacts to the same directory](#download-multiple-filtered-artifacts-to-the-same-directory)
- [Download Artifacts from other Workflow Runs or Repositories](#download-artifacts-from-other-workflow-runs-or-repositories)
- [Limitations](#limitations)
- [Permission Loss](#permission-loss)
## What's new ## v8 - What's new
Check out the [releases page](https://github.com/actions/download-artifact/releases) for details on what's new. > [!IMPORTANT]
> actions/download-artifact@v8 has been migrated to an ESM module. This should be transparent to the caller but forks might need to make significant changes.
> [!IMPORTANT]
> Hash mismatches will now error by default. Users can override this behavior with a setting change (see below).
- Downloads will check the returned content-type to determine whether the downloaded file can be decompressed, and will skip the decompression stage when it cannot. This removes previous failures where we were trying to decompress a non-zip file. Since this is making a big change to the default behavior, we're making it opt-in via a version bump.
- Users can also download a zip file without decompressing it with the new `skip-decompress` flag.
- Introduces a new parameter `digest-mismatch` that allows callers to specify what to do when the downloaded hash doesn't match the expected hash (`ignore`, `info`, `warn`, `error`). To ensure security by default, the default value is `error`.
- Chore: we've bumped versions on a lot of our dev packages to get them up to date with the latest bugfixes/security patches.
## v7 - What's new
> [!IMPORTANT]
> actions/download-artifact@v7 now runs on Node.js 24 (`runs.using: node24`) and requires a minimum Actions Runner version of 2.327.1. If you are using self-hosted runners, ensure they are updated before upgrading.
### Node.js 24
This release updates the runtime to Node.js 24. v6 had preliminary support for Node 24; however, that release still ran on Node.js 20 by default. This action now runs on Node.js 24 by default.
## v5 - What's new
Previously, **single artifact downloads** behaved differently depending on how you specified the artifact:
- **By name**: `name: my-artifact` → extracted to `path/` (direct)
- **By ID**: `artifact-ids: 12345` → extracted to `path/my-artifact/` (nested)
Now both methods are consistent:
- **By name**: `name: my-artifact` → extracted to `path/` (unchanged)
- **By ID**: `artifact-ids: 12345` → extracted to `path/` (updated - now direct)
Note: This change also applies to patterns that only match a single artifact.
## v4 - What's new
> [!IMPORTANT]
> download-artifact@v4+ is not yet supported on GitHub Enterprise Server (GHES). If you are on GHES, you must use [v3](https://github.com/actions/download-artifact/releases/tag/v3) (Node 16) or [v3-node20](https://github.com/actions/download-artifact/releases/tag/v3-node20) (Node 20).
The release of upload-artifact@v4 and download-artifact@v4 are major changes to the backend architecture of Artifacts. They have numerous performance and behavioral improvements.
For more information, see the [`@actions/artifact`](https://github.com/actions/toolkit/tree/main/packages/artifact) documentation.
### Improvements
1. Downloads are significantly faster, upwards of 90% improvement in worst case scenarios.
2. Artifacts can be downloaded from other workflow runs and repositories when supplied with a PAT.
### Breaking Changes
1. On self hosted runners, additional [firewall rules](https://github.com/actions/toolkit/tree/main/packages/artifact#breaking-changes) may be required.
2. Downloading artifacts that were created from `action/upload-artifact@v3` and below are not supported.
For assistance with breaking changes, see [MIGRATION.md](docs/MIGRATION.md).
## Note ## Note
@ -40,16 +100,12 @@ We will still provide security updates for this project and fix major breaking c
You are welcome to still raise bugs in this repo. You are welcome to still raise bugs in this repo.
## GHES Support
`download-artifact@v4+` is not currently supported on GitHub Enterprise Server (GHES) yet. If you are on GHES, you must use [v3](https://github.com/actions/download-artifact/releases/tag/v3) (Node 16) or [v3-node20](https://github.com/actions/download-artifact/releases/tag/v3-node20) (Node 20).
## Usage ## Usage
### Inputs ### Inputs
```yaml ```yaml
- uses: actions/download-artifact@v8 - uses: actions/download-artifact@v5
with: with:
# Name of the artifact to download. # Name of the artifact to download.
# If unspecified, all artifacts for the run are downloaded. # If unspecified, all artifacts for the run are downloaded.
@ -91,17 +147,6 @@ You are welcome to still raise bugs in this repo.
# If github-token is specified, this is the run that artifacts will be downloaded from. # If github-token is specified, this is the run that artifacts will be downloaded from.
# Optional. Default is ${{ github.run_id }} # Optional. Default is ${{ github.run_id }}
run-id: run-id:
# Whether to skip decompressing a zip file (if detected).
# If true, the downloaded artifact will not be automatically extracted/decompressed.
# This is useful when you want to handle the artifact as-is without extraction.
# Optional. Default is `false`
skip-decompress:
# What to do if the action detects a mismatch between the downloaded hash and the expected hash from the server.
# Can be one of: `ignore`, `info`, `warn`, `error`
# Optional. Default is `error`
digest-mismatch:
``` ```
### Outputs ### Outputs
@ -118,7 +163,7 @@ Download to current working directory (`$GITHUB_WORKSPACE`):
```yaml ```yaml
steps: steps:
- uses: actions/download-artifact@v8 - uses: actions/download-artifact@v5
with: with:
name: my-artifact name: my-artifact
- name: Display structure of downloaded files - name: Display structure of downloaded files
@ -129,7 +174,7 @@ Download to a specific directory (also supports `~` expansion):
```yaml ```yaml
steps: steps:
- uses: actions/download-artifact@v8 - uses: actions/download-artifact@v5
with: with:
name: my-artifact name: my-artifact
path: your/destination/dir path: your/destination/dir
@ -137,24 +182,15 @@ steps:
run: ls -R your/destination/dir run: ls -R your/destination/dir
``` ```
Directly download a non-zipped file (only supports files uploaded with `actions/upload-artifact@v7` and `archive: false` set):
```yaml
steps:
- uses: actions/download-artifact@v8
with:
name: my-artifact.json # corresponds to the uploaded file name
```
### Download Artifacts by ID ### Download Artifacts by ID
The `artifact-ids` input allows downloading artifacts using their unique ID rather than name. This is particularly useful when working with immutable artifacts from `actions/upload-artifact@v4+` which assigns a unique ID to each artifact. The `artifact-ids` input allows downloading artifacts using their unique ID rather than name. This is particularly useful when working with immutable artifacts from `actions/upload-artifact@v4` which assigns a unique ID to each artifact.
Download a single artifact by ID to the current working directory (`$GITHUB_WORKSPACE`): Download a single artifact by ID to the current working directory (`$GITHUB_WORKSPACE`):
```yaml ```yaml
steps: steps:
- uses: actions/download-artifact@v8 - uses: actions/download-artifact@v5
with: with:
artifact-ids: 12345 artifact-ids: 12345
- name: Display structure of downloaded files - name: Display structure of downloaded files
@ -165,7 +201,7 @@ Download a single artifact by ID to a specific directory:
```yaml ```yaml
steps: steps:
- uses: actions/download-artifact@v8 - uses: actions/download-artifact@v5
with: with:
artifact-ids: 12345 artifact-ids: 12345
path: your/destination/dir path: your/destination/dir
@ -179,7 +215,7 @@ Multiple artifacts can be downloaded by providing a comma-separated list of IDs:
```yaml ```yaml
steps: steps:
- uses: actions/download-artifact@v8 - uses: actions/download-artifact@v5
with: with:
artifact-ids: 12345,67890 artifact-ids: 12345,67890
path: path/to/artifacts path: path/to/artifacts
@ -207,7 +243,7 @@ Download all artifacts to the current working directory:
```yaml ```yaml
steps: steps:
- uses: actions/download-artifact@v8 - uses: actions/download-artifact@v5
- name: Display structure of downloaded files - name: Display structure of downloaded files
run: ls -R run: ls -R
``` ```
@ -216,7 +252,7 @@ Download all artifacts to a specific directory:
```yaml ```yaml
steps: steps:
- uses: actions/download-artifact@v8 - uses: actions/download-artifact@v5
with: with:
path: path/to/artifacts path: path/to/artifacts
- name: Display structure of downloaded files - name: Display structure of downloaded files
@ -227,7 +263,7 @@ To download them to the _same_ directory:
```yaml ```yaml
steps: steps:
- uses: actions/download-artifact@v8 - uses: actions/download-artifact@v5
with: with:
path: path/to/artifacts path: path/to/artifacts
merge-multiple: true merge-multiple: true
@ -258,7 +294,7 @@ jobs:
- name: Create a File - name: Create a File
run: echo "hello from ${{ matrix.runs-on }}" > file-${{ matrix.runs-on }}.txt run: echo "hello from ${{ matrix.runs-on }}" > file-${{ matrix.runs-on }}.txt
- name: Upload Artifact - name: Upload Artifact
uses: actions/upload-artifact@v7 uses: actions/upload-artifact@v4
with: with:
name: my-artifact-${{ matrix.runs-on }} name: my-artifact-${{ matrix.runs-on }}
path: file-${{ matrix.runs-on }}.txt path: file-${{ matrix.runs-on }}.txt
@ -267,7 +303,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Download All Artifacts - name: Download All Artifacts
uses: actions/download-artifact@v8 uses: actions/download-artifact@v5
with: with:
path: my-artifact path: my-artifact
pattern: my-artifact-* pattern: my-artifact-*
@ -290,7 +326,7 @@ It may be useful to download Artifacts from other workflow runs, or even other r
```yaml ```yaml
steps: steps:
- uses: actions/download-artifact@v8 - uses: actions/download-artifact@v5
with: with:
name: my-other-artifact name: my-other-artifact
github-token: ${{ secrets.GH_PAT }} # token with actions:read permissions on target repo github-token: ${{ secrets.GH_PAT }} # token with actions:read permissions on target repo
@ -298,30 +334,21 @@ steps:
run-id: 1234 run-id: 1234
``` ```
### Maintaining File Permissions ## Limitations
Zipping files will remove file permissions during artifact upload. All directories will have `755` and all files will have `644`. For example, if you make a file executable using `chmod` and then upload that file as a zip file, post-download the file is no longer guaranteed to be set as an executable. ### Permission Loss
If you must preserve permissions, you can `tar` all of your files together before artifact upload and upload it as a single file (using V7+ of `actions/upload-artifact`). Then download the file directly and unpack it manually: File permissions are not maintained during artifact upload. All directories will have `755` and all files will have `644`. For example, if you make a file executable using `chmod` and then upload that file, post-download the file is no longer guaranteed to be set as an executable.
If you must preserve permissions, you can `tar` all of your files together before artifact upload. Post download, the `tar` file will maintain file permissions and case sensitivity.
```yaml ```yaml
- name: 'Tar files' - name: 'Tar files'
run: tar -cvf my_files.tar /path/to/my/directory run: tar -cvf my_files.tar /path/to/my/directory
- name: 'Upload Artifact' - name: 'Upload Artifact'
uses: actions/upload-artifact@v7 uses: actions/upload-artifact@v4
with: with:
name: my-artifact
path: my_files.tar path: my_files.tar
archive: false ```
----
# Later, download the file by name
- name: 'Download Artifact'
uses: actions/download-artifact@v8
with:
name: my_files.tar
```

462
dist/index.js vendored
View File

@ -77339,7 +77339,7 @@ module.exports = index;
/***/ 2822: /***/ 2822:
/***/ ((module) => { /***/ ((module) => {
module.exports = /*#__PURE__*/JSON.parse('{"name":"@actions/artifact","version":"6.2.1","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","type":"module","main":"lib/artifact.js","types":"lib/artifact.d.ts","exports":{".":{"types":"./lib/artifact.d.ts","import":"./lib/artifact.js"}},"directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc && cp src/internal/shared/package-version.cjs lib/internal/shared/","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^3.0.0","@actions/github":"^9.0.0","@actions/http-client":"^4.0.0","@azure/storage-blob":"^12.30.0","@octokit/core":"^7.0.6","@octokit/plugin-request-log":"^6.0.0","@octokit/plugin-retry":"^8.0.0","@octokit/request":"^10.0.7","@octokit/request-error":"^7.1.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","@protobuf-ts/runtime":"^2.9.4","archiver":"^7.0.1","jwt-decode":"^4.0.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^7.0.0","@types/unzip-stream":"^0.3.4","typedoc":"^0.28.16","typedoc-plugin-markdown":"^4.9.0","typescript":"^5.9.3"},"overrides":{"uri-js":"npm:uri-js-replace@^1.0.1","node-fetch":"^3.3.2"}}'); module.exports = /*#__PURE__*/JSON.parse('{"name":"@actions/artifact","version":"6.1.0","preview":true,"description":"Actions artifact 
lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","type":"module","main":"lib/artifact.js","types":"lib/artifact.d.ts","exports":{".":{"types":"./lib/artifact.d.ts","import":"./lib/artifact.js"}},"directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc && cp src/internal/shared/package-version.cjs lib/internal/shared/","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^3.0.0","@actions/github":"^9.0.0","@actions/http-client":"^4.0.0","@azure/storage-blob":"^12.30.0","@octokit/core":"^7.0.6","@octokit/plugin-request-log":"^6.0.0","@octokit/plugin-retry":"^8.0.0","@octokit/request":"^10.0.7","@octokit/request-error":"^7.1.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","@protobuf-ts/runtime":"^2.9.4","archiver":"^7.0.1","jwt-decode":"^4.0.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^7.0.0","@types/unzip-stream":"^0.3.4","typedoc":"^0.28.16","typedoc-plugin-markdown":"^4.9.0","typescript":"^5.9.3"},"overrides":{"uri-js":"npm:uri-js-replace@^1.0.1","node-fetch":"^3.3.2"}}');
/***/ }) /***/ })
@ -81433,6 +81433,236 @@ var build_commonjs = __nccwpck_require__(4420);
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated protobuf-ts reflection code bundled into dist/index.js —
// regenerating the bundle will overwrite manual edits here.
class MigrateArtifactRequest$Type extends commonjs.MessageType {
constructor() {
super("github.actions.results.api.v1.MigrateArtifactRequest", [
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "expires_at", kind: "message", T: () => Timestamp }
]);
}
// Build a message with proto3 defaults (empty strings; expires_at unset),
// then overlay any caller-supplied partial via reflectionMergePartial.
create(value) {
const message = { workflowRunBackendId: "", name: "" };
globalThis.Object.defineProperty(message, commonjs.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0,commonjs.reflectionMergePartial)(this, message, value);
return message;
}
// Decode `length` bytes of protobuf wire format into a message (into `target`
// when given). Unknown fields are thrown on, skipped, or recorded according
// to options.readUnknownField.
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string workflow_run_backend_id */ 1:
message.workflowRunBackendId = reader.string();
break;
case /* string name */ 2:
message.name = reader.string();
break;
case /* google.protobuf.Timestamp expires_at */ 3:
message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? commonjs.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
// Encode the message, emitting only fields whose value differs from the
// proto3 default; unknown fields are re-emitted per options.writeUnknownFields.
internalBinaryWrite(message, writer, options) {
/* string workflow_run_backend_id = 1; */
if (message.workflowRunBackendId !== "")
writer.tag(1, commonjs.WireType.LengthDelimited).string(message.workflowRunBackendId);
/* string name = 2; */
if (message.name !== "")
writer.tag(2, commonjs.WireType.LengthDelimited).string(message.name);
/* google.protobuf.Timestamp expires_at = 3; */
if (message.expiresAt)
Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(3, commonjs.WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? commonjs.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactRequest
 */
const MigrateArtifactRequest = new MigrateArtifactRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class MigrateArtifactResponse$Type extends commonjs.MessageType {
    constructor() {
        super("github.actions.results.api.v1.MigrateArtifactResponse", [
            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
            { no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    // Build a message pre-populated with proto3 field defaults, then overlay
    // any caller-supplied partial value via reflection merge.
    create(value) {
        const msg = { ok: false, signedUploadUrl: "" };
        globalThis.Object.defineProperty(msg, commonjs.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0,commonjs.reflectionMergePartial)(this, msg, value);
        return msg;
    }
    // Decode `length` bytes of protobuf wire format into a message (into
    // `target` when given). Unknown fields are thrown on, skipped, or
    // recorded according to options.readUnknownField.
    internalBinaryRead(reader, length, options, target) {
        const msg = target !== null && target !== void 0 ? target : this.create();
        const end = reader.pos + length;
        while (reader.pos < end) {
            const [fieldNo, wireType] = reader.tag();
            if (fieldNo === 1) {
                /* bool ok */
                msg.ok = reader.bool();
            }
            else if (fieldNo === 2) {
                /* string signed_upload_url */
                msg.signedUploadUrl = reader.string();
            }
            else {
                const u = options.readUnknownField;
                if (u === "throw")
                    throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                const d = reader.skip(wireType);
                if (u !== false)
                    (u === true ? commonjs.UnknownFieldHandler.onRead : u)(this.typeName, msg, fieldNo, wireType, d);
            }
        }
        return msg;
    }
    // Encode the message to the wire, emitting only fields whose value
    // differs from the proto3 default; unknown fields are re-emitted per
    // options.writeUnknownFields.
    internalBinaryWrite(message, writer, options) {
        /* bool ok = 1; */
        if (message.ok !== false)
            writer.tag(1, commonjs.WireType.Varint).bool(message.ok);
        /* string signed_upload_url = 2; */
        if (message.signedUploadUrl !== "")
            writer.tag(2, commonjs.WireType.LengthDelimited).string(message.signedUploadUrl);
        const u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? commonjs.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactResponse
 */
const MigrateArtifactResponse = new MigrateArtifactResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated protobuf-ts reflection code bundled into dist/index.js —
// regenerating the bundle will overwrite manual edits here.
class FinalizeMigratedArtifactRequest$Type extends commonjs.MessageType {
constructor() {
super("github.actions.results.api.v1.FinalizeMigratedArtifactRequest", [
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
// Build a message with proto3 defaults; the int64 `size` field is surfaced
// as a decimal string ("0"), matching the reader below.
create(value) {
const message = { workflowRunBackendId: "", name: "", size: "0" };
globalThis.Object.defineProperty(message, commonjs.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0,commonjs.reflectionMergePartial)(this, message, value);
return message;
}
// Decode `length` bytes of protobuf wire format into a message; int64 `size`
// is read and stored as a decimal string. Unknown fields are handled per
// options.readUnknownField.
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string workflow_run_backend_id */ 1:
message.workflowRunBackendId = reader.string();
break;
case /* string name */ 2:
message.name = reader.string();
break;
case /* int64 size */ 3:
message.size = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? commonjs.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
// Encode the message, emitting only fields whose value differs from the
// proto3 default (note `size` is compared against the string "0").
internalBinaryWrite(message, writer, options) {
/* string workflow_run_backend_id = 1; */
if (message.workflowRunBackendId !== "")
writer.tag(1, commonjs.WireType.LengthDelimited).string(message.workflowRunBackendId);
/* string name = 2; */
if (message.name !== "")
writer.tag(2, commonjs.WireType.LengthDelimited).string(message.name);
/* int64 size = 3; */
if (message.size !== "0")
writer.tag(3, commonjs.WireType.Varint).int64(message.size);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? commonjs.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
 */
const FinalizeMigratedArtifactRequest = new FinalizeMigratedArtifactRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated protobuf-ts reflection code bundled into dist/index.js —
// regenerating the bundle will overwrite manual edits here.
class FinalizeMigratedArtifactResponse$Type extends commonjs.MessageType {
constructor() {
super("github.actions.results.api.v1.FinalizeMigratedArtifactResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
// Build a message with proto3 defaults; the int64 `artifact_id` field is
// surfaced as a decimal string ("0"), matching the reader below.
create(value) {
const message = { ok: false, artifactId: "0" };
globalThis.Object.defineProperty(message, commonjs.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0,commonjs.reflectionMergePartial)(this, message, value);
return message;
}
// Decode `length` bytes of protobuf wire format into a message; int64
// `artifact_id` is read and stored as a decimal string. Unknown fields are
// handled per options.readUnknownField.
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* int64 artifact_id */ 2:
message.artifactId = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? commonjs.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
// Encode the message, emitting only fields whose value differs from the
// proto3 default (note `artifactId` is compared against the string "0").
internalBinaryWrite(message, writer, options) {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, commonjs.WireType.Varint).bool(message.ok);
/* int64 artifact_id = 2; */
if (message.artifactId !== "0")
writer.tag(2, commonjs.WireType.Varint).int64(message.artifactId);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? commonjs.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
 */
const FinalizeMigratedArtifactResponse = new FinalizeMigratedArtifactResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods // @generated message type with reflection information, may provide speed optimized methods
class CreateArtifactRequest$Type extends commonjs.MessageType { class CreateArtifactRequest$Type extends commonjs.MessageType {
constructor() { constructor() {
@ -81441,8 +81671,7 @@ class CreateArtifactRequest$Type extends commonjs.MessageType {
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, { no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 4, name: "expires_at", kind: "message", T: () => Timestamp }, { no: 4, name: "expires_at", kind: "message", T: () => Timestamp },
{ no: 5, name: "version", kind: "scalar", T: 5 /*ScalarType.INT32*/ }, { no: 5, name: "version", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
{ no: 6, name: "mime_type", kind: "message", T: () => StringValue }
]); ]);
} }
create(value) { create(value) {
@ -81472,9 +81701,6 @@ class CreateArtifactRequest$Type extends commonjs.MessageType {
case /* int32 version */ 5: case /* int32 version */ 5:
message.version = reader.int32(); message.version = reader.int32();
break; break;
case /* google.protobuf.StringValue mime_type */ 6:
message.mimeType = StringValue.internalBinaryRead(reader, reader.uint32(), options, message.mimeType);
break;
default: default:
let u = options.readUnknownField; let u = options.readUnknownField;
if (u === "throw") if (u === "throw")
@ -81502,9 +81728,6 @@ class CreateArtifactRequest$Type extends commonjs.MessageType {
/* int32 version = 5; */ /* int32 version = 5; */
if (message.version !== 0) if (message.version !== 0)
writer.tag(5, commonjs.WireType.Varint).int32(message.version); writer.tag(5, commonjs.WireType.Varint).int32(message.version);
/* google.protobuf.StringValue mime_type = 6; */
if (message.mimeType)
StringValue.internalBinaryWrite(message.mimeType, writer.tag(6, commonjs.WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields; let u = options.writeUnknownFields;
if (u !== false) if (u !== false)
(u == true ? commonjs.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); (u == true ? commonjs.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@ -81770,7 +81993,7 @@ const artifact_ListArtifactsRequest = new ListArtifactsRequest$Type();
class ListArtifactsResponse$Type extends commonjs.MessageType { class ListArtifactsResponse$Type extends commonjs.MessageType {
constructor() { constructor() {
super("github.actions.results.api.v1.ListArtifactsResponse", [ super("github.actions.results.api.v1.ListArtifactsResponse", [
{ no: 1, name: "artifacts", kind: "message", repeat: 2 /*RepeatType.UNPACKED*/, T: () => ListArtifactsResponse_MonolithArtifact } { no: 1, name: "artifacts", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ListArtifactsResponse_MonolithArtifact }
]); ]);
} }
create(value) { create(value) {
@ -82133,7 +82356,9 @@ const ArtifactService = new build_commonjs/* ServiceType */.C0("github.actions.r
{ name: "FinalizeArtifact", options: {}, I: artifact_FinalizeArtifactRequest, O: artifact_FinalizeArtifactResponse }, { name: "FinalizeArtifact", options: {}, I: artifact_FinalizeArtifactRequest, O: artifact_FinalizeArtifactResponse },
{ name: "ListArtifacts", options: {}, I: artifact_ListArtifactsRequest, O: artifact_ListArtifactsResponse }, { name: "ListArtifacts", options: {}, I: artifact_ListArtifactsRequest, O: artifact_ListArtifactsResponse },
{ name: "GetSignedArtifactURL", options: {}, I: artifact_GetSignedArtifactURLRequest, O: artifact_GetSignedArtifactURLResponse }, { name: "GetSignedArtifactURL", options: {}, I: artifact_GetSignedArtifactURLRequest, O: artifact_GetSignedArtifactURLResponse },
{ name: "DeleteArtifact", options: {}, I: artifact_DeleteArtifactRequest, O: artifact_DeleteArtifactResponse } { name: "DeleteArtifact", options: {}, I: artifact_DeleteArtifactRequest, O: artifact_DeleteArtifactResponse },
{ name: "MigrateArtifact", options: {}, I: MigrateArtifactRequest, O: MigrateArtifactResponse },
{ name: "FinalizeMigratedArtifact", options: {}, I: FinalizeMigratedArtifactRequest, O: FinalizeMigratedArtifactResponse }
]); ]);
//# sourceMappingURL=artifact.js.map //# sourceMappingURL=artifact.js.map
;// CONCATENATED MODULE: ./node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.twirp-client.js ;// CONCATENATED MODULE: ./node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.twirp-client.js
@ -82391,7 +82616,7 @@ NetworkError.isNetworkErrorCode = (code) => {
}; };
class UsageError extends Error { class UsageError extends Error {
constructor() { constructor() {
const message = `Artifact storage quota has been hit. Unable to upload any new artifacts.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`; const message = `Artifact storage quota has been hit. Unable to upload any new artifacts. Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`;
super(message); super(message);
this.name = 'UsageError'; this.name = 'UsageError';
} }
@ -121876,7 +122101,7 @@ var blob_upload_awaiter = (undefined && undefined.__awaiter) || function (thisAr
function uploadToBlobStorage(authenticatedUploadURL, uploadStream, contentType) { function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
return blob_upload_awaiter(this, void 0, void 0, function* () { return blob_upload_awaiter(this, void 0, void 0, function* () {
let uploadByteCount = 0; let uploadByteCount = 0;
let lastProgressTime = Date.now(); let lastProgressTime = Date.now();
@ -121898,26 +122123,26 @@ function uploadToBlobStorage(authenticatedUploadURL, uploadStream, contentType)
const bufferSize = getUploadChunkSize(); const bufferSize = getUploadChunkSize();
const blobClient = new BlobClient(authenticatedUploadURL); const blobClient = new BlobClient(authenticatedUploadURL);
const blockBlobClient = blobClient.getBlockBlobClient(); const blockBlobClient = blobClient.getBlockBlobClient();
core_debug(`Uploading artifact to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}, contentType: ${contentType}`); core_debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`);
const uploadCallback = (progress) => { const uploadCallback = (progress) => {
info(`Uploaded bytes ${progress.loadedBytes}`); info(`Uploaded bytes ${progress.loadedBytes}`);
uploadByteCount = progress.loadedBytes; uploadByteCount = progress.loadedBytes;
lastProgressTime = Date.now(); lastProgressTime = Date.now();
}; };
const options = { const options = {
blobHTTPHeaders: { blobContentType: contentType }, blobHTTPHeaders: { blobContentType: 'zip' },
onProgress: uploadCallback, onProgress: uploadCallback,
abortSignal: abortController.signal abortSignal: abortController.signal
}; };
let sha256Hash = undefined; let sha256Hash = undefined;
const blobUploadStream = new external_stream_.PassThrough(); const uploadStream = new external_stream_.PassThrough();
const hashStream = external_crypto_namespaceObject.createHash('sha256'); const hashStream = external_crypto_namespaceObject.createHash('sha256');
uploadStream.pipe(blobUploadStream); // This stream is used for the upload zipUploadStream.pipe(uploadStream); // This stream is used for the upload
uploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the content for integrity check zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets used. Integrity check
info('Beginning upload of artifact content to blob storage'); info('Beginning upload of artifact content to blob storage');
try { try {
yield Promise.race([ yield Promise.race([
blockBlobClient.uploadStream(blobUploadStream, bufferSize, maxConcurrency, options), blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options),
chunkTimer(getUploadChunkTimeout()) chunkTimer(getUploadChunkTimeout())
]); ]);
} }
@ -121933,7 +122158,7 @@ function uploadToBlobStorage(authenticatedUploadURL, uploadStream, contentType)
info('Finished uploading artifact content to blob storage!'); info('Finished uploading artifact content to blob storage!');
hashStream.end(); hashStream.end();
sha256Hash = hashStream.read(); sha256Hash = hashStream.read();
info(`SHA256 digest of uploaded artifact is ${sha256Hash}`); info(`SHA256 digest of uploaded artifact zip is ${sha256Hash}`);
if (uploadByteCount === 0) { if (uploadByteCount === 0) {
warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`); warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`);
} }
@ -121948,59 +122173,6 @@ function uploadToBlobStorage(authenticatedUploadURL, uploadStream, contentType)
const promises_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("fs/promises"); const promises_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("fs/promises");
// EXTERNAL MODULE: ./node_modules/archiver/index.js // EXTERNAL MODULE: ./node_modules/archiver/index.js
var archiver = __nccwpck_require__(9392); var archiver = __nccwpck_require__(9392);
;// CONCATENATED MODULE: ./node_modules/@actions/artifact/lib/internal/upload/stream.js
var stream_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
// Custom stream transformer so we can set the highWaterMark property
// See https://github.com/nodejs/node/issues/8855
class WaterMarkedUploadStream extends external_stream_.Transform {
constructor(bufferSize) {
super({
highWaterMark: bufferSize
});
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
_transform(chunk, enc, cb) {
cb(null, chunk);
}
}
function createRawFileUploadStream(filePath) {
return stream_awaiter(this, void 0, void 0, function* () {
core_debug(`Creating raw file upload stream for: ${filePath}`);
const bufferSize = getUploadChunkSize();
const uploadStream = new WaterMarkedUploadStream(bufferSize);
// Check if symlink and resolve the source path
let sourcePath = filePath;
const stats = yield external_fs_.promises.lstat(filePath);
if (stats.isSymbolicLink()) {
sourcePath = yield (0,promises_namespaceObject.realpath)(filePath);
}
// Create a read stream from the file and pipe it to the upload stream
const fileStream = external_fs_.createReadStream(sourcePath, {
highWaterMark: bufferSize
});
fileStream.on('error', error => {
core_error('An error has occurred while reading the file for upload');
core_error(String(error));
uploadStream.destroy(new Error('An error has occurred during file read for the artifact'));
});
fileStream.pipe(uploadStream);
return uploadStream;
});
}
//# sourceMappingURL=stream.js.map
;// CONCATENATED MODULE: ./node_modules/@actions/artifact/lib/internal/upload/zip.js ;// CONCATENATED MODULE: ./node_modules/@actions/artifact/lib/internal/upload/zip.js
var zip_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) { var zip_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
@ -122017,6 +122189,19 @@ var zip_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _argu
const DEFAULT_COMPRESSION_LEVEL = 6; const DEFAULT_COMPRESSION_LEVEL = 6;
// Custom stream transformer so we can set the highWaterMark property
// See https://github.com/nodejs/node/issues/8855
class ZipUploadStream extends external_stream_.Transform {
constructor(bufferSize) {
super({
highWaterMark: bufferSize
});
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
_transform(chunk, enc, cb) {
cb(null, chunk);
}
}
function createZipUploadStream(uploadSpecification_1) { function createZipUploadStream(uploadSpecification_1) {
return zip_awaiter(this, arguments, void 0, function* (uploadSpecification, compressionLevel = DEFAULT_COMPRESSION_LEVEL) { return zip_awaiter(this, arguments, void 0, function* (uploadSpecification, compressionLevel = DEFAULT_COMPRESSION_LEVEL) {
core_debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`); core_debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`);
@ -122047,7 +122232,7 @@ function createZipUploadStream(uploadSpecification_1) {
} }
} }
const bufferSize = getUploadChunkSize(); const bufferSize = getUploadChunkSize();
const zipUploadStream = new WaterMarkedUploadStream(bufferSize); const zipUploadStream = new ZipUploadStream(bufferSize);
core_debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`); core_debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`);
core_debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`); core_debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`);
zip.pipe(zipUploadStream); zip.pipe(zipUploadStream);
@ -122079,78 +122264,6 @@ const zipEndCallback = () => {
core_debug('Zip stream for upload has ended.'); core_debug('Zip stream for upload has ended.');
}; };
//# sourceMappingURL=zip.js.map //# sourceMappingURL=zip.js.map
;// CONCATENATED MODULE: ./node_modules/@actions/artifact/lib/internal/upload/types.js
/**
* Maps file extensions to MIME types
*/
const types_mimeTypes = {
// Text
'.txt': 'text/plain',
'.html': 'text/html',
'.htm': 'text/html',
'.css': 'text/css',
'.csv': 'text/csv',
'.xml': 'text/xml',
'.md': 'text/markdown',
// JavaScript/JSON
'.js': 'application/javascript',
'.mjs': 'application/javascript',
'.json': 'application/json',
// Images
'.png': 'image/png',
'.jpg': 'image/jpeg',
'.jpeg': 'image/jpeg',
'.gif': 'image/gif',
'.svg': 'image/svg+xml',
'.webp': 'image/webp',
'.ico': 'image/x-icon',
'.bmp': 'image/bmp',
'.tiff': 'image/tiff',
'.tif': 'image/tiff',
// Audio
'.mp3': 'audio/mpeg',
'.wav': 'audio/wav',
'.ogg': 'audio/ogg',
'.flac': 'audio/flac',
// Video
'.mp4': 'video/mp4',
'.webm': 'video/webm',
'.avi': 'video/x-msvideo',
'.mov': 'video/quicktime',
// Documents
'.pdf': 'application/pdf',
'.doc': 'application/msword',
'.docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
'.xls': 'application/vnd.ms-excel',
'.xlsx': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
'.ppt': 'application/vnd.ms-powerpoint',
'.pptx': 'application/vnd.openxmlformats-officedocument.presentationml.presentation',
// Archives
'.zip': 'application/zip',
'.tar': 'application/x-tar',
'.gz': 'application/gzip',
'.rar': 'application/vnd.rar',
'.7z': 'application/x-7z-compressed',
// Code/Data
'.wasm': 'application/wasm',
'.yaml': 'application/x-yaml',
'.yml': 'application/x-yaml',
// Fonts
'.woff': 'font/woff',
'.woff2': 'font/woff2',
'.ttf': 'font/ttf',
'.otf': 'font/otf',
'.eot': 'application/vnd.ms-fontobject'
};
/**
* Gets the MIME type for a file based on its extension
*/
function getMimeType(filePath) {
const ext = external_path_.extname(filePath).toLowerCase();
return types_mimeTypes[ext] || 'application/octet-stream';
}
//# sourceMappingURL=types.js.map
;// CONCATENATED MODULE: ./node_modules/@actions/artifact/lib/internal/upload/upload-artifact.js ;// CONCATENATED MODULE: ./node_modules/@actions/artifact/lib/internal/upload/upload-artifact.js
var upload_artifact_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) { var upload_artifact_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
@ -122171,36 +122284,14 @@ var upload_artifact_awaiter = (undefined && undefined.__awaiter) || function (th
function uploadArtifact(name, files, rootDirectory, options) { function uploadArtifact(name, files, rootDirectory, options) {
return upload_artifact_awaiter(this, void 0, void 0, function* () { return upload_artifact_awaiter(this, void 0, void 0, function* () {
let artifactFileName = `${name}.zip`;
if (options === null || options === void 0 ? void 0 : options.skipArchive) {
if (files.length === 0) {
throw new FilesNotFoundError([]);
}
if (files.length > 1) {
throw new Error('skipArchive option is only supported when uploading a single file');
}
if (!external_fs_.existsSync(files[0])) {
throw new FilesNotFoundError(files);
}
artifactFileName = external_path_.basename(files[0]);
name = artifactFileName;
}
validateArtifactName(name); validateArtifactName(name);
validateRootDirectory(rootDirectory); validateRootDirectory(rootDirectory);
let zipSpecification = []; const zipSpecification = getUploadZipSpecification(files, rootDirectory);
if (!(options === null || options === void 0 ? void 0 : options.skipArchive)) { if (zipSpecification.length === 0) {
zipSpecification = getUploadZipSpecification(files, rootDirectory); throw new FilesNotFoundError(zipSpecification.flatMap(s => (s.sourcePath ? [s.sourcePath] : [])));
if (zipSpecification.length === 0) {
throw new FilesNotFoundError(zipSpecification.flatMap(s => (s.sourcePath ? [s.sourcePath] : [])));
}
} }
const contentType = getMimeType(artifactFileName);
// get the IDs needed for the artifact creation // get the IDs needed for the artifact creation
const backendIds = getBackendIdsFromToken(); const backendIds = getBackendIdsFromToken();
// create the artifact client // create the artifact client
@ -122210,8 +122301,7 @@ function uploadArtifact(name, files, rootDirectory, options) {
workflowRunBackendId: backendIds.workflowRunBackendId, workflowRunBackendId: backendIds.workflowRunBackendId,
workflowJobRunBackendId: backendIds.workflowJobRunBackendId, workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
name, name,
mimeType: StringValue.create({ value: contentType }), version: 4
version: 7
}; };
// if there is a retention period, add it to the request // if there is a retention period, add it to the request
const expiresAt = getExpiration(options === null || options === void 0 ? void 0 : options.retentionDays); const expiresAt = getExpiration(options === null || options === void 0 ? void 0 : options.retentionDays);
@ -122222,17 +122312,9 @@ function uploadArtifact(name, files, rootDirectory, options) {
if (!createArtifactResp.ok) { if (!createArtifactResp.ok) {
throw new InvalidResponseError('CreateArtifact: response from backend was not ok'); throw new InvalidResponseError('CreateArtifact: response from backend was not ok');
} }
let stream; const zipUploadStream = yield createZipUploadStream(zipSpecification, options === null || options === void 0 ? void 0 : options.compressionLevel);
if (options === null || options === void 0 ? void 0 : options.skipArchive) { // Upload zip to blob storage
// Upload raw file without archiving const uploadResult = yield uploadZipToBlobStorage(createArtifactResp.signedUploadUrl, zipUploadStream);
stream = yield createRawFileUploadStream(files[0]);
}
else {
// Create and upload zip archive
stream = yield createZipUploadStream(zipSpecification, options === null || options === void 0 ? void 0 : options.compressionLevel);
}
info(`Uploading artifact: ${artifactFileName}`);
const uploadResult = yield uploadToBlobStorage(createArtifactResp.signedUploadUrl, stream, contentType);
// finalize the artifact // finalize the artifact
const finalizeArtifactReq = { const finalizeArtifactReq = {
workflowRunBackendId: backendIds.workflowRunBackendId, workflowRunBackendId: backendIds.workflowRunBackendId,
@ -122251,7 +122333,7 @@ function uploadArtifact(name, files, rootDirectory, options) {
throw new InvalidResponseError('FinalizeArtifact: response from backend was not ok'); throw new InvalidResponseError('FinalizeArtifact: response from backend was not ok');
} }
const artifactId = BigInt(finalizeArtifactResp.artifactId); const artifactId = BigInt(finalizeArtifactResp.artifactId);
info(`Artifact ${name} successfully finalized. Artifact ID ${artifactId}`); info(`Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`);
return { return {
size: uploadResult.uploadSize, size: uploadResult.uploadSize,
digest: uploadResult.sha256Hash, digest: uploadResult.sha256Hash,
@ -126422,17 +126504,13 @@ function streamExtractExternal(url_1, directory_1) {
mimeType === 'application/zip-compressed' || mimeType === 'application/zip-compressed' ||
urlEndsWithZip; urlEndsWithZip;
// Extract filename from Content-Disposition header // Extract filename from Content-Disposition header
// Prefer filename* (RFC 5987) which supports UTF-8 encoded filenames,
// fall back to filename which may contain ASCII-only replacements
const contentDisposition = response.message.headers['content-disposition'] || ''; const contentDisposition = response.message.headers['content-disposition'] || '';
let fileName = 'artifact'; let fileName = 'artifact';
const filenameStar = contentDisposition.match(/filename\*\s*=\s*UTF-8''([^;\r\n]*)/i); const filenameMatch = contentDisposition.match(/filename\*?=['"]?(?:UTF-\d['"]*)?([^;\r\n"']*)['"]?/i);
const filenamePlain = contentDisposition.match(/(?<!\*)filename\s*=\s*['"]?([^;\r\n"']*)['"]?/i); if (filenameMatch && filenameMatch[1]) {
const rawName = (filenameStar === null || filenameStar === void 0 ? void 0 : filenameStar[1]) || (filenamePlain === null || filenamePlain === void 0 ? void 0 : filenamePlain[1]);
if (rawName) {
// Sanitize fileName to prevent path traversal attacks // Sanitize fileName to prevent path traversal attacks
// Use path.basename to extract only the filename component // Use path.basename to extract only the filename component
fileName = external_path_.basename(decodeURIComponent(rawName.trim())); fileName = external_path_.basename(decodeURIComponent(filenameMatch[1].trim()));
} }
core_debug(`Content-Type: ${contentType}, mimeType: ${mimeType}, urlEndsWithZip: ${urlEndsWithZip}, isZip: ${isZip}, skipDecompress: ${skipDecompress}`); core_debug(`Content-Type: ${contentType}, mimeType: ${mimeType}, urlEndsWithZip: ${urlEndsWithZip}, isZip: ${isZip}, skipDecompress: ${skipDecompress}`);
core_debug(`Content-Disposition: ${contentDisposition}, fileName: ${fileName}`); core_debug(`Content-Disposition: ${contentDisposition}, fileName: ${fileName}`);

12
package-lock.json generated
View File

@ -1,15 +1,15 @@
{ {
"name": "download-artifact", "name": "download-artifact",
"version": "8.0.1", "version": "7.0.0",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "download-artifact", "name": "download-artifact",
"version": "8.0.1", "version": "7.0.0",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@actions/artifact": "^6.2.1", "@actions/artifact": "^6.1.0",
"@actions/core": "^3.0.0", "@actions/core": "^3.0.0",
"minimatch": "^10.1.1" "minimatch": "^10.1.1"
}, },
@ -36,9 +36,9 @@
} }
}, },
"node_modules/@actions/artifact": { "node_modules/@actions/artifact": {
"version": "6.2.1", "version": "6.1.0",
"resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-6.2.1.tgz", "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-6.1.0.tgz",
"integrity": "sha512-sJGH0mhEbEjBCw7o6SaLhUU66u27aFW8HTfkIb5Tk2/Wy0caUDc+oYQEgnuFN7a0HCpAbQyK0U6U7XUJDgDWrw==", "integrity": "sha512-oRn9YhKkboXgIq2TQZ9uj6bhkT5ZUzFtnyTQ0tLGBwImaD0GfWShE5R0tPbN25EJmS3tz5sDd2JnVokAOtNrZQ==",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@actions/core": "^3.0.0", "@actions/core": "^3.0.0",

View File

@ -1,6 +1,6 @@
{ {
"name": "download-artifact", "name": "download-artifact",
"version": "8.0.1", "version": "8.0.0",
"description": "Download an Actions Artifact from a workflow run", "description": "Download an Actions Artifact from a workflow run",
"type": "module", "type": "module",
"engines": { "engines": {
@ -33,7 +33,7 @@
}, },
"homepage": "https://github.com/actions/download-artifact#readme", "homepage": "https://github.com/actions/download-artifact#readme",
"dependencies": { "dependencies": {
"@actions/artifact": "^6.2.1", "@actions/artifact": "^6.1.0",
"@actions/core": "^3.0.0", "@actions/core": "^3.0.0",
"minimatch": "^10.1.1" "minimatch": "^10.1.1"
}, },