Compare commits

...

3 Commits
v8.0.0 ... main

Author SHA1 Message Date
Daniel Kennedy
484a0b528f
Update the readme (#473)
* Update the readme

* Update upload artifact versions

* Add back the GHES support note

* Update the skip-decompress description

* Be more specific with the direct download example

* Add GHES support section to TOC
2026-03-12 09:59:43 -04:00
Daniel Kennedy
3e5f45b2cf
Add regression tests for CJK characters (#471)
* Add regression tests for CJK characters

* Dedupe the names and remove the ignored `name` param

* Bump @actions/artifact to v6.2.1

* Run `npm run release`

* Update licenses
2026-03-11 11:35:54 -04:00
Daniel Kennedy
e6d03f6737
Add a regression test for artifact name + content-type mismatches (#472)
* Add a regression test for artifact name + content-type mismatches

* Specify that we want v4 in a comment

* Fix the expected type
2026-03-11 08:57:07 -04:00
6 changed files with 436 additions and 375 deletions

View File

@@ -154,3 +154,169 @@ jobs:
}
Write-Host "Successfully downloaded artifact without decompressing: $rawFile (size: $($fileInfo.Length) bytes)"
shell: pwsh
# Regression test for artifact filename vs content-type mismatch
# When an archived artifact has a name with a file extension that doesn't
# match the blob type (e.g. "report.txt" but blob is zip), the server
# should append .zip to the content-disposition filename.
- name: Create and upload archived artifact with misleading extension
  shell: bash
  run: |
    mkdir -p path/to/extension-test
    echo '{"key": "value"}' > path/to/extension-test/data.json
- uses: actions/upload-artifact@v4 # V4 is important here to ensure we're supporting older versions correctly
  with:
    name: report.txt-${{ matrix.runs-on }}.json
    path: path/to/extension-test/data.json
- name: Download misleading-extension artifact without decompressing
  uses: ./
  with:
    name: report.txt-${{ matrix.runs-on }}.json
    path: ext-test/raw
    skip-decompress: true
- name: Verify downloaded file has .zip extension appended
  shell: bash
  run: |
    expected="ext-test/raw/report.txt-${{ matrix.runs-on }}.json.zip"
    if [ -f "$expected" ]; then
      echo "PASS: Downloaded file has .zip appended: $expected"
    else
      echo "FAIL: Expected $expected but got:"
      ls -al ext-test/raw/
      exit 1
    fi
# Test uploading and downloading artifacts with CJK (Chinese, Japanese, Korean) characters
# Regression test: certain non-ASCII chars (e.g. U+571F 土) caused 400 errors from
# Azure Blob Storage due to encoding issues in the Content-Disposition / rscd parameter
- name: Create artifacts with CJK names
  shell: bash
  run: |
    mkdir -p path/to/cjk-artifacts
    # Chinese - 土 (U+571F) known to fail, 日 (U+65E5) known to work
    echo "Content for 土" > "path/to/cjk-artifacts/file-土-${{ matrix.runs-on }}.txt"
    echo "Content for 中文测试" > "path/to/cjk-artifacts/file-中文测试-${{ matrix.runs-on }}.txt"
    # Japanese - katakana and kanji
    echo "Content for テスト" > "path/to/cjk-artifacts/file-テスト-${{ matrix.runs-on }}.txt"
    echo "Content for 東京タワー" > "path/to/cjk-artifacts/file-東京タワー-${{ matrix.runs-on }}.txt"
    # Korean - Hangul
    echo "Content for 테스트" > "path/to/cjk-artifacts/file-테스트-${{ matrix.runs-on }}.txt"
    echo "Content for 서울시" > "path/to/cjk-artifacts/file-서울시-${{ matrix.runs-on }}.txt"
- name: Upload CJK artifact - Chinese 土
  uses: actions/upload-artifact@v7
  with:
    path: path/to/cjk-artifacts/file-土-${{ matrix.runs-on }}.txt
    archive: false
- name: Upload CJK artifact - Chinese 中文测试
  uses: actions/upload-artifact@v7
  with:
    path: path/to/cjk-artifacts/file-中文测试-${{ matrix.runs-on }}.txt
    archive: false
- name: Upload CJK artifact - Japanese テスト
  uses: actions/upload-artifact@v7
  with:
    path: path/to/cjk-artifacts/file-テスト-${{ matrix.runs-on }}.txt
    archive: false
- name: Upload CJK artifact - Japanese 東京タワー
  uses: actions/upload-artifact@v7
  with:
    path: path/to/cjk-artifacts/file-東京タワー-${{ matrix.runs-on }}.txt
    archive: false
- name: Upload CJK artifact - Korean 테스트
  uses: actions/upload-artifact@v7
  with:
    path: path/to/cjk-artifacts/file-테스트-${{ matrix.runs-on }}.txt
    archive: false
- name: Upload CJK artifact - Korean 서울시
  uses: actions/upload-artifact@v7
  with:
    path: path/to/cjk-artifacts/file-서울시-${{ matrix.runs-on }}.txt
    archive: false
- name: Download CJK artifact - Chinese 土
  uses: ./
  with:
    name: file-土-${{ matrix.runs-on }}.txt
    path: cjk-download/土
- name: Download CJK artifact - Chinese 中文测试
  uses: ./
  with:
    name: file-中文测试-${{ matrix.runs-on }}.txt
    path: cjk-download/中文测试
- name: Download CJK artifact - Japanese テスト
  uses: ./
  with:
    name: file-テスト-${{ matrix.runs-on }}.txt
    path: cjk-download/テスト
- name: Download CJK artifact - Japanese 東京タワー
  uses: ./
  with:
    name: file-東京タワー-${{ matrix.runs-on }}.txt
    path: cjk-download/東京タワー
- name: Download CJK artifact - Korean 테스트
  uses: ./
  with:
    name: file-테스트-${{ matrix.runs-on }}.txt
    path: cjk-download/테스트
- name: Download CJK artifact - Korean 서울시
  uses: ./
  with:
    name: file-서울시-${{ matrix.runs-on }}.txt
    path: cjk-download/서울시
- name: Verify CJK artifact downloads
  shell: bash
  run: |
    set -e
    fail=0
    # check_file <path> <expected-content>: PASS/FAIL one downloaded file,
    # accumulating failures in $fail so every file is reported before exit.
    check_file() {
      local file="$1"
      local expected="$2"
      if [ ! -f "$file" ]; then
        echo "FAIL: Missing file: $file"
        fail=1
        return
      fi
      actual=$(cat "$file")
      if [ "$actual" != "$expected" ]; then
        echo "FAIL: Content mismatch in $file"
        echo " Expected: '$expected'"
        echo " Got: '$actual'"
        fail=1
        return
      fi
      echo "PASS: $file"
    }
    echo "=== Chinese ==="
    check_file "cjk-download/土/file-土-${{ matrix.runs-on }}.txt" "Content for 土"
    check_file "cjk-download/中文测试/file-中文测试-${{ matrix.runs-on }}.txt" "Content for 中文测试"
    echo "=== Japanese ==="
    check_file "cjk-download/テスト/file-テスト-${{ matrix.runs-on }}.txt" "Content for テスト"
    check_file "cjk-download/東京タワー/file-東京タワー-${{ matrix.runs-on }}.txt" "Content for 東京タワー"
    echo "=== Korean ==="
    check_file "cjk-download/테스트/file-테스트-${{ matrix.runs-on }}.txt" "Content for 테스트"
    check_file "cjk-download/서울시/file-서울시-${{ matrix.runs-on }}.txt" "Content for 서울시"
    if [ "$fail" -ne 0 ]; then
      echo "Some CJK artifact checks failed"
      ls -alR cjk-download/ || true
      exit 1
    fi
    echo "All CJK artifact downloads verified successfully"

View File

@@ -1,6 +1,6 @@
---
name: "@actions/artifact"
version: 6.1.0
version: 6.2.1
type: npm
summary: Actions artifact lib
homepage: https://github.com/actions/toolkit/tree/main/packages/artifact

165
README.md
View File

@@ -4,83 +4,23 @@ Download [Actions Artifacts](https://docs.github.com/en/actions/using-workflows/
See also [upload-artifact](https://github.com/actions/upload-artifact).
- [`@actions/download-artifact`](#actionsdownload-artifact)
- [v7 - What's new](#v7---whats-new)
- [v5 - What's new](#v5---whats-new)
- [v4 - What's new](#v4---whats-new)
- [Improvements](#improvements)
- [Breaking Changes](#breaking-changes)
- [Note](#note)
- [Usage](#usage)
- [Inputs](#inputs)
- [Outputs](#outputs)
- [Examples](#examples)
- [Download Single Artifact](#download-single-artifact)
- [Download Artifacts by ID](#download-artifacts-by-id)
- [Download All Artifacts](#download-all-artifacts)
- [Download multiple (filtered) Artifacts to the same directory](#download-multiple-filtered-artifacts-to-the-same-directory)
- [Download Artifacts from other Workflow Runs or Repositories](#download-artifacts-from-other-workflow-runs-or-repositories)
- [Limitations](#limitations)
- [Permission Loss](#permission-loss)
- [What's new](#whats-new)
- [Note](#note)
- [GHES Support](#ghes-support)
- [Usage](#usage)
- [Inputs](#inputs)
- [Outputs](#outputs)
- [Examples](#examples)
- [Download Single Artifact](#download-single-artifact)
- [Download Artifacts by ID](#download-artifacts-by-id)
- [Download All Artifacts](#download-all-artifacts)
- [Download multiple (filtered) Artifacts to the same directory](#download-multiple-filtered-artifacts-to-the-same-directory)
- [Download Artifacts from other Workflow Runs or Repositories](#download-artifacts-from-other-workflow-runs-or-repositories)
- [Maintaining File Permissions](#maintaining-file-permissions)
## v8 - What's new
## What's new
> [!IMPORTANT]
> actions/download-artifact@v8 has been migrated to an ESM module. This should be transparent to the caller but forks might need to make significant changes.
> [!IMPORTANT]
> Hash mismatches will now error by default. Users can override this behavior with a setting change (see below).
- Downloads will check the content-type returned to determine whether the file can be decompressed, and will skip the decompression stage when it cannot (e.g. the blob is not a zip). This removes previous failures where we were trying to decompress a non-zip file. Since this is making a big change to the default behavior, we're making it opt-in via a version bump.
- Users can also download a zip file without decompressing it with the new `skip-decompress` flag.
- Introduces a new parameter `digest-mismatch` that allows callers to specify what to do when the downloaded hash doesn't match the expected hash (`ignore`, `info`, `warn`, `error`). To ensure security by default, the default value is `error`.
- Chore: we've bumped versions on a lot of our dev packages to get them up to date with the latest bugfixes/security patches.
## v7 - What's new
> [!IMPORTANT]
> actions/download-artifact@v7 now runs on Node.js 24 (`runs.using: node24`) and requires a minimum Actions Runner version of 2.327.1. If you are using self-hosted runners, ensure they are updated before upgrading.
### Node.js 24
This release updates the runtime to Node.js 24. v6 had preliminary support for Node 24, however this action was by default still running on Node.js 20. Now this action by default will run on Node.js 24.
## v5 - What's new
Previously, **single artifact downloads** behaved differently depending on how you specified the artifact:
- **By name**: `name: my-artifact` → extracted to `path/` (direct)
- **By ID**: `artifact-ids: 12345` → extracted to `path/my-artifact/` (nested)
Now both methods are consistent:
- **By name**: `name: my-artifact` → extracted to `path/` (unchanged)
- **By ID**: `artifact-ids: 12345` → extracted to `path/` (updated - now direct)
Note: This change also applies to patterns that only match a single artifact.
## v4 - What's new
> [!IMPORTANT]
> download-artifact@v4+ is not currently supported on GitHub Enterprise Server (GHES) yet. If you are on GHES, you must use [v3](https://github.com/actions/download-artifact/releases/tag/v3) (Node 16) or [v3-node20](https://github.com/actions/download-artifact/releases/tag/v3-node20) (Node 20).
The release of upload-artifact@v4 and download-artifact@v4 are major changes to the backend architecture of Artifacts. They have numerous performance and behavioral improvements.
For more information, see the [`@actions/artifact`](https://github.com/actions/toolkit/tree/main/packages/artifact) documentation.
### Improvements
1. Downloads are significantly faster, upwards of 90% improvement in worst case scenarios.
2. Artifacts can be downloaded from other workflow runs and repositories when supplied with a PAT.
### Breaking Changes
1. On self hosted runners, additional [firewall rules](https://github.com/actions/toolkit/tree/main/packages/artifact#breaking-changes) may be required.
2. Downloading artifacts that were created from `action/upload-artifact@v3` and below are not supported.
For assistance with breaking changes, see [MIGRATION.md](docs/MIGRATION.md).
Check out the [releases page](https://github.com/actions/download-artifact/releases) for details on what's new.
## Note
@@ -100,12 +40,16 @@ We will still provide security updates for this project and fix major breaking c
You are welcome to still raise bugs in this repo.
## GHES Support
`download-artifact@v4+` is not currently supported on GitHub Enterprise Server (GHES) yet. If you are on GHES, you must use [v3](https://github.com/actions/download-artifact/releases/tag/v3) (Node 16) or [v3-node20](https://github.com/actions/download-artifact/releases/tag/v3-node20) (Node 20).
## Usage
### Inputs
```yaml
- uses: actions/download-artifact@v5
- uses: actions/download-artifact@v8
with:
# Name of the artifact to download.
# If unspecified, all artifacts for the run are downloaded.
@@ -147,6 +91,17 @@ You are welcome to still raise bugs in this repo.
# If github-token is specified, this is the run that artifacts will be downloaded from.
# Optional. Default is ${{ github.run_id }}
run-id:
# Whether to skip decompressing a zip file (if detected).
# If true, the downloaded artifact will not be automatically extracted/decompressed.
# This is useful when you want to handle the artifact as-is without extraction.
# Optional. Default is `false`
skip-decompress:
# What to do if the action detects a mismatch between the downloaded hash and the expected hash from the server.
# Can be one of: `ignore`, `info`, `warn`, `error`
# Optional. Default is `error`
digest-mismatch:
```
### Outputs
@@ -163,7 +118,7 @@ Download to current working directory (`$GITHUB_WORKSPACE`):
```yaml
steps:
- uses: actions/download-artifact@v5
- uses: actions/download-artifact@v8
with:
name: my-artifact
- name: Display structure of downloaded files
@@ -174,7 +129,7 @@ Download to a specific directory (also supports `~` expansion):
```yaml
steps:
- uses: actions/download-artifact@v5
- uses: actions/download-artifact@v8
with:
name: my-artifact
path: your/destination/dir
@@ -182,15 +137,24 @@ steps:
run: ls -R your/destination/dir
```
Directly download a non-zipped file (only supports files uploaded with `actions/upload-artifact@v7` and `archive: false` set):
```yaml
steps:
- uses: actions/download-artifact@v8
with:
name: my-artifact.json # corresponds to the uploaded file name
```
### Download Artifacts by ID
The `artifact-ids` input allows downloading artifacts using their unique ID rather than name. This is particularly useful when working with immutable artifacts from `actions/upload-artifact@v4` which assigns a unique ID to each artifact.
The `artifact-ids` input allows downloading artifacts using their unique ID rather than name. This is particularly useful when working with immutable artifacts from `actions/upload-artifact@v4+` which assigns a unique ID to each artifact.
Download a single artifact by ID to the current working directory (`$GITHUB_WORKSPACE`):
```yaml
steps:
- uses: actions/download-artifact@v5
- uses: actions/download-artifact@v8
with:
artifact-ids: 12345
- name: Display structure of downloaded files
@@ -201,7 +165,7 @@ Download a single artifact by ID to a specific directory:
```yaml
steps:
- uses: actions/download-artifact@v5
- uses: actions/download-artifact@v8
with:
artifact-ids: 12345
path: your/destination/dir
@@ -215,7 +179,7 @@ Multiple artifacts can be downloaded by providing a comma-separated list of IDs:
```yaml
steps:
- uses: actions/download-artifact@v5
- uses: actions/download-artifact@v8
with:
artifact-ids: 12345,67890
path: path/to/artifacts
@@ -243,7 +207,7 @@ Download all artifacts to the current working directory:
```yaml
steps:
- uses: actions/download-artifact@v5
- uses: actions/download-artifact@v8
- name: Display structure of downloaded files
run: ls -R
```
@@ -252,7 +216,7 @@ Download all artifacts to a specific directory:
```yaml
steps:
- uses: actions/download-artifact@v5
- uses: actions/download-artifact@v8
with:
path: path/to/artifacts
- name: Display structure of downloaded files
@@ -263,7 +227,7 @@ To download them to the _same_ directory:
```yaml
steps:
- uses: actions/download-artifact@v5
- uses: actions/download-artifact@v8
with:
path: path/to/artifacts
merge-multiple: true
@@ -294,7 +258,7 @@ jobs:
- name: Create a File
run: echo "hello from ${{ matrix.runs-on }}" > file-${{ matrix.runs-on }}.txt
- name: Upload Artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v7
with:
name: my-artifact-${{ matrix.runs-on }}
path: file-${{ matrix.runs-on }}.txt
@@ -303,7 +267,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download All Artifacts
uses: actions/download-artifact@v5
uses: actions/download-artifact@v8
with:
path: my-artifact
pattern: my-artifact-*
@@ -326,7 +290,7 @@ It may be useful to download Artifacts from other workflow runs, or even other r
```yaml
steps:
- uses: actions/download-artifact@v5
- uses: actions/download-artifact@v8
with:
name: my-other-artifact
github-token: ${{ secrets.GH_PAT }} # token with actions:read permissions on target repo
@@ -334,21 +298,30 @@ steps:
run-id: 1234
```
## Limitations
### Maintaining File Permissions
### Permission Loss
Zipping files will remove file permissions during artifact upload. All directories will have `755` and all files will have `644`. For example, if you make a file executable using `chmod` and then upload that file as a zip file, post-download the file is no longer guaranteed to be set as an executable.
File permissions are not maintained during artifact upload. All directories will have `755` and all files will have `644`. For example, if you make a file executable using `chmod` and then upload that file, post-download the file is no longer guaranteed to be set as an executable.
If you must preserve permissions, you can `tar` all of your files together before artifact upload. Post download, the `tar` file will maintain file permissions and case sensitivity.
If you must preserve permissions, you can `tar` all of your files together before artifact upload and upload it as a single file (using V7+ of `actions/upload-artifact`). Then download the file directly and unpack it manually:
```yaml
- name: 'Tar files'
  run: tar -cvf my_files.tar /path/to/my/directory
- name: 'Upload Artifact'
  uses: actions/upload-artifact@v7
  with:
    path: my_files.tar
    archive: false
```

```yaml
# Later, download the file by name
- name: 'Download Artifact'
  uses: actions/download-artifact@v8
  with:
    name: my_files.tar
```

462
dist/index.js vendored
View File

@@ -77339,7 +77339,7 @@ module.exports = index;
/***/ 2822:
/***/ ((module) => {
module.exports = /*#__PURE__*/JSON.parse('{"name":"@actions/artifact","version":"6.1.0","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","type":"module","main":"lib/artifact.js","types":"lib/artifact.d.ts","exports":{".":{"types":"./lib/artifact.d.ts","import":"./lib/artifact.js"}},"directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc && cp src/internal/shared/package-version.cjs lib/internal/shared/","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^3.0.0","@actions/github":"^9.0.0","@actions/http-client":"^4.0.0","@azure/storage-blob":"^12.30.0","@octokit/core":"^7.0.6","@octokit/plugin-request-log":"^6.0.0","@octokit/plugin-retry":"^8.0.0","@octokit/request":"^10.0.7","@octokit/request-error":"^7.1.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","@protobuf-ts/runtime":"^2.9.4","archiver":"^7.0.1","jwt-decode":"^4.0.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^7.0.0","@types/unzip-stream":"^0.3.4","typedoc":"^0.28.16","typedoc-plugin-markdown":"^4.9.0","typescript":"^5.9.3"},"overrides":{"uri-js":"npm:uri-js-replace@^1.0.1","node-fetch":"^3.3.2"}}');
module.exports = /*#__PURE__*/JSON.parse('{"name":"@actions/artifact","version":"6.2.1","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","type":"module","main":"lib/artifact.js","types":"lib/artifact.d.ts","exports":{".":{"types":"./lib/artifact.d.ts","import":"./lib/artifact.js"}},"directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc && cp src/internal/shared/package-version.cjs lib/internal/shared/","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^3.0.0","@actions/github":"^9.0.0","@actions/http-client":"^4.0.0","@azure/storage-blob":"^12.30.0","@octokit/core":"^7.0.6","@octokit/plugin-request-log":"^6.0.0","@octokit/plugin-retry":"^8.0.0","@octokit/request":"^10.0.7","@octokit/request-error":"^7.1.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","@protobuf-ts/runtime":"^2.9.4","archiver":"^7.0.1","jwt-decode":"^4.0.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^7.0.0","@types/unzip-stream":"^0.3.4","typedoc":"^0.28.16","typedoc-plugin-markdown":"^4.9.0","typescript":"^5.9.3"},"overrides":{"uri-js":"npm:uri-js-replace@^1.0.1","node-fetch":"^3.3.2"}}');
/***/ })
@@ -81433,236 +81433,6 @@ var build_commonjs = __nccwpck_require__(4420);
// @generated message type with reflection information, may provide speed optimized methods
class MigrateArtifactRequest$Type extends commonjs.MessageType {
constructor() {
super("github.actions.results.api.v1.MigrateArtifactRequest", [
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "expires_at", kind: "message", T: () => Timestamp }
]);
}
create(value) {
const message = { workflowRunBackendId: "", name: "" };
globalThis.Object.defineProperty(message, commonjs.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0,commonjs.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string workflow_run_backend_id */ 1:
message.workflowRunBackendId = reader.string();
break;
case /* string name */ 2:
message.name = reader.string();
break;
case /* google.protobuf.Timestamp expires_at */ 3:
message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? commonjs.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* string workflow_run_backend_id = 1; */
if (message.workflowRunBackendId !== "")
writer.tag(1, commonjs.WireType.LengthDelimited).string(message.workflowRunBackendId);
/* string name = 2; */
if (message.name !== "")
writer.tag(2, commonjs.WireType.LengthDelimited).string(message.name);
/* google.protobuf.Timestamp expires_at = 3; */
if (message.expiresAt)
Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(3, commonjs.WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? commonjs.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactRequest
*/
const MigrateArtifactRequest = new MigrateArtifactRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class MigrateArtifactResponse$Type extends commonjs.MessageType {
constructor() {
super("github.actions.results.api.v1.MigrateArtifactResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { ok: false, signedUploadUrl: "" };
globalThis.Object.defineProperty(message, commonjs.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0,commonjs.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* string signed_upload_url */ 2:
message.signedUploadUrl = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? commonjs.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, commonjs.WireType.Varint).bool(message.ok);
/* string signed_upload_url = 2; */
if (message.signedUploadUrl !== "")
writer.tag(2, commonjs.WireType.LengthDelimited).string(message.signedUploadUrl);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? commonjs.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactResponse
*/
const MigrateArtifactResponse = new MigrateArtifactResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FinalizeMigratedArtifactRequest$Type extends commonjs.MessageType {
constructor() {
super("github.actions.results.api.v1.FinalizeMigratedArtifactRequest", [
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
create(value) {
const message = { workflowRunBackendId: "", name: "", size: "0" };
globalThis.Object.defineProperty(message, commonjs.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0,commonjs.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string workflow_run_backend_id */ 1:
message.workflowRunBackendId = reader.string();
break;
case /* string name */ 2:
message.name = reader.string();
break;
case /* int64 size */ 3:
message.size = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? commonjs.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* string workflow_run_backend_id = 1; */
if (message.workflowRunBackendId !== "")
writer.tag(1, commonjs.WireType.LengthDelimited).string(message.workflowRunBackendId);
/* string name = 2; */
if (message.name !== "")
writer.tag(2, commonjs.WireType.LengthDelimited).string(message.name);
/* int64 size = 3; */
if (message.size !== "0")
writer.tag(3, commonjs.WireType.Varint).int64(message.size);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? commonjs.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
*/
const FinalizeMigratedArtifactRequest = new FinalizeMigratedArtifactRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FinalizeMigratedArtifactResponse$Type extends commonjs.MessageType {
constructor() {
super("github.actions.results.api.v1.FinalizeMigratedArtifactResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
create(value) {
const message = { ok: false, artifactId: "0" };
globalThis.Object.defineProperty(message, commonjs.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0,commonjs.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* int64 artifact_id */ 2:
message.artifactId = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? commonjs.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, commonjs.WireType.Varint).bool(message.ok);
/* int64 artifact_id = 2; */
if (message.artifactId !== "0")
writer.tag(2, commonjs.WireType.Varint).int64(message.artifactId);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? commonjs.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
*/
const FinalizeMigratedArtifactResponse = new FinalizeMigratedArtifactResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class CreateArtifactRequest$Type extends commonjs.MessageType {
constructor() {
@@ -81671,7 +81441,8 @@ class CreateArtifactRequest$Type extends commonjs.MessageType {
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 4, name: "expires_at", kind: "message", T: () => Timestamp },
{ no: 5, name: "version", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
{ no: 5, name: "version", kind: "scalar", T: 5 /*ScalarType.INT32*/ },
{ no: 6, name: "mime_type", kind: "message", T: () => StringValue }
]);
}
create(value) {
@@ -81701,6 +81472,9 @@ class CreateArtifactRequest$Type extends commonjs.MessageType {
case /* int32 version */ 5:
message.version = reader.int32();
break;
case /* google.protobuf.StringValue mime_type */ 6:
message.mimeType = StringValue.internalBinaryRead(reader, reader.uint32(), options, message.mimeType);
break;
default:
let u = options.readUnknownField;
if (u === "throw")
@@ -81728,6 +81502,9 @@ class CreateArtifactRequest$Type extends commonjs.MessageType {
/* int32 version = 5; */
if (message.version !== 0)
writer.tag(5, commonjs.WireType.Varint).int32(message.version);
/* google.protobuf.StringValue mime_type = 6; */
if (message.mimeType)
StringValue.internalBinaryWrite(message.mimeType, writer.tag(6, commonjs.WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? commonjs.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@ -81993,7 +81770,7 @@ const artifact_ListArtifactsRequest = new ListArtifactsRequest$Type();
class ListArtifactsResponse$Type extends commonjs.MessageType {
constructor() {
super("github.actions.results.api.v1.ListArtifactsResponse", [
{ no: 1, name: "artifacts", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ListArtifactsResponse_MonolithArtifact }
{ no: 1, name: "artifacts", kind: "message", repeat: 2 /*RepeatType.UNPACKED*/, T: () => ListArtifactsResponse_MonolithArtifact }
]);
}
create(value) {
@ -82356,9 +82133,7 @@ const ArtifactService = new build_commonjs/* ServiceType */.C0("github.actions.r
{ name: "FinalizeArtifact", options: {}, I: artifact_FinalizeArtifactRequest, O: artifact_FinalizeArtifactResponse },
{ name: "ListArtifacts", options: {}, I: artifact_ListArtifactsRequest, O: artifact_ListArtifactsResponse },
{ name: "GetSignedArtifactURL", options: {}, I: artifact_GetSignedArtifactURLRequest, O: artifact_GetSignedArtifactURLResponse },
{ name: "DeleteArtifact", options: {}, I: artifact_DeleteArtifactRequest, O: artifact_DeleteArtifactResponse },
{ name: "MigrateArtifact", options: {}, I: MigrateArtifactRequest, O: MigrateArtifactResponse },
{ name: "FinalizeMigratedArtifact", options: {}, I: FinalizeMigratedArtifactRequest, O: FinalizeMigratedArtifactResponse }
{ name: "DeleteArtifact", options: {}, I: artifact_DeleteArtifactRequest, O: artifact_DeleteArtifactResponse }
]);
//# sourceMappingURL=artifact.js.map
;// CONCATENATED MODULE: ./node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.twirp-client.js
@ -82616,7 +82391,7 @@ NetworkError.isNetworkErrorCode = (code) => {
};
class UsageError extends Error {
constructor() {
const message = `Artifact storage quota has been hit. Unable to upload any new artifacts. Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`;
const message = `Artifact storage quota has been hit. Unable to upload any new artifacts.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`;
super(message);
this.name = 'UsageError';
}
@ -122101,7 +121876,7 @@ var blob_upload_awaiter = (undefined && undefined.__awaiter) || function (thisAr
function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
function uploadToBlobStorage(authenticatedUploadURL, uploadStream, contentType) {
return blob_upload_awaiter(this, void 0, void 0, function* () {
let uploadByteCount = 0;
let lastProgressTime = Date.now();
@ -122123,26 +121898,26 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
const bufferSize = getUploadChunkSize();
const blobClient = new BlobClient(authenticatedUploadURL);
const blockBlobClient = blobClient.getBlockBlobClient();
core_debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`);
core_debug(`Uploading artifact to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}, contentType: ${contentType}`);
const uploadCallback = (progress) => {
info(`Uploaded bytes ${progress.loadedBytes}`);
uploadByteCount = progress.loadedBytes;
lastProgressTime = Date.now();
};
const options = {
blobHTTPHeaders: { blobContentType: 'zip' },
blobHTTPHeaders: { blobContentType: contentType },
onProgress: uploadCallback,
abortSignal: abortController.signal
};
let sha256Hash = undefined;
const uploadStream = new external_stream_.PassThrough();
const blobUploadStream = new external_stream_.PassThrough();
const hashStream = external_crypto_namespaceObject.createHash('sha256');
zipUploadStream.pipe(uploadStream); // This stream is used for the upload
zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets used. Integrity check
uploadStream.pipe(blobUploadStream); // This stream is used for the upload
uploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the content for integrity check
info('Beginning upload of artifact content to blob storage');
try {
yield Promise.race([
blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options),
blockBlobClient.uploadStream(blobUploadStream, bufferSize, maxConcurrency, options),
chunkTimer(getUploadChunkTimeout())
]);
}
@ -122158,7 +121933,7 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
info('Finished uploading artifact content to blob storage!');
hashStream.end();
sha256Hash = hashStream.read();
info(`SHA256 digest of uploaded artifact zip is ${sha256Hash}`);
info(`SHA256 digest of uploaded artifact is ${sha256Hash}`);
if (uploadByteCount === 0) {
warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`);
}
@ -122173,6 +121948,59 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
const promises_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("fs/promises");
// EXTERNAL MODULE: ./node_modules/archiver/index.js
var archiver = __nccwpck_require__(9392);
;// CONCATENATED MODULE: ./node_modules/@actions/artifact/lib/internal/upload/stream.js
// Transpiled async/await helper (TypeScript's __awaiter): drives a generator
// function as a coroutine and wraps its completion in a Promise. The
// `(undefined && undefined.__awaiter)` guard always short-circuits to the
// fallback implementation in this bundle.
var stream_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap a yielded value in the target Promise type P unless it already is one.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        // Resume the generator with the resolved value of the awaited promise.
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        // Throw the rejection reason back into the generator so try/catch inside it works.
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Either settle the outer promise (done) or adopt-and-await the next yielded value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// Identity Transform stream that exists only so callers can choose the
// highWaterMark (internal buffer size) explicitly.
// See https://github.com/nodejs/node/issues/8855
class WaterMarkedUploadStream extends external_stream_.Transform {
    constructor(bufferSize) {
        // Forward the requested buffer size as the stream's high-water mark.
        super({ highWaterMark: bufferSize });
    }
    // Pass every chunk through unchanged.
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    _transform(chunk, encoding, callback) {
        callback(null, chunk);
    }
}
// Builds a stream of a single file's raw (un-archived) bytes for upload.
// Symlinks are resolved to their targets first; read errors destroy the
// returned stream so the consumer's upload aborts instead of hanging.
function createRawFileUploadStream(filePath) {
    return stream_awaiter(this, void 0, void 0, function* () {
        core_debug(`Creating raw file upload stream for: ${filePath}`);
        // Resolve symlinks up front so we stream the real file contents.
        let resolvedPath = filePath;
        const fileStats = yield external_fs_.promises.lstat(filePath);
        if (fileStats.isSymbolicLink()) {
            resolvedPath = yield (0,promises_namespaceObject.realpath)(filePath);
        }
        // Both streams share the configured chunk size as their high-water mark.
        const chunkSize = getUploadChunkSize();
        const outputStream = new WaterMarkedUploadStream(chunkSize);
        const readStream = external_fs_.createReadStream(resolvedPath, {
            highWaterMark: chunkSize
        });
        readStream.on('error', error => {
            core_error('An error has occurred while reading the file for upload');
            core_error(String(error));
            outputStream.destroy(new Error('An error has occurred during file read for the artifact'));
        });
        readStream.pipe(outputStream);
        return outputStream;
    });
}
//# sourceMappingURL=stream.js.map
;// CONCATENATED MODULE: ./node_modules/@actions/artifact/lib/internal/upload/zip.js
var zip_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
@ -122189,19 +122017,6 @@ var zip_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _argu
const DEFAULT_COMPRESSION_LEVEL = 6;
// Custom stream transformer so we can set the highWaterMark property
// See https://github.com/nodejs/node/issues/8855
class ZipUploadStream extends external_stream_.Transform {
constructor(bufferSize) {
super({
highWaterMark: bufferSize
});
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
_transform(chunk, enc, cb) {
cb(null, chunk);
}
}
function createZipUploadStream(uploadSpecification_1) {
return zip_awaiter(this, arguments, void 0, function* (uploadSpecification, compressionLevel = DEFAULT_COMPRESSION_LEVEL) {
core_debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`);
@ -122232,7 +122047,7 @@ function createZipUploadStream(uploadSpecification_1) {
}
}
const bufferSize = getUploadChunkSize();
const zipUploadStream = new ZipUploadStream(bufferSize);
const zipUploadStream = new WaterMarkedUploadStream(bufferSize);
core_debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`);
core_debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`);
zip.pipe(zipUploadStream);
@ -122264,6 +122079,78 @@ const zipEndCallback = () => {
core_debug('Zip stream for upload has ended.');
};
//# sourceMappingURL=zip.js.map
;// CONCATENATED MODULE: ./node_modules/@actions/artifact/lib/internal/upload/types.js
/**
 * Lookup table mapping lowercase file extensions (including the leading dot)
 * to MIME types. Consulted by getMimeType when determining the content type
 * of an un-archived artifact; unknown extensions fall back to
 * application/octet-stream there.
 */
const types_mimeTypes = {
    // Text
    '.txt': 'text/plain',
    '.html': 'text/html',
    '.htm': 'text/html',
    '.css': 'text/css',
    '.csv': 'text/csv',
    '.xml': 'text/xml',
    '.md': 'text/markdown',
    // JavaScript/JSON
    '.js': 'application/javascript',
    '.mjs': 'application/javascript',
    '.json': 'application/json',
    // Images
    '.png': 'image/png',
    '.jpg': 'image/jpeg',
    '.jpeg': 'image/jpeg',
    '.gif': 'image/gif',
    '.svg': 'image/svg+xml',
    '.webp': 'image/webp',
    '.ico': 'image/x-icon',
    '.bmp': 'image/bmp',
    '.tiff': 'image/tiff',
    '.tif': 'image/tiff',
    // Audio
    '.mp3': 'audio/mpeg',
    '.wav': 'audio/wav',
    '.ogg': 'audio/ogg',
    '.flac': 'audio/flac',
    // Video
    '.mp4': 'video/mp4',
    '.webm': 'video/webm',
    '.avi': 'video/x-msvideo',
    '.mov': 'video/quicktime',
    // Documents
    '.pdf': 'application/pdf',
    '.doc': 'application/msword',
    '.docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
    '.xls': 'application/vnd.ms-excel',
    '.xlsx': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
    '.ppt': 'application/vnd.ms-powerpoint',
    '.pptx': 'application/vnd.openxmlformats-officedocument.presentationml.presentation',
    // Archives
    '.zip': 'application/zip',
    '.tar': 'application/x-tar',
    '.gz': 'application/gzip',
    '.rar': 'application/vnd.rar',
    '.7z': 'application/x-7z-compressed',
    // Code/Data
    '.wasm': 'application/wasm',
    // NOTE(review): 'application/x-yaml' is the legacy type; IANA registered
    // 'application/yaml' (RFC 9512). Kept as-is to match the upstream package.
    '.yaml': 'application/x-yaml',
    '.yml': 'application/x-yaml',
    // Fonts
    '.woff': 'font/woff',
    '.woff2': 'font/woff2',
    '.ttf': 'font/ttf',
    '.otf': 'font/otf',
    '.eot': 'application/vnd.ms-fontobject'
};
/**
 * Resolves the MIME type for a file from its extension, falling back to the
 * generic binary type when the extension is not in the lookup table.
 */
function getMimeType(filePath) {
    // Table keys are stored lowercase with a leading dot, so normalize first.
    const extension = external_path_.extname(filePath).toLowerCase();
    const knownType = types_mimeTypes[extension];
    return knownType || 'application/octet-stream';
}
//# sourceMappingURL=types.js.map
;// CONCATENATED MODULE: ./node_modules/@actions/artifact/lib/internal/upload/upload-artifact.js
var upload_artifact_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
@ -122284,14 +122171,36 @@ var upload_artifact_awaiter = (undefined && undefined.__awaiter) || function (th
function uploadArtifact(name, files, rootDirectory, options) {
return upload_artifact_awaiter(this, void 0, void 0, function* () {
let artifactFileName = `${name}.zip`;
if (options === null || options === void 0 ? void 0 : options.skipArchive) {
if (files.length === 0) {
throw new FilesNotFoundError([]);
}
if (files.length > 1) {
throw new Error('skipArchive option is only supported when uploading a single file');
}
if (!external_fs_.existsSync(files[0])) {
throw new FilesNotFoundError(files);
}
artifactFileName = external_path_.basename(files[0]);
name = artifactFileName;
}
validateArtifactName(name);
validateRootDirectory(rootDirectory);
const zipSpecification = getUploadZipSpecification(files, rootDirectory);
if (zipSpecification.length === 0) {
throw new FilesNotFoundError(zipSpecification.flatMap(s => (s.sourcePath ? [s.sourcePath] : [])));
let zipSpecification = [];
if (!(options === null || options === void 0 ? void 0 : options.skipArchive)) {
zipSpecification = getUploadZipSpecification(files, rootDirectory);
if (zipSpecification.length === 0) {
throw new FilesNotFoundError(zipSpecification.flatMap(s => (s.sourcePath ? [s.sourcePath] : [])));
}
}
const contentType = getMimeType(artifactFileName);
// get the IDs needed for the artifact creation
const backendIds = getBackendIdsFromToken();
// create the artifact client
@ -122301,7 +122210,8 @@ function uploadArtifact(name, files, rootDirectory, options) {
workflowRunBackendId: backendIds.workflowRunBackendId,
workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
name,
version: 4
mimeType: StringValue.create({ value: contentType }),
version: 7
};
// if there is a retention period, add it to the request
const expiresAt = getExpiration(options === null || options === void 0 ? void 0 : options.retentionDays);
@ -122312,9 +122222,17 @@ function uploadArtifact(name, files, rootDirectory, options) {
if (!createArtifactResp.ok) {
throw new InvalidResponseError('CreateArtifact: response from backend was not ok');
}
const zipUploadStream = yield createZipUploadStream(zipSpecification, options === null || options === void 0 ? void 0 : options.compressionLevel);
// Upload zip to blob storage
const uploadResult = yield uploadZipToBlobStorage(createArtifactResp.signedUploadUrl, zipUploadStream);
let stream;
if (options === null || options === void 0 ? void 0 : options.skipArchive) {
// Upload raw file without archiving
stream = yield createRawFileUploadStream(files[0]);
}
else {
// Create and upload zip archive
stream = yield createZipUploadStream(zipSpecification, options === null || options === void 0 ? void 0 : options.compressionLevel);
}
info(`Uploading artifact: ${artifactFileName}`);
const uploadResult = yield uploadToBlobStorage(createArtifactResp.signedUploadUrl, stream, contentType);
// finalize the artifact
const finalizeArtifactReq = {
workflowRunBackendId: backendIds.workflowRunBackendId,
@ -122333,7 +122251,7 @@ function uploadArtifact(name, files, rootDirectory, options) {
throw new InvalidResponseError('FinalizeArtifact: response from backend was not ok');
}
const artifactId = BigInt(finalizeArtifactResp.artifactId);
info(`Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`);
info(`Artifact ${name} successfully finalized. Artifact ID ${artifactId}`);
return {
size: uploadResult.uploadSize,
digest: uploadResult.sha256Hash,
@ -126504,13 +126422,17 @@ function streamExtractExternal(url_1, directory_1) {
mimeType === 'application/zip-compressed' ||
urlEndsWithZip;
// Extract filename from Content-Disposition header
// Prefer filename* (RFC 5987) which supports UTF-8 encoded filenames,
// fall back to filename which may contain ASCII-only replacements
const contentDisposition = response.message.headers['content-disposition'] || '';
let fileName = 'artifact';
const filenameMatch = contentDisposition.match(/filename\*?=['"]?(?:UTF-\d['"]*)?([^;\r\n"']*)['"]?/i);
if (filenameMatch && filenameMatch[1]) {
const filenameStar = contentDisposition.match(/filename\*\s*=\s*UTF-8''([^;\r\n]*)/i);
const filenamePlain = contentDisposition.match(/(?<!\*)filename\s*=\s*['"]?([^;\r\n"']*)['"]?/i);
const rawName = (filenameStar === null || filenameStar === void 0 ? void 0 : filenameStar[1]) || (filenamePlain === null || filenamePlain === void 0 ? void 0 : filenamePlain[1]);
if (rawName) {
// Sanitize fileName to prevent path traversal attacks
// Use path.basename to extract only the filename component
fileName = external_path_.basename(decodeURIComponent(filenameMatch[1].trim()));
fileName = external_path_.basename(decodeURIComponent(rawName.trim()));
}
core_debug(`Content-Type: ${contentType}, mimeType: ${mimeType}, urlEndsWithZip: ${urlEndsWithZip}, isZip: ${isZip}, skipDecompress: ${skipDecompress}`);
core_debug(`Content-Disposition: ${contentDisposition}, fileName: ${fileName}`);

12
package-lock.json generated
View File

@ -1,15 +1,15 @@
{
"name": "download-artifact",
"version": "7.0.0",
"version": "8.0.1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "download-artifact",
"version": "7.0.0",
"version": "8.0.1",
"license": "MIT",
"dependencies": {
"@actions/artifact": "^6.1.0",
"@actions/artifact": "^6.2.1",
"@actions/core": "^3.0.0",
"minimatch": "^10.1.1"
},
@ -36,9 +36,9 @@
}
},
"node_modules/@actions/artifact": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-6.1.0.tgz",
"integrity": "sha512-oRn9YhKkboXgIq2TQZ9uj6bhkT5ZUzFtnyTQ0tLGBwImaD0GfWShE5R0tPbN25EJmS3tz5sDd2JnVokAOtNrZQ==",
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-6.2.1.tgz",
"integrity": "sha512-sJGH0mhEbEjBCw7o6SaLhUU66u27aFW8HTfkIb5Tk2/Wy0caUDc+oYQEgnuFN7a0HCpAbQyK0U6U7XUJDgDWrw==",
"license": "MIT",
"dependencies": {
"@actions/core": "^3.0.0",

View File

@ -1,6 +1,6 @@
{
"name": "download-artifact",
"version": "8.0.0",
"version": "8.0.1",
"description": "Download an Actions Artifact from a workflow run",
"type": "module",
"engines": {
@ -33,7 +33,7 @@
},
"homepage": "https://github.com/actions/download-artifact#readme",
"dependencies": {
"@actions/artifact": "^6.1.0",
"@actions/artifact": "^6.2.1",
"@actions/core": "^3.0.0",
"minimatch": "^10.1.1"
},