Feature: non-zipped Actions artifacts (upload-artifact v7) (#36786)

- content_encoding contains a slash => v4 artifact
- updated proto files to support mime_type and no longer return errors for upload-artifact v7
- json and txt files are now previewed in browser
- normalized content-disposition header creation
- Azure Blob Storage uploads go directly to storage in serve-direct mode (no data proxying)
- normalize content-disposition headers based on go mime package
  - emitting both the `filename` and `filename*` parameters is handled by custom code

Closes #36829

-----

Signed-off-by: ChristopherHX <christopher.homberger@web.de>
Co-authored-by: wxiaoguang <wxiaoguang@gmail.com>
This commit is contained in:
ChristopherHX
2026-03-25 17:37:48 +01:00
committed by GitHub
parent 435123fe65
commit bc5c554072
29 changed files with 1003 additions and 826 deletions

View File

@@ -53,6 +53,11 @@ func init() {
db.RegisterModel(new(ActionArtifact))
}
const (
ContentEncodingV3Gzip = "gzip"
ContentTypeZip = "application/zip"
)
// ActionArtifact is a file that is stored in the artifact storage.
type ActionArtifact struct {
ID int64 `xorm:"pk autoincr"`
@@ -61,16 +66,26 @@ type ActionArtifact struct {
RepoID int64 `xorm:"index"`
OwnerID int64
CommitSHA string
StoragePath string // The path to the artifact in the storage
FileSize int64 // The size of the artifact in bytes
FileCompressedSize int64 // The size of the artifact in bytes after gzip compression
ContentEncoding string // The content encoding of the artifact
ArtifactPath string `xorm:"index unique(runid_name_path)"` // The path to the artifact when runner uploads it
ArtifactName string `xorm:"index unique(runid_name_path)"` // The name of the artifact when runner uploads it
Status ArtifactStatus `xorm:"index"` // The status of the artifact, uploading, expired or need-delete
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated index"`
ExpiredUnix timeutil.TimeStamp `xorm:"index"` // The time when the artifact will be expired
StoragePath string // The path to the artifact in the storage
FileSize int64 // The size of the artifact in bytes
FileCompressedSize int64 // The size of the artifact in bytes after gzip compression
// The content encoding or content type of the artifact
// * empty or null: legacy (v3) uncompressed content
// * magic string "gzip" (ContentEncodingV3Gzip): v3 gzip compressed content
// * requires gzip decoding before storing in a zip for download
// * requires gzip content-encoding header when downloaded single files within a workflow
// * mime type for "Content-Type":
// * "application/zip" (ContentTypeZip), seems to be an abuse, fortunately there is no conflict, and it won't cause problems?
// * "application/pdf", "text/html", etc.: real content type of the artifact
ContentEncodingOrType string `xorm:"content_encoding"`
ArtifactPath string `xorm:"index unique(runid_name_path)"` // The path to the artifact when runner uploads it
ArtifactName string `xorm:"index unique(runid_name_path)"` // The name of the artifact when runner uploads it
Status ArtifactStatus `xorm:"index"` // The status of the artifact, uploading, expired or need-delete
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated index"`
ExpiredUnix timeutil.TimeStamp `xorm:"index"` // The time when the artifact will be expired
}
func CreateArtifact(ctx context.Context, t *ActionTask, artifactName, artifactPath string, expiredDays int64) (*ActionArtifact, error) {
@@ -156,7 +171,8 @@ func (opts FindArtifactsOptions) ToConds() builder.Cond {
}
if opts.FinalizedArtifactsV4 {
cond = cond.And(builder.Eq{"status": ArtifactStatusUploadConfirmed}.Or(builder.Eq{"status": ArtifactStatusExpired}))
cond = cond.And(builder.Eq{"content_encoding": "application/zip"})
// see the comment of ActionArtifact.ContentEncodingOrType: "*/*" means the field is a content type
cond = cond.And(builder.Like{"content_encoding", "%/%"})
}
return cond

View File

@@ -141,3 +141,39 @@
created_unix: 1730330775
updated_unix: 1730330775
expired_unix: 1738106775
-
id: 26
run_id: 792
runner_id: 1
repo_id: 4
owner_id: 1
commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
storage_path: "27/5/1730330775594233150.chunk"
file_size: 1024
file_compressed_size: 1024
content_encoding: "application/pdf"
artifact_path: "report.pdf"
artifact_name: "report.pdf"
status: 2
created_unix: 1730330775
updated_unix: 1730330775
expired_unix: 1738106775
-
id: 27
run_id: 792
runner_id: 1
repo_id: 4
owner_id: 1
commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
storage_path: "27/5/1730330775594233150.chunk"
file_size: 1024
file_compressed_size: 1024
content_encoding: "application/html"
artifact_path: "report.html"
artifact_name: "report.html"
status: 2
created_unix: 1730330775
updated_unix: 1730330775
expired_unix: 1738106775

View File

@@ -5,44 +5,61 @@ package actions
import (
"net/http"
"strings"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/services/context"
)
// Artifacts using the v4 backend are stored as a single combined zip file per artifact on the backend
// The v4 backend ensures ContentEncoding is set to "application/zip", which is not the case for the old backend
// IsArtifactV4 detects whether the artifact is likely from v4.
// V4 backend stores the files as a single combined zip file per artifact, and ensures ContentEncoding contains a slash
// (otherwise this uses application/zip instead of the custom mime type), which is not the case for the old backend.
func IsArtifactV4(art *actions_model.ActionArtifact) bool {
return art.ArtifactName+".zip" == art.ArtifactPath && art.ContentEncoding == "application/zip"
return strings.Contains(art.ContentEncodingOrType, "/")
}
func DownloadArtifactV4ServeDirectOnly(ctx *context.Base, art *actions_model.ActionArtifact) (bool, error) {
if setting.Actions.ArtifactStorage.ServeDirect() {
u, err := storage.ActionsArtifacts.ServeDirectURL(art.StoragePath, art.ArtifactPath, ctx.Req.Method, nil)
if u != nil && err == nil {
ctx.Redirect(u.String(), http.StatusFound)
return true, nil
}
func GetArtifactV4ServeDirectURL(art *actions_model.ActionArtifact, method string) (string, error) {
contentType := art.ContentEncodingOrType
u, err := storage.ActionsArtifacts.ServeDirectURL(art.StoragePath, art.ArtifactPath, method, &storage.ServeDirectOptions{ContentType: contentType})
if err != nil {
return "", err
}
return false, nil
return u.String(), nil
}
func DownloadArtifactV4Fallback(ctx *context.Base, art *actions_model.ActionArtifact) error {
func DownloadArtifactV4ServeDirect(ctx *context.Base, art *actions_model.ActionArtifact) bool {
if !setting.Actions.ArtifactStorage.ServeDirect() {
return false
}
u, err := GetArtifactV4ServeDirectURL(art, ctx.Req.Method)
if err != nil {
log.Error("GetArtifactV4ServeDirectURL: %v", err)
return false
}
ctx.Redirect(u, http.StatusFound)
return true
}
func DownloadArtifactV4ReadStorage(ctx *context.Base, art *actions_model.ActionArtifact) error {
f, err := storage.ActionsArtifacts.Open(art.StoragePath)
if err != nil {
return err
}
defer f.Close()
http.ServeContent(ctx.Resp, ctx.Req, art.ArtifactName+".zip", art.CreatedUnix.AsLocalTime(), f)
httplib.ServeUserContentByFile(ctx.Req, ctx.Resp, f, httplib.ServeHeaderOptions{
Filename: art.ArtifactPath,
ContentType: art.ContentEncodingOrType, // v4 guarantees that the field is Content-Type
})
return nil
}
func DownloadArtifactV4(ctx *context.Base, art *actions_model.ActionArtifact) error {
ok, err := DownloadArtifactV4ServeDirectOnly(ctx, art)
if ok || err != nil {
return err
if DownloadArtifactV4ServeDirect(ctx, art) {
return nil
}
return DownloadArtifactV4Fallback(ctx, art)
return DownloadArtifactV4ReadStorage(ctx, art)
}

View File

@@ -0,0 +1,65 @@
// Copyright 2026 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package httplib
import (
"mime"
"strings"
"code.gitea.io/gitea/modules/setting"
)
// ContentDispositionType is the disposition type used in a
// Content-Disposition response header: either "inline" or "attachment".
type ContentDispositionType string

const (
	ContentDispositionInline     ContentDispositionType = "inline"
	ContentDispositionAttachment ContentDispositionType = "attachment"
)
// needsEncodingRune reports whether r cannot appear verbatim in the plain
// "filename" parameter of a Content-Disposition header. Everything outside
// printable ASCII is rejected, with the horizontal tab as the only exception.
func needsEncodingRune(r rune) bool {
	if r == '\t' {
		return false
	}
	return r < ' ' || r > '~'
}
// getSafeName returns a copy of s in which every rune that cannot appear
// verbatim in a Content-Disposition "filename" parameter is replaced by an
// underscore. The second return value reports whether any replacement
// happened, i.e. whether an RFC 5987 "filename*" parameter is also required.
func getSafeName(s string) (_ string, needsEncoding bool) {
	var sb strings.Builder
	sb.Grow(len(s))
	for _, r := range s {
		if !needsEncodingRune(r) {
			sb.WriteRune(r)
			continue
		}
		needsEncoding = true
		sb.WriteByte('_')
	}
	return sb.String(), needsEncoding
}
// EncodeContentDispositionAttachment builds a Content-Disposition header
// value with disposition type "attachment" for the given filename.
func EncodeContentDispositionAttachment(filename string) string {
	return encodeContentDisposition(ContentDispositionAttachment, filename)
}
// EncodeContentDispositionInline builds a Content-Disposition header
// value with disposition type "inline" for the given filename.
func EncodeContentDispositionInline(filename string) string {
	return encodeContentDisposition(ContentDispositionInline, filename)
}
// encodeContentDisposition encodes a correct Content-Disposition header
// value for the given disposition type and filename.
//
// When the filename is entirely printable ASCII, only a plain "filename"
// parameter is emitted. Otherwise both forms are emitted: a sanitized
// "filename" (invalid runes replaced by underscores, for legacy clients)
// followed by an RFC 5987 "filename*" parameter carrying the exact UTF-8 name.
func encodeContentDisposition(t ContentDispositionType, filename string) string {
	safeFilename, needsEncoding := getSafeName(filename)
	result := mime.FormatMediaType(string(t), map[string]string{"filename": safeFilename})
	// No need for the utf8 ("filename*") encoding: the safe name equals the original
	if !needsEncoding {
		return result
	}
	// Format the raw (unsanitized) name; the mime package emits it as "filename*"
	utf8Result := mime.FormatMediaType(string(t), map[string]string{"filename": filename})
	// The mime package might produce unexpected output in other Go versions.
	// In dev/test instances, fail loudly via PanicInDevOrTesting; in production,
	// fall back to the default behavior of the Go mime package.
	if !strings.HasPrefix(result, string(t)+"; filename=") || !strings.HasPrefix(utf8Result, string(t)+"; filename*=") {
		setting.PanicInDevOrTesting("Unexpected mime package result %s", result)
		return utf8Result
	}
	// Append the "; filename*=..." parameter after the sanitized "filename=..."
	// parameter, yielding both forms in a single header value.
	encodedFileName := strings.TrimPrefix(utf8Result, string(t))
	return result + encodedFileName
}

View File

@@ -0,0 +1,64 @@
// Copyright 2026 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package httplib
import (
"mime"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestContentDisposition checks encodeContentDisposition against a fixed table
// of filenames, then extends the table with every rune the sanitizer accepts,
// and finally round-trips each generated header through mime.ParseMediaType to
// ensure the original filename is always recoverable by a compliant parser.
func TestContentDisposition(t *testing.T) {
	type testEntry struct {
		disposition ContentDispositionType
		filename    string
		header      string // the exact expected Content-Disposition header value
	}
	table := []testEntry{
		{disposition: ContentDispositionInline, filename: "test.txt", header: "inline; filename=test.txt"},
		{disposition: ContentDispositionInline, filename: "test❌.txt", header: "inline; filename=test_.txt; filename*=utf-8''test%E2%9D%8C.txt"},
		{disposition: ContentDispositionInline, filename: "test ❌.txt", header: "inline; filename=\"test _.txt\"; filename*=utf-8''test%20%E2%9D%8C.txt"},
		{disposition: ContentDispositionInline, filename: "\"test.txt", header: "inline; filename=\"\\\"test.txt\""},
		{disposition: ContentDispositionInline, filename: "hello\tworld.txt", header: "inline; filename=\"hello\tworld.txt\""},
		{disposition: ContentDispositionAttachment, filename: "hello\tworld.txt", header: "attachment; filename=\"hello\tworld.txt\""},
		{disposition: ContentDispositionAttachment, filename: "hello\nworld.txt", header: "attachment; filename=hello_world.txt; filename*=utf-8''hello%0Aworld.txt"},
		{disposition: ContentDispositionAttachment, filename: "hello\rworld.txt", header: "attachment; filename=hello_world.txt; filename*=utf-8''hello%0Dworld.txt"},
	}
	// Check every rune in the range accepted by needsEncodingRune (tab is
	// already covered above). Any change in the sanitizer's accepted range or
	// in the mime package's quoting behavior should fail here.
	for c := ' '; !needsEncodingRune(c); c++ {
		var header string
		switch {
		case strings.ContainsAny(string(c), ` (),/:;<=>?@[]`):
			// mime "tspecials" and space force the quoted-string form
			header = "inline; filename=\"hello" + string(c) + "world.txt\""
		case strings.ContainsAny(string(c), `"\`):
			// RFC 6266 appendix D advises against backslash escapes in the
			// quoted form: https://datatracker.ietf.org/doc/html/rfc6266#appendix-D
			// However the mime package does not generate "filename*" in this
			// scenario, so the backslash-escaped quoted form is expected here.
			header = "inline; filename=\"hello\\" + string(c) + "world.txt\""
		default:
			header = "inline; filename=hello" + string(c) + "world.txt"
		}
		table = append(table, testEntry{
			disposition: ContentDispositionInline,
			filename:    "hello" + string(c) + "world.txt",
			header:      header,
		})
	}
	for _, entry := range table {
		t.Run(string(entry.disposition)+"_"+entry.filename, func(t *testing.T) {
			encoded := encodeContentDisposition(entry.disposition, entry.filename)
			assert.Equal(t, entry.header, encoded)
			// Round-trip: a compliant parser must recover the exact filename
			disposition, params, err := mime.ParseMediaType(encoded)
			require.NoError(t, err)
			assert.Equal(t, string(entry.disposition), disposition)
			assert.Equal(t, entry.filename, params["filename"])
		})
	}
}

View File

@@ -8,10 +8,9 @@ import (
"errors"
"fmt"
"io"
"io/fs"
"net/http"
"net/url"
"path"
"path/filepath"
"strconv"
"strings"
"time"
@@ -27,18 +26,19 @@ import (
)
type ServeHeaderOptions struct {
ContentType string // defaults to "application/octet-stream"
ContentTypeCharset string
ContentLength *int64
Disposition string // defaults to "attachment"
ContentType string // defaults to "application/octet-stream"
ContentLength *int64
Filename string
CacheIsPublic bool
CacheDuration time.Duration // defaults to 5 minutes
LastModified time.Time
ContentDisposition ContentDispositionType
CacheIsPublic bool
CacheDuration time.Duration // defaults to 5 minutes
LastModified time.Time
}
// ServeSetHeaders sets necessary content serve headers
func ServeSetHeaders(w http.ResponseWriter, opts *ServeHeaderOptions) {
func ServeSetHeaders(w http.ResponseWriter, opts ServeHeaderOptions) {
header := w.Header()
skipCompressionExts := container.SetOf(".gz", ".bz2", ".zip", ".xz", ".zst", ".deb", ".apk", ".jar", ".png", ".jpg", ".webp")
@@ -46,14 +46,7 @@ func ServeSetHeaders(w http.ResponseWriter, opts *ServeHeaderOptions) {
w.Header().Add(gzhttp.HeaderNoCompression, "1")
}
contentType := typesniffer.MimeTypeApplicationOctetStream
if opts.ContentType != "" {
if opts.ContentTypeCharset != "" {
contentType = opts.ContentType + "; charset=" + strings.ToLower(opts.ContentTypeCharset)
} else {
contentType = opts.ContentType
}
}
contentType := util.IfZero(opts.ContentType, typesniffer.MimeTypeApplicationOctetStream)
header.Set("Content-Type", contentType)
header.Set("X-Content-Type-Options", "nosniff")
@@ -61,14 +54,18 @@ func ServeSetHeaders(w http.ResponseWriter, opts *ServeHeaderOptions) {
header.Set("Content-Length", strconv.FormatInt(*opts.ContentLength, 10))
}
if opts.Filename != "" {
disposition := opts.Disposition
if disposition == "" {
disposition = "attachment"
}
// Disable script execution of HTML/SVG files, since we serve the file from the same origin as Gitea server
header.Set("Content-Security-Policy", "default-src 'none'; style-src 'unsafe-inline'; sandbox")
if strings.Contains(contentType, "application/pdf") {
// no sandbox attribute for PDF as it breaks rendering in at least safari. this
// should generally be safe as scripts inside PDF can not escape the PDF document
// see https://bugs.chromium.org/p/chromium/issues/detail?id=413851 for more discussion
// HINT: PDF-RENDER-SANDBOX: PDF won't render in sandboxed context
header.Set("Content-Security-Policy", "default-src 'none'; style-src 'unsafe-inline'")
}
backslashEscapedName := strings.ReplaceAll(strings.ReplaceAll(opts.Filename, `\`, `\\`), `"`, `\"`) // \ -> \\, " -> \"
header.Set("Content-Disposition", fmt.Sprintf(`%s; filename="%s"; filename*=UTF-8''%s`, disposition, backslashEscapedName, url.PathEscape(opts.Filename)))
if opts.Filename != "" && opts.ContentDisposition != "" {
header.Set("Content-Disposition", encodeContentDisposition(opts.ContentDisposition, path.Base(opts.Filename)))
header.Set("Access-Control-Expose-Headers", "Content-Disposition")
}
@@ -84,49 +81,40 @@ func ServeSetHeaders(w http.ResponseWriter, opts *ServeHeaderOptions) {
}
}
// ServeData download file from io.Reader
func setServeHeadersByFile(r *http.Request, w http.ResponseWriter, mineBuf []byte, opts *ServeHeaderOptions) {
// do not set "Content-Length", because the length could only be set by callers, and it needs to support range requests
sniffedType := typesniffer.DetectContentType(mineBuf)
// the "render" parameter came from year 2016: 638dd24c, it doesn't have clear meaning, so I think it could be removed later
isPlain := sniffedType.IsText() || r.FormValue("render") != ""
func serveSetHeadersByUserContent(w http.ResponseWriter, contentPrefetchBuf []byte, opts ServeHeaderOptions) {
var detectCharset bool
if setting.MimeTypeMap.Enabled {
fileExtension := strings.ToLower(filepath.Ext(opts.Filename))
fileExtension := strings.ToLower(path.Ext(opts.Filename))
opts.ContentType = setting.MimeTypeMap.Map[fileExtension]
detectCharset = !strings.Contains(opts.ContentType, "charset=")
}
if opts.ContentType == "" {
sniffedType := typesniffer.DetectContentType(contentPrefetchBuf)
if sniffedType.IsBrowsableBinaryType() {
opts.ContentType = sniffedType.GetMimeType()
} else if isPlain {
} else if sniffedType.IsText() {
// intentionally do not render user's HTML content as a page, for safety, and avoid content spamming & abusing
opts.ContentType = "text/plain"
detectCharset = true
} else {
opts.ContentType = typesniffer.MimeTypeApplicationOctetStream
}
}
if isPlain {
charset, _ := charsetModule.DetectEncoding(mineBuf)
opts.ContentTypeCharset = strings.ToLower(charset)
if detectCharset {
if charset, _ := charsetModule.DetectEncoding(contentPrefetchBuf); charset != "" {
opts.ContentType += "; charset=" + strings.ToLower(charset)
}
}
// serve types that can present a security risk with CSP
w.Header().Set("Content-Security-Policy", "default-src 'none'; style-src 'unsafe-inline'; sandbox")
if sniffedType.IsPDF() {
// no sandbox attribute for PDF as it breaks rendering in at least safari. this
// should generally be safe as scripts inside PDF can not escape the PDF document
// see https://bugs.chromium.org/p/chromium/issues/detail?id=413851 for more discussion
// HINT: PDF-RENDER-SANDBOX: PDF won't render in sandboxed context
w.Header().Set("Content-Security-Policy", "default-src 'none'; style-src 'unsafe-inline'")
}
// TODO: UNIFY-CONTENT-DISPOSITION-FROM-STORAGE
opts.Disposition = "inline"
if sniffedType.IsSvgImage() && !setting.UI.SVG.Enabled {
opts.Disposition = "attachment"
if opts.ContentDisposition == "" {
sniffedType := typesniffer.FromContentType(opts.ContentType)
opts.ContentDisposition = ContentDispositionInline
if sniffedType.IsSvgImage() && !setting.UI.SVG.Enabled {
opts.ContentDisposition = ContentDispositionAttachment
}
}
ServeSetHeaders(w, opts)
@@ -134,7 +122,10 @@ func setServeHeadersByFile(r *http.Request, w http.ResponseWriter, mineBuf []byt
const mimeDetectionBufferLen = 1024
func ServeContentByReader(r *http.Request, w http.ResponseWriter, size int64, reader io.Reader, opts *ServeHeaderOptions) {
func ServeUserContentByReader(r *http.Request, w http.ResponseWriter, size int64, reader io.Reader, opts ServeHeaderOptions) {
if opts.ContentLength != nil {
panic("do not set ContentLength, use size argument instead")
}
buf := make([]byte, mimeDetectionBufferLen)
n, err := util.ReadAtMost(reader, buf)
if err != nil {
@@ -144,7 +135,7 @@ func ServeContentByReader(r *http.Request, w http.ResponseWriter, size int64, re
if n >= 0 {
buf = buf[:n]
}
setServeHeadersByFile(r, w, buf, opts)
serveSetHeadersByUserContent(w, buf, opts)
// reset the reader to the beginning
reader = io.MultiReader(bytes.NewReader(buf), reader)
@@ -198,32 +189,29 @@ func ServeContentByReader(r *http.Request, w http.ResponseWriter, size int64, re
partialLength := end - start + 1
w.Header().Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", start, end, size))
w.Header().Set("Content-Length", strconv.FormatInt(partialLength, 10))
if _, err = io.CopyN(io.Discard, reader, start); err != nil {
http.Error(w, "serve content: unable to skip", http.StatusInternalServerError)
return
if seeker, ok := reader.(io.Seeker); ok {
if _, err = seeker.Seek(start, io.SeekStart); err != nil {
http.Error(w, "serve content: unable to seek", http.StatusInternalServerError)
return
}
} else {
if _, err = io.CopyN(io.Discard, reader, start); err != nil {
http.Error(w, "serve content: unable to skip", http.StatusInternalServerError)
return
}
}
w.WriteHeader(http.StatusPartialContent)
_, _ = io.CopyN(w, reader, partialLength) // just like http.ServeContent, not necessary to handle the error
}
func ServeContentByReadSeeker(r *http.Request, w http.ResponseWriter, modTime *time.Time, reader io.ReadSeeker, opts *ServeHeaderOptions) {
buf := make([]byte, mimeDetectionBufferLen)
n, err := util.ReadAtMost(reader, buf)
func ServeUserContentByFile(r *http.Request, w http.ResponseWriter, file fs.File, opts ServeHeaderOptions) {
info, err := file.Stat()
if err != nil {
http.Error(w, "serve content: unable to read", http.StatusInternalServerError)
http.Error(w, "unable to serve file, stat error", http.StatusInternalServerError)
return
}
if _, err = reader.Seek(0, io.SeekStart); err != nil {
http.Error(w, "serve content: unable to seek", http.StatusInternalServerError)
return
}
if n >= 0 {
buf = buf[:n]
}
setServeHeadersByFile(r, w, buf, opts)
if modTime == nil {
modTime = &time.Time{}
}
http.ServeContent(w, r, opts.Filename, *modTime, reader)
opts.LastModified = info.ModTime()
ServeUserContentByReader(r, w, info.Size(), file, opts)
}

View File

@@ -16,7 +16,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestServeContentByReader(t *testing.T) {
func TestServeUserContentByReader(t *testing.T) {
data := "0123456789abcdef"
test := func(t *testing.T, expectedStatusCode int, expectedContent string) {
@@ -27,7 +27,7 @@ func TestServeContentByReader(t *testing.T) {
}
reader := strings.NewReader(data)
w := httptest.NewRecorder()
ServeContentByReader(r, w, int64(len(data)), reader, &ServeHeaderOptions{})
ServeUserContentByReader(r, w, int64(len(data)), reader, ServeHeaderOptions{})
assert.Equal(t, expectedStatusCode, w.Code)
if expectedStatusCode == http.StatusPartialContent || expectedStatusCode == http.StatusOK {
assert.Equal(t, strconv.Itoa(len(expectedContent)), w.Header().Get("Content-Length"))
@@ -58,7 +58,7 @@ func TestServeContentByReader(t *testing.T) {
})
}
func TestServeContentByReadSeeker(t *testing.T) {
func TestServeUserContentByFile(t *testing.T) {
data := "0123456789abcdef"
tmpFile := t.TempDir() + "/test"
err := os.WriteFile(tmpFile, []byte(data), 0o644)
@@ -76,7 +76,7 @@ func TestServeContentByReadSeeker(t *testing.T) {
defer seekReader.Close()
w := httptest.NewRecorder()
ServeContentByReadSeeker(r, w, nil, seekReader, &ServeHeaderOptions{})
ServeUserContentByFile(r, w, seekReader, ServeHeaderOptions{})
assert.Equal(t, expectedStatusCode, w.Code)
if expectedStatusCode == http.StatusPartialContent || expectedStatusCode == http.StatusOK {
assert.Equal(t, strconv.Itoa(len(expectedContent)), w.Header().Get("Content-Length"))

View File

@@ -104,7 +104,7 @@ func (s *ContentStore) Verify(pointer Pointer) (bool, error) {
}
// ReadMetaObject will read a git_model.LFSMetaObject and return a reader
func ReadMetaObject(pointer Pointer) (io.ReadSeekCloser, error) {
func ReadMetaObject(pointer Pointer) (storage.Object, error) {
contentStore := NewContentStore()
return contentStore.Get(pointer)
}

View File

@@ -23,11 +23,7 @@ import (
"github.com/minio/minio-go/v7/pkg/credentials"
)
var (
_ ObjectStorage = &MinioStorage{}
quoteEscaper = strings.NewReplacer("\\", "\\\\", `"`, "\\\"")
)
var _ ObjectStorage = &MinioStorage{}
type minioObject struct {
*minio.Object

View File

@@ -12,6 +12,7 @@ import (
"os"
"path"
"code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/public"
"code.gitea.io/gitea/modules/setting"
@@ -62,31 +63,30 @@ type Object interface {
type ServeDirectOptions struct {
// Overrides the automatically detected MIME type.
ContentType string
// Overrides the default Content-Disposition header, which is `inline; filename="name"`.
ContentDisposition string
}
// Safe defaults are applied only when not explicitly overridden by the caller.
func prepareServeDirectOptions(optsOptional *ServeDirectOptions, name string) (ret ServeDirectOptions) {
func prepareServeDirectOptions(optsOptional *ServeDirectOptions, name string) (ret struct {
ContentType string
ContentDisposition string
},
) {
// Here we might not know the real filename, and it's quite inefficient to detect the MIME type by pre-fetching the object head.
// So we just do a quick detection by extension name, at least it works for the "View Raw File" for an LFS file on the Web UI.
// TODO: OBJECT-STORAGE-CONTENT-TYPE: need a complete solution and refactor for Azure in the future
if optsOptional != nil {
ret = *optsOptional
ret.ContentType = optsOptional.ContentType
}
// TODO: UNIFY-CONTENT-DISPOSITION-FROM-STORAGE
name = path.Base(name)
if ret.ContentType == "" {
ext := path.Ext(name)
ret.ContentType = public.DetectWellKnownMimeType(ext)
}
if ret.ContentDisposition == "" {
// When using ServeDirect, the URL is from the object storage's web server,
// it is not the same origin as Gitea server, so it should be safe enough to use "inline" to render the content directly.
// If a browser doesn't support the content type to be displayed inline, browser will download with the filename.
ret.ContentDisposition = fmt.Sprintf(`inline; filename="%s"`, quoteEscaper.Replace(name))
}
// When using ServeDirect, the URL is from the object storage's web server,
// it is not the same origin as Gitea server, so it should be safe enough to use "inline" to render the content directly.
// If a browser doesn't support the content type to be displayed inline, browser will download with the filename.
ret.ContentDisposition = httplib.EncodeContentDispositionInline(name)
return ret
}

View File

@@ -53,7 +53,12 @@ func testStorageIterator(t *testing.T, typStr Type, cfg *setting.Storage) {
}
}
func testSingleBlobStorageURLContentTypeAndDisposition(t *testing.T, s ObjectStorage, path, name string, expected ServeDirectOptions, reqParams *ServeDirectOptions) {
type expectedServeDirectHeaders struct {
ContentType string
ContentDisposition string
}
func testSingleBlobStorageURLContentTypeAndDisposition(t *testing.T, s ObjectStorage, path, name string, expected expectedServeDirectHeaders, reqParams *ServeDirectOptions) {
u, err := s.ServeDirectURL(path, name, http.MethodGet, reqParams)
require.NoError(t, err)
resp, err := http.Get(u.String())
@@ -71,36 +76,29 @@ func testBlobStorageURLContentTypeAndDisposition(t *testing.T, typStr Type, cfg
s, err := NewStorage(typStr, cfg)
assert.NoError(t, err)
data := "Q2xTckt6Y1hDOWh0" // arbitrary test content; specific value is irrelevant to this test
testfilename := "test.txt" // arbitrary file name; specific value is irrelevant to this test
_, err = s.Save(testfilename, strings.NewReader(data), int64(len(data)))
testFilename := "test.txt"
_, err = s.Save(testFilename, strings.NewReader("dummy-content"), -1)
assert.NoError(t, err)
testSingleBlobStorageURLContentTypeAndDisposition(t, s, testfilename, "test.txt", ServeDirectOptions{
testSingleBlobStorageURLContentTypeAndDisposition(t, s, testFilename, "test.txt", expectedServeDirectHeaders{
ContentType: "text/plain; charset=utf-8",
ContentDisposition: `inline; filename="test.txt"`,
ContentDisposition: `inline; filename=test.txt`,
}, nil)
testSingleBlobStorageURLContentTypeAndDisposition(t, s, testfilename, "test.pdf", ServeDirectOptions{
testSingleBlobStorageURLContentTypeAndDisposition(t, s, testFilename, "test.pdf", expectedServeDirectHeaders{
ContentType: "application/pdf",
ContentDisposition: `inline; filename="test.pdf"`,
ContentDisposition: `inline; filename=test.pdf`,
}, nil)
testSingleBlobStorageURLContentTypeAndDisposition(t, s, testfilename, "test.wasm", ServeDirectOptions{
ContentDisposition: `inline; filename="test.wasm"`,
testSingleBlobStorageURLContentTypeAndDisposition(t, s, testFilename, "test.wasm", expectedServeDirectHeaders{
ContentDisposition: `inline; filename=test.wasm`,
}, nil)
testSingleBlobStorageURLContentTypeAndDisposition(t, s, testfilename, "test.wasm", ServeDirectOptions{
ContentDisposition: `inline; filename="test.wasm"`,
}, &ServeDirectOptions{})
testSingleBlobStorageURLContentTypeAndDisposition(t, s, testfilename, "test.txt", ServeDirectOptions{
ContentType: "application/octet-stream",
ContentDisposition: `inline; filename="test.xml"`,
testSingleBlobStorageURLContentTypeAndDisposition(t, s, testFilename, "test.wasm", expectedServeDirectHeaders{
ContentType: "application/wasm",
ContentDisposition: `inline; filename=test.wasm`,
}, &ServeDirectOptions{
ContentType: "application/octet-stream",
ContentDisposition: `inline; filename="test.xml"`,
ContentType: "application/wasm",
})
assert.NoError(t, s.Delete(testfilename))
assert.NoError(t, s.Delete(testFilename))
}

View File

@@ -183,3 +183,7 @@ func DetectContentType(data []byte) SniffedType {
}
return SniffedType{ct}
}
// FromContentType wraps an already-known Content-Type value in a SniffedType
// without performing any content detection on the data itself.
func FromContentType(contentType string) SniffedType {
	return SniffedType{contentType}
}

View File

@@ -3,8 +3,8 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.32.0
// protoc v4.25.2
// protoc-gen-go v1.36.11
// protoc v7.34.0
// source: artifact.proto
package actions
@@ -12,6 +12,7 @@ package actions
import (
reflect "reflect"
sync "sync"
unsafe "unsafe"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
@@ -27,24 +28,22 @@ const (
)
type CreateArtifactRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
WorkflowRunBackendId string `protobuf:"bytes,1,opt,name=workflow_run_backend_id,json=workflowRunBackendId,proto3" json:"workflow_run_backend_id,omitempty"`
WorkflowJobRunBackendId string `protobuf:"bytes,2,opt,name=workflow_job_run_backend_id,json=workflowJobRunBackendId,proto3" json:"workflow_job_run_backend_id,omitempty"`
Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"`
ExpiresAt *timestamppb.Timestamp `protobuf:"bytes,4,opt,name=expires_at,json=expiresAt,proto3" json:"expires_at,omitempty"`
Version int32 `protobuf:"varint,5,opt,name=version,proto3" json:"version,omitempty"`
state protoimpl.MessageState `protogen:"open.v1"`
WorkflowRunBackendId string `protobuf:"bytes,1,opt,name=workflow_run_backend_id,json=workflowRunBackendId,proto3" json:"workflow_run_backend_id,omitempty"`
WorkflowJobRunBackendId string `protobuf:"bytes,2,opt,name=workflow_job_run_backend_id,json=workflowJobRunBackendId,proto3" json:"workflow_job_run_backend_id,omitempty"`
Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"`
ExpiresAt *timestamppb.Timestamp `protobuf:"bytes,4,opt,name=expires_at,json=expiresAt,proto3" json:"expires_at,omitempty"`
Version int32 `protobuf:"varint,5,opt,name=version,proto3" json:"version,omitempty"`
MimeType *wrapperspb.StringValue `protobuf:"bytes,6,opt,name=mime_type,json=mimeType,proto3" json:"mime_type,omitempty"`
unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
}
func (x *CreateArtifactRequest) Reset() {
*x = CreateArtifactRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_artifact_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
mi := &file_artifact_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *CreateArtifactRequest) String() string {
@@ -55,7 +54,7 @@ func (*CreateArtifactRequest) ProtoMessage() {}
func (x *CreateArtifactRequest) ProtoReflect() protoreflect.Message {
mi := &file_artifact_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -105,22 +104,26 @@ func (x *CreateArtifactRequest) GetVersion() int32 {
return 0
}
type CreateArtifactResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
func (x *CreateArtifactRequest) GetMimeType() *wrapperspb.StringValue {
if x != nil {
return x.MimeType
}
return nil
}
Ok bool `protobuf:"varint,1,opt,name=ok,proto3" json:"ok,omitempty"`
SignedUploadUrl string `protobuf:"bytes,2,opt,name=signed_upload_url,json=signedUploadUrl,proto3" json:"signed_upload_url,omitempty"`
type CreateArtifactResponse struct {
state protoimpl.MessageState `protogen:"open.v1"`
Ok bool `protobuf:"varint,1,opt,name=ok,proto3" json:"ok,omitempty"`
SignedUploadUrl string `protobuf:"bytes,2,opt,name=signed_upload_url,json=signedUploadUrl,proto3" json:"signed_upload_url,omitempty"`
unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
}
func (x *CreateArtifactResponse) Reset() {
*x = CreateArtifactResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_artifact_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
mi := &file_artifact_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *CreateArtifactResponse) String() string {
@@ -131,7 +134,7 @@ func (*CreateArtifactResponse) ProtoMessage() {}
func (x *CreateArtifactResponse) ProtoReflect() protoreflect.Message {
mi := &file_artifact_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -161,24 +164,21 @@ func (x *CreateArtifactResponse) GetSignedUploadUrl() string {
}
type FinalizeArtifactRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
state protoimpl.MessageState `protogen:"open.v1"`
WorkflowRunBackendId string `protobuf:"bytes,1,opt,name=workflow_run_backend_id,json=workflowRunBackendId,proto3" json:"workflow_run_backend_id,omitempty"`
WorkflowJobRunBackendId string `protobuf:"bytes,2,opt,name=workflow_job_run_backend_id,json=workflowJobRunBackendId,proto3" json:"workflow_job_run_backend_id,omitempty"`
Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"`
Size int64 `protobuf:"varint,4,opt,name=size,proto3" json:"size,omitempty"`
Hash *wrapperspb.StringValue `protobuf:"bytes,5,opt,name=hash,proto3" json:"hash,omitempty"`
unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
}
func (x *FinalizeArtifactRequest) Reset() {
*x = FinalizeArtifactRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_artifact_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
mi := &file_artifact_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *FinalizeArtifactRequest) String() string {
@@ -189,7 +189,7 @@ func (*FinalizeArtifactRequest) ProtoMessage() {}
func (x *FinalizeArtifactRequest) ProtoReflect() protoreflect.Message {
mi := &file_artifact_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -240,21 +240,18 @@ func (x *FinalizeArtifactRequest) GetHash() *wrapperspb.StringValue {
}
type FinalizeArtifactResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
state protoimpl.MessageState `protogen:"open.v1"`
Ok bool `protobuf:"varint,1,opt,name=ok,proto3" json:"ok,omitempty"`
ArtifactId int64 `protobuf:"varint,2,opt,name=artifact_id,json=artifactId,proto3" json:"artifact_id,omitempty"`
unknownFields protoimpl.UnknownFields
Ok bool `protobuf:"varint,1,opt,name=ok,proto3" json:"ok,omitempty"`
ArtifactId int64 `protobuf:"varint,2,opt,name=artifact_id,json=artifactId,proto3" json:"artifact_id,omitempty"`
sizeCache protoimpl.SizeCache
}
func (x *FinalizeArtifactResponse) Reset() {
*x = FinalizeArtifactResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_artifact_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
mi := &file_artifact_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *FinalizeArtifactResponse) String() string {
@@ -265,7 +262,7 @@ func (*FinalizeArtifactResponse) ProtoMessage() {}
func (x *FinalizeArtifactResponse) ProtoReflect() protoreflect.Message {
mi := &file_artifact_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -295,23 +292,20 @@ func (x *FinalizeArtifactResponse) GetArtifactId() int64 {
}
type ListArtifactsRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
state protoimpl.MessageState `protogen:"open.v1"`
WorkflowRunBackendId string `protobuf:"bytes,1,opt,name=workflow_run_backend_id,json=workflowRunBackendId,proto3" json:"workflow_run_backend_id,omitempty"`
WorkflowJobRunBackendId string `protobuf:"bytes,2,opt,name=workflow_job_run_backend_id,json=workflowJobRunBackendId,proto3" json:"workflow_job_run_backend_id,omitempty"`
NameFilter *wrapperspb.StringValue `protobuf:"bytes,3,opt,name=name_filter,json=nameFilter,proto3" json:"name_filter,omitempty"`
IdFilter *wrapperspb.Int64Value `protobuf:"bytes,4,opt,name=id_filter,json=idFilter,proto3" json:"id_filter,omitempty"`
unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
}
func (x *ListArtifactsRequest) Reset() {
*x = ListArtifactsRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_artifact_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
mi := &file_artifact_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *ListArtifactsRequest) String() string {
@@ -322,7 +316,7 @@ func (*ListArtifactsRequest) ProtoMessage() {}
func (x *ListArtifactsRequest) ProtoReflect() protoreflect.Message {
mi := &file_artifact_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -366,20 +360,17 @@ func (x *ListArtifactsRequest) GetIdFilter() *wrapperspb.Int64Value {
}
type ListArtifactsResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
state protoimpl.MessageState `protogen:"open.v1"`
Artifacts []*ListArtifactsResponse_MonolithArtifact `protobuf:"bytes,1,rep,name=artifacts,proto3" json:"artifacts,omitempty"`
unknownFields protoimpl.UnknownFields
Artifacts []*ListArtifactsResponse_MonolithArtifact `protobuf:"bytes,1,rep,name=artifacts,proto3" json:"artifacts,omitempty"`
sizeCache protoimpl.SizeCache
}
func (x *ListArtifactsResponse) Reset() {
*x = ListArtifactsResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_artifact_proto_msgTypes[5]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
mi := &file_artifact_proto_msgTypes[5]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *ListArtifactsResponse) String() string {
@@ -390,7 +381,7 @@ func (*ListArtifactsResponse) ProtoMessage() {}
func (x *ListArtifactsResponse) ProtoReflect() protoreflect.Message {
mi := &file_artifact_proto_msgTypes[5]
if protoimpl.UnsafeEnabled && x != nil {
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -413,25 +404,22 @@ func (x *ListArtifactsResponse) GetArtifacts() []*ListArtifactsResponse_Monolith
}
type ListArtifactsResponse_MonolithArtifact struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
state protoimpl.MessageState `protogen:"open.v1"`
WorkflowRunBackendId string `protobuf:"bytes,1,opt,name=workflow_run_backend_id,json=workflowRunBackendId,proto3" json:"workflow_run_backend_id,omitempty"`
WorkflowJobRunBackendId string `protobuf:"bytes,2,opt,name=workflow_job_run_backend_id,json=workflowJobRunBackendId,proto3" json:"workflow_job_run_backend_id,omitempty"`
DatabaseId int64 `protobuf:"varint,3,opt,name=database_id,json=databaseId,proto3" json:"database_id,omitempty"`
Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"`
Size int64 `protobuf:"varint,5,opt,name=size,proto3" json:"size,omitempty"`
CreatedAt *timestamppb.Timestamp `protobuf:"bytes,6,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"`
unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
}
func (x *ListArtifactsResponse_MonolithArtifact) Reset() {
*x = ListArtifactsResponse_MonolithArtifact{}
if protoimpl.UnsafeEnabled {
mi := &file_artifact_proto_msgTypes[6]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
mi := &file_artifact_proto_msgTypes[6]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *ListArtifactsResponse_MonolithArtifact) String() string {
@@ -442,7 +430,7 @@ func (*ListArtifactsResponse_MonolithArtifact) ProtoMessage() {}
func (x *ListArtifactsResponse_MonolithArtifact) ProtoReflect() protoreflect.Message {
mi := &file_artifact_proto_msgTypes[6]
if protoimpl.UnsafeEnabled && x != nil {
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -500,22 +488,19 @@ func (x *ListArtifactsResponse_MonolithArtifact) GetCreatedAt() *timestamppb.Tim
}
type GetSignedArtifactURLRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
WorkflowRunBackendId string `protobuf:"bytes,1,opt,name=workflow_run_backend_id,json=workflowRunBackendId,proto3" json:"workflow_run_backend_id,omitempty"`
WorkflowJobRunBackendId string `protobuf:"bytes,2,opt,name=workflow_job_run_backend_id,json=workflowJobRunBackendId,proto3" json:"workflow_job_run_backend_id,omitempty"`
Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"`
state protoimpl.MessageState `protogen:"open.v1"`
WorkflowRunBackendId string `protobuf:"bytes,1,opt,name=workflow_run_backend_id,json=workflowRunBackendId,proto3" json:"workflow_run_backend_id,omitempty"`
WorkflowJobRunBackendId string `protobuf:"bytes,2,opt,name=workflow_job_run_backend_id,json=workflowJobRunBackendId,proto3" json:"workflow_job_run_backend_id,omitempty"`
Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"`
unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
}
func (x *GetSignedArtifactURLRequest) Reset() {
*x = GetSignedArtifactURLRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_artifact_proto_msgTypes[7]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
mi := &file_artifact_proto_msgTypes[7]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *GetSignedArtifactURLRequest) String() string {
@@ -526,7 +511,7 @@ func (*GetSignedArtifactURLRequest) ProtoMessage() {}
func (x *GetSignedArtifactURLRequest) ProtoReflect() protoreflect.Message {
mi := &file_artifact_proto_msgTypes[7]
if protoimpl.UnsafeEnabled && x != nil {
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -563,20 +548,17 @@ func (x *GetSignedArtifactURLRequest) GetName() string {
}
type GetSignedArtifactURLResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
state protoimpl.MessageState `protogen:"open.v1"`
SignedUrl string `protobuf:"bytes,1,opt,name=signed_url,json=signedUrl,proto3" json:"signed_url,omitempty"`
unknownFields protoimpl.UnknownFields
SignedUrl string `protobuf:"bytes,1,opt,name=signed_url,json=signedUrl,proto3" json:"signed_url,omitempty"`
sizeCache protoimpl.SizeCache
}
func (x *GetSignedArtifactURLResponse) Reset() {
*x = GetSignedArtifactURLResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_artifact_proto_msgTypes[8]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
mi := &file_artifact_proto_msgTypes[8]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *GetSignedArtifactURLResponse) String() string {
@@ -587,7 +569,7 @@ func (*GetSignedArtifactURLResponse) ProtoMessage() {}
func (x *GetSignedArtifactURLResponse) ProtoReflect() protoreflect.Message {
mi := &file_artifact_proto_msgTypes[8]
if protoimpl.UnsafeEnabled && x != nil {
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -610,22 +592,19 @@ func (x *GetSignedArtifactURLResponse) GetSignedUrl() string {
}
type DeleteArtifactRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
WorkflowRunBackendId string `protobuf:"bytes,1,opt,name=workflow_run_backend_id,json=workflowRunBackendId,proto3" json:"workflow_run_backend_id,omitempty"`
WorkflowJobRunBackendId string `protobuf:"bytes,2,opt,name=workflow_job_run_backend_id,json=workflowJobRunBackendId,proto3" json:"workflow_job_run_backend_id,omitempty"`
Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"`
state protoimpl.MessageState `protogen:"open.v1"`
WorkflowRunBackendId string `protobuf:"bytes,1,opt,name=workflow_run_backend_id,json=workflowRunBackendId,proto3" json:"workflow_run_backend_id,omitempty"`
WorkflowJobRunBackendId string `protobuf:"bytes,2,opt,name=workflow_job_run_backend_id,json=workflowJobRunBackendId,proto3" json:"workflow_job_run_backend_id,omitempty"`
Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"`
unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
}
func (x *DeleteArtifactRequest) Reset() {
*x = DeleteArtifactRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_artifact_proto_msgTypes[9]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
mi := &file_artifact_proto_msgTypes[9]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *DeleteArtifactRequest) String() string {
@@ -636,7 +615,7 @@ func (*DeleteArtifactRequest) ProtoMessage() {}
func (x *DeleteArtifactRequest) ProtoReflect() protoreflect.Message {
mi := &file_artifact_proto_msgTypes[9]
if protoimpl.UnsafeEnabled && x != nil {
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -673,21 +652,18 @@ func (x *DeleteArtifactRequest) GetName() string {
}
type DeleteArtifactResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
state protoimpl.MessageState `protogen:"open.v1"`
Ok bool `protobuf:"varint,1,opt,name=ok,proto3" json:"ok,omitempty"`
ArtifactId int64 `protobuf:"varint,2,opt,name=artifact_id,json=artifactId,proto3" json:"artifact_id,omitempty"`
unknownFields protoimpl.UnknownFields
Ok bool `protobuf:"varint,1,opt,name=ok,proto3" json:"ok,omitempty"`
ArtifactId int64 `protobuf:"varint,2,opt,name=artifact_id,json=artifactId,proto3" json:"artifact_id,omitempty"`
sizeCache protoimpl.SizeCache
}
func (x *DeleteArtifactResponse) Reset() {
*x = DeleteArtifactResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_artifact_proto_msgTypes[10]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
mi := &file_artifact_proto_msgTypes[10]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *DeleteArtifactResponse) String() string {
@@ -698,7 +674,7 @@ func (*DeleteArtifactResponse) ProtoMessage() {}
func (x *DeleteArtifactResponse) ProtoReflect() protoreflect.Message {
mi := &file_artifact_proto_msgTypes[10]
if protoimpl.UnsafeEnabled && x != nil {
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -729,173 +705,105 @@ func (x *DeleteArtifactResponse) GetArtifactId() int64 {
var File_artifact_proto protoreflect.FileDescriptor
var file_artifact_proto_rawDesc = []byte{
0x0a, 0x0e, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x12, 0x1d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73,
0x2e, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x1a,
0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66,
0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x1a, 0x1e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75,
0x66, 0x2f, 0x77, 0x72, 0x61, 0x70, 0x70, 0x65, 0x72, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x22, 0xf5, 0x01, 0x0a, 0x15, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x41, 0x72, 0x74, 0x69, 0x66,
0x61, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x35, 0x0a, 0x17, 0x77, 0x6f,
0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x62, 0x61, 0x63, 0x6b, 0x65,
0x6e, 0x64, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x14, 0x77, 0x6f, 0x72,
0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x42, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x49,
0x64, 0x12, 0x3c, 0x0a, 0x1b, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6a, 0x6f,
0x62, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x5f, 0x69, 0x64,
0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x17, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77,
0x4a, 0x6f, 0x62, 0x52, 0x75, 0x6e, 0x42, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x49, 0x64, 0x12,
0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e,
0x61, 0x6d, 0x65, 0x12, 0x39, 0x0a, 0x0a, 0x65, 0x78, 0x70, 0x69, 0x72, 0x65, 0x73, 0x5f, 0x61,
0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74,
0x61, 0x6d, 0x70, 0x52, 0x09, 0x65, 0x78, 0x70, 0x69, 0x72, 0x65, 0x73, 0x41, 0x74, 0x12, 0x18,
0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x52,
0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x54, 0x0a, 0x16, 0x43, 0x72, 0x65, 0x61,
0x74, 0x65, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x6f, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x02,
0x6f, 0x6b, 0x12, 0x2a, 0x0a, 0x11, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x5f, 0x75, 0x70, 0x6c,
0x6f, 0x61, 0x64, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x73,
0x69, 0x67, 0x6e, 0x65, 0x64, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x55, 0x72, 0x6c, 0x22, 0xe8,
0x01, 0x0a, 0x17, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x41, 0x72, 0x74, 0x69, 0x66,
0x61, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x35, 0x0a, 0x17, 0x77, 0x6f,
0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x62, 0x61, 0x63, 0x6b, 0x65,
0x6e, 0x64, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x14, 0x77, 0x6f, 0x72,
0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x42, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x49,
0x64, 0x12, 0x3c, 0x0a, 0x1b, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6a, 0x6f,
0x62, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x5f, 0x69, 0x64,
0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x17, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77,
0x4a, 0x6f, 0x62, 0x52, 0x75, 0x6e, 0x42, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x49, 0x64, 0x12,
0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e,
0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28,
0x03, 0x52, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x12, 0x30, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18,
0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x56, 0x61,
0x6c, 0x75, 0x65, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x22, 0x4b, 0x0a, 0x18, 0x46, 0x69, 0x6e,
0x61, 0x6c, 0x69, 0x7a, 0x65, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x65, 0x73,
0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x6f, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28,
0x08, 0x52, 0x02, 0x6f, 0x6b, 0x12, 0x1f, 0x0a, 0x0b, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63,
0x74, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x61, 0x72, 0x74, 0x69,
0x66, 0x61, 0x63, 0x74, 0x49, 0x64, 0x22, 0x84, 0x02, 0x0a, 0x14, 0x4c, 0x69, 0x73, 0x74, 0x41,
0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12,
0x35, 0x0a, 0x17, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x75, 0x6e, 0x5f,
0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
0x52, 0x14, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x42, 0x61, 0x63,
0x6b, 0x65, 0x6e, 0x64, 0x49, 0x64, 0x12, 0x3c, 0x0a, 0x1b, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c,
0x6f, 0x77, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x62, 0x61, 0x63, 0x6b, 0x65,
0x6e, 0x64, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x17, 0x77, 0x6f, 0x72,
0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4a, 0x6f, 0x62, 0x52, 0x75, 0x6e, 0x42, 0x61, 0x63, 0x6b, 0x65,
0x6e, 0x64, 0x49, 0x64, 0x12, 0x3d, 0x0a, 0x0b, 0x6e, 0x61, 0x6d, 0x65, 0x5f, 0x66, 0x69, 0x6c,
0x74, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x69,
0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0a, 0x6e, 0x61, 0x6d, 0x65, 0x46, 0x69, 0x6c,
0x74, 0x65, 0x72, 0x12, 0x38, 0x0a, 0x09, 0x69, 0x64, 0x5f, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72,
0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x49, 0x6e, 0x74, 0x36, 0x34, 0x56, 0x61,
0x6c, 0x75, 0x65, 0x52, 0x08, 0x69, 0x64, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x22, 0x7c, 0x0a,
0x15, 0x4c, 0x69, 0x73, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x52, 0x65,
0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x63, 0x0a, 0x09, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61,
0x63, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x45, 0x2e, 0x67, 0x69, 0x74, 0x68,
0x75, 0x62, 0x2e, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x72, 0x65, 0x73, 0x75, 0x6c,
0x74, 0x73, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x41, 0x72,
0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x5f,
0x4d, 0x6f, 0x6e, 0x6f, 0x6c, 0x69, 0x74, 0x68, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74,
0x52, 0x09, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x22, 0xa1, 0x02, 0x0a, 0x26,
0x4c, 0x69, 0x73, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x52, 0x65, 0x73,
0x70, 0x6f, 0x6e, 0x73, 0x65, 0x5f, 0x4d, 0x6f, 0x6e, 0x6f, 0x6c, 0x69, 0x74, 0x68, 0x41, 0x72,
0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x12, 0x35, 0x0a, 0x17, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c,
0x6f, 0x77, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x5f, 0x69,
0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x14, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f,
0x77, 0x52, 0x75, 0x6e, 0x42, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x49, 0x64, 0x12, 0x3c, 0x0a,
0x1b, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x72, 0x75,
0x6e, 0x5f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01,
0x28, 0x09, 0x52, 0x17, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4a, 0x6f, 0x62, 0x52,
0x75, 0x6e, 0x42, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x49, 0x64, 0x12, 0x1f, 0x0a, 0x0b, 0x64,
0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03,
0x52, 0x0a, 0x64, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x49, 0x64, 0x12, 0x12, 0x0a, 0x04,
0x6e, 0x61, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65,
0x12, 0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04,
0x73, 0x69, 0x7a, 0x65, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f,
0x61, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73,
0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x22,
0xa6, 0x01, 0x0a, 0x1b, 0x47, 0x65, 0x74, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x41, 0x72, 0x74,
0x69, 0x66, 0x61, 0x63, 0x74, 0x55, 0x52, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12,
0x35, 0x0a, 0x17, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x75, 0x6e, 0x5f,
0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
0x52, 0x14, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x42, 0x61, 0x63,
0x6b, 0x65, 0x6e, 0x64, 0x49, 0x64, 0x12, 0x3c, 0x0a, 0x1b, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c,
0x6f, 0x77, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x62, 0x61, 0x63, 0x6b, 0x65,
0x6e, 0x64, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x17, 0x77, 0x6f, 0x72,
0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4a, 0x6f, 0x62, 0x52, 0x75, 0x6e, 0x42, 0x61, 0x63, 0x6b, 0x65,
0x6e, 0x64, 0x49, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01,
0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x3d, 0x0a, 0x1c, 0x47, 0x65, 0x74, 0x53,
0x69, 0x67, 0x6e, 0x65, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x55, 0x52, 0x4c,
0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x69, 0x67, 0x6e,
0x65, 0x64, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x69,
0x67, 0x6e, 0x65, 0x64, 0x55, 0x72, 0x6c, 0x22, 0xa0, 0x01, 0x0a, 0x15, 0x44, 0x65, 0x6c, 0x65,
0x74, 0x65, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
0x74, 0x12, 0x35, 0x0a, 0x17, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x75,
0x6e, 0x5f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01,
0x28, 0x09, 0x52, 0x14, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x42,
0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x49, 0x64, 0x12, 0x3c, 0x0a, 0x1b, 0x77, 0x6f, 0x72, 0x6b,
0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x62, 0x61, 0x63,
0x6b, 0x65, 0x6e, 0x64, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x17, 0x77,
0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4a, 0x6f, 0x62, 0x52, 0x75, 0x6e, 0x42, 0x61, 0x63,
0x6b, 0x65, 0x6e, 0x64, 0x49, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03,
0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x49, 0x0a, 0x16, 0x44, 0x65,
0x6c, 0x65, 0x74, 0x65, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70,
0x6f, 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x6f, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08,
0x52, 0x02, 0x6f, 0x6b, 0x12, 0x1f, 0x0a, 0x0b, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74,
0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x61, 0x72, 0x74, 0x69, 0x66,
0x61, 0x63, 0x74, 0x49, 0x64, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
const file_artifact_proto_rawDesc = "" +
"\n" +
"\x0eartifact.proto\x12\x1dgithub.actions.results.api.v1\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\xb0\x02\n" +
"\x15CreateArtifactRequest\x125\n" +
"\x17workflow_run_backend_id\x18\x01 \x01(\tR\x14workflowRunBackendId\x12<\n" +
"\x1bworkflow_job_run_backend_id\x18\x02 \x01(\tR\x17workflowJobRunBackendId\x12\x12\n" +
"\x04name\x18\x03 \x01(\tR\x04name\x129\n" +
"\n" +
"expires_at\x18\x04 \x01(\v2\x1a.google.protobuf.TimestampR\texpiresAt\x12\x18\n" +
"\aversion\x18\x05 \x01(\x05R\aversion\x129\n" +
"\tmime_type\x18\x06 \x01(\v2\x1c.google.protobuf.StringValueR\bmimeType\"T\n" +
"\x16CreateArtifactResponse\x12\x0e\n" +
"\x02ok\x18\x01 \x01(\bR\x02ok\x12*\n" +
"\x11signed_upload_url\x18\x02 \x01(\tR\x0fsignedUploadUrl\"\xe8\x01\n" +
"\x17FinalizeArtifactRequest\x125\n" +
"\x17workflow_run_backend_id\x18\x01 \x01(\tR\x14workflowRunBackendId\x12<\n" +
"\x1bworkflow_job_run_backend_id\x18\x02 \x01(\tR\x17workflowJobRunBackendId\x12\x12\n" +
"\x04name\x18\x03 \x01(\tR\x04name\x12\x12\n" +
"\x04size\x18\x04 \x01(\x03R\x04size\x120\n" +
"\x04hash\x18\x05 \x01(\v2\x1c.google.protobuf.StringValueR\x04hash\"K\n" +
"\x18FinalizeArtifactResponse\x12\x0e\n" +
"\x02ok\x18\x01 \x01(\bR\x02ok\x12\x1f\n" +
"\vartifact_id\x18\x02 \x01(\x03R\n" +
"artifactId\"\x84\x02\n" +
"\x14ListArtifactsRequest\x125\n" +
"\x17workflow_run_backend_id\x18\x01 \x01(\tR\x14workflowRunBackendId\x12<\n" +
"\x1bworkflow_job_run_backend_id\x18\x02 \x01(\tR\x17workflowJobRunBackendId\x12=\n" +
"\vname_filter\x18\x03 \x01(\v2\x1c.google.protobuf.StringValueR\n" +
"nameFilter\x128\n" +
"\tid_filter\x18\x04 \x01(\v2\x1b.google.protobuf.Int64ValueR\bidFilter\"|\n" +
"\x15ListArtifactsResponse\x12c\n" +
"\tartifacts\x18\x01 \x03(\v2E.github.actions.results.api.v1.ListArtifactsResponse_MonolithArtifactR\tartifacts\"\xa1\x02\n" +
"&ListArtifactsResponse_MonolithArtifact\x125\n" +
"\x17workflow_run_backend_id\x18\x01 \x01(\tR\x14workflowRunBackendId\x12<\n" +
"\x1bworkflow_job_run_backend_id\x18\x02 \x01(\tR\x17workflowJobRunBackendId\x12\x1f\n" +
"\vdatabase_id\x18\x03 \x01(\x03R\n" +
"databaseId\x12\x12\n" +
"\x04name\x18\x04 \x01(\tR\x04name\x12\x12\n" +
"\x04size\x18\x05 \x01(\x03R\x04size\x129\n" +
"\n" +
"created_at\x18\x06 \x01(\v2\x1a.google.protobuf.TimestampR\tcreatedAt\"\xa6\x01\n" +
"\x1bGetSignedArtifactURLRequest\x125\n" +
"\x17workflow_run_backend_id\x18\x01 \x01(\tR\x14workflowRunBackendId\x12<\n" +
"\x1bworkflow_job_run_backend_id\x18\x02 \x01(\tR\x17workflowJobRunBackendId\x12\x12\n" +
"\x04name\x18\x03 \x01(\tR\x04name\"=\n" +
"\x1cGetSignedArtifactURLResponse\x12\x1d\n" +
"\n" +
"signed_url\x18\x01 \x01(\tR\tsignedUrl\"\xa0\x01\n" +
"\x15DeleteArtifactRequest\x125\n" +
"\x17workflow_run_backend_id\x18\x01 \x01(\tR\x14workflowRunBackendId\x12<\n" +
"\x1bworkflow_job_run_backend_id\x18\x02 \x01(\tR\x17workflowJobRunBackendId\x12\x12\n" +
"\x04name\x18\x03 \x01(\tR\x04name\"I\n" +
"\x16DeleteArtifactResponse\x12\x0e\n" +
"\x02ok\x18\x01 \x01(\bR\x02ok\x12\x1f\n" +
"\vartifact_id\x18\x02 \x01(\x03R\n" +
"artifactIdB)Z'code.gitea.io/gitea/routers/api/actionsb\x06proto3"
var (
file_artifact_proto_rawDescOnce sync.Once
file_artifact_proto_rawDescData = file_artifact_proto_rawDesc
file_artifact_proto_rawDescData []byte
)
func file_artifact_proto_rawDescGZIP() []byte {
file_artifact_proto_rawDescOnce.Do(func() {
file_artifact_proto_rawDescData = protoimpl.X.CompressGZIP(file_artifact_proto_rawDescData)
file_artifact_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_artifact_proto_rawDesc), len(file_artifact_proto_rawDesc)))
})
return file_artifact_proto_rawDescData
}
var (
file_artifact_proto_msgTypes = make([]protoimpl.MessageInfo, 11)
file_artifact_proto_goTypes = []interface{}{
(*CreateArtifactRequest)(nil), // 0: github.actions.results.api.v1.CreateArtifactRequest
(*CreateArtifactResponse)(nil), // 1: github.actions.results.api.v1.CreateArtifactResponse
(*FinalizeArtifactRequest)(nil), // 2: github.actions.results.api.v1.FinalizeArtifactRequest
(*FinalizeArtifactResponse)(nil), // 3: github.actions.results.api.v1.FinalizeArtifactResponse
(*ListArtifactsRequest)(nil), // 4: github.actions.results.api.v1.ListArtifactsRequest
(*ListArtifactsResponse)(nil), // 5: github.actions.results.api.v1.ListArtifactsResponse
(*ListArtifactsResponse_MonolithArtifact)(nil), // 6: github.actions.results.api.v1.ListArtifactsResponse_MonolithArtifact
(*GetSignedArtifactURLRequest)(nil), // 7: github.actions.results.api.v1.GetSignedArtifactURLRequest
(*GetSignedArtifactURLResponse)(nil), // 8: github.actions.results.api.v1.GetSignedArtifactURLResponse
(*DeleteArtifactRequest)(nil), // 9: github.actions.results.api.v1.DeleteArtifactRequest
(*DeleteArtifactResponse)(nil), // 10: github.actions.results.api.v1.DeleteArtifactResponse
(*timestamppb.Timestamp)(nil), // 11: google.protobuf.Timestamp
(*wrapperspb.StringValue)(nil), // 12: google.protobuf.StringValue
(*wrapperspb.Int64Value)(nil), // 13: google.protobuf.Int64Value
}
)
var file_artifact_proto_msgTypes = make([]protoimpl.MessageInfo, 11)
var file_artifact_proto_goTypes = []any{
(*CreateArtifactRequest)(nil), // 0: github.actions.results.api.v1.CreateArtifactRequest
(*CreateArtifactResponse)(nil), // 1: github.actions.results.api.v1.CreateArtifactResponse
(*FinalizeArtifactRequest)(nil), // 2: github.actions.results.api.v1.FinalizeArtifactRequest
(*FinalizeArtifactResponse)(nil), // 3: github.actions.results.api.v1.FinalizeArtifactResponse
(*ListArtifactsRequest)(nil), // 4: github.actions.results.api.v1.ListArtifactsRequest
(*ListArtifactsResponse)(nil), // 5: github.actions.results.api.v1.ListArtifactsResponse
(*ListArtifactsResponse_MonolithArtifact)(nil), // 6: github.actions.results.api.v1.ListArtifactsResponse_MonolithArtifact
(*GetSignedArtifactURLRequest)(nil), // 7: github.actions.results.api.v1.GetSignedArtifactURLRequest
(*GetSignedArtifactURLResponse)(nil), // 8: github.actions.results.api.v1.GetSignedArtifactURLResponse
(*DeleteArtifactRequest)(nil), // 9: github.actions.results.api.v1.DeleteArtifactRequest
(*DeleteArtifactResponse)(nil), // 10: github.actions.results.api.v1.DeleteArtifactResponse
(*timestamppb.Timestamp)(nil), // 11: google.protobuf.Timestamp
(*wrapperspb.StringValue)(nil), // 12: google.protobuf.StringValue
(*wrapperspb.Int64Value)(nil), // 13: google.protobuf.Int64Value
}
var file_artifact_proto_depIdxs = []int32{
11, // 0: github.actions.results.api.v1.CreateArtifactRequest.expires_at:type_name -> google.protobuf.Timestamp
12, // 1: github.actions.results.api.v1.FinalizeArtifactRequest.hash:type_name -> google.protobuf.StringValue
12, // 2: github.actions.results.api.v1.ListArtifactsRequest.name_filter:type_name -> google.protobuf.StringValue
13, // 3: github.actions.results.api.v1.ListArtifactsRequest.id_filter:type_name -> google.protobuf.Int64Value
6, // 4: github.actions.results.api.v1.ListArtifactsResponse.artifacts:type_name -> github.actions.results.api.v1.ListArtifactsResponse_MonolithArtifact
11, // 5: github.actions.results.api.v1.ListArtifactsResponse_MonolithArtifact.created_at:type_name -> google.protobuf.Timestamp
6, // [6:6] is the sub-list for method output_type
6, // [6:6] is the sub-list for method input_type
6, // [6:6] is the sub-list for extension type_name
6, // [6:6] is the sub-list for extension extendee
0, // [0:6] is the sub-list for field type_name
12, // 1: github.actions.results.api.v1.CreateArtifactRequest.mime_type:type_name -> google.protobuf.StringValue
12, // 2: github.actions.results.api.v1.FinalizeArtifactRequest.hash:type_name -> google.protobuf.StringValue
12, // 3: github.actions.results.api.v1.ListArtifactsRequest.name_filter:type_name -> google.protobuf.StringValue
13, // 4: github.actions.results.api.v1.ListArtifactsRequest.id_filter:type_name -> google.protobuf.Int64Value
6, // 5: github.actions.results.api.v1.ListArtifactsResponse.artifacts:type_name -> github.actions.results.api.v1.ListArtifactsResponse_MonolithArtifact
11, // 6: github.actions.results.api.v1.ListArtifactsResponse_MonolithArtifact.created_at:type_name -> google.protobuf.Timestamp
7, // [7:7] is the sub-list for method output_type
7, // [7:7] is the sub-list for method input_type
7, // [7:7] is the sub-list for extension type_name
7, // [7:7] is the sub-list for extension extendee
0, // [0:7] is the sub-list for field type_name
}
func init() { file_artifact_proto_init() }
@@ -903,145 +811,11 @@ func file_artifact_proto_init() {
if File_artifact_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_artifact_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*CreateArtifactRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_artifact_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*CreateArtifactResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_artifact_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*FinalizeArtifactRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_artifact_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*FinalizeArtifactResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_artifact_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListArtifactsRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_artifact_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListArtifactsResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_artifact_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListArtifactsResponse_MonolithArtifact); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_artifact_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GetSignedArtifactURLRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_artifact_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GetSignedArtifactURLResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_artifact_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*DeleteArtifactRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_artifact_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*DeleteArtifactResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_artifact_proto_rawDesc,
RawDescriptor: unsafe.Slice(unsafe.StringData(file_artifact_proto_rawDesc), len(file_artifact_proto_rawDesc)),
NumEnums: 0,
NumMessages: 11,
NumExtensions: 0,
@@ -1052,7 +826,6 @@ func file_artifact_proto_init() {
MessageInfos: file_artifact_proto_msgTypes,
}.Build()
File_artifact_proto = out.File
file_artifact_proto_rawDesc = nil
file_artifact_proto_goTypes = nil
file_artifact_proto_depIdxs = nil
}

View File

@@ -5,12 +5,15 @@ import "google/protobuf/wrappers.proto";
package github.actions.results.api.v1;
option go_package = "code.gitea.io/gitea/routers/api/actions";
message CreateArtifactRequest {
string workflow_run_backend_id = 1;
string workflow_job_run_backend_id = 2;
string name = 3;
google.protobuf.Timestamp expires_at = 4;
int32 version = 5;
google.protobuf.StringValue mime_type = 6;
}
message CreateArtifactResponse {

View File

@@ -282,7 +282,7 @@ func (ar artifactRoutes) uploadArtifact(ctx *ArtifactContext) {
artifact.FileCompressedSize != chunksTotalSize {
artifact.FileSize = fileRealTotalSize
artifact.FileCompressedSize = chunksTotalSize
artifact.ContentEncoding = ctx.Req.Header.Get("Content-Encoding")
artifact.ContentEncodingOrType = ctx.Req.Header.Get("Content-Encoding")
if err := actions.UpdateArtifactByID(ctx, artifact.ID, artifact); err != nil {
log.Error("Error update artifact: %v", err)
ctx.HTTPError(http.StatusInternalServerError, "Error update artifact")
@@ -492,7 +492,7 @@ func (ar artifactRoutes) downloadArtifact(ctx *ArtifactContext) {
defer fd.Close()
// if artifact is compressed, set content-encoding header to gzip
if artifact.ContentEncoding == "gzip" {
if artifact.ContentEncodingOrType == actions.ContentEncodingV3Gzip {
ctx.Resp.Header().Set("Content-Encoding", "gzip")
}
log.Debug("[artifact] downloadArtifact, name: %s, path: %s, storage: %s, size: %d", artifact.ArtifactName, artifact.ArtifactPath, artifact.StoragePath, artifact.FileSize)

View File

@@ -285,6 +285,17 @@ func mergeChunksForRun(ctx *ArtifactContext, st storage.ObjectStorage, runID int
return nil
}
func generateArtifactStoragePath(artifact *actions.ActionArtifact) string {
// if chunk is gzip, use gz as extension
// download-artifact action will use content-encoding header to decide if it should decompress the file
extension := "chunk"
if artifact.ContentEncodingOrType == actions.ContentEncodingV3Gzip {
extension = "chunk.gz"
}
return fmt.Sprintf("%d/%d/%d.%s", artifact.RunID%255, artifact.ID%255, time.Now().UnixNano(), extension)
}
func mergeChunksForArtifact(ctx *ArtifactContext, chunks []*chunkFileItem, st storage.ObjectStorage, artifact *actions.ActionArtifact, checksum string) error {
sort.Slice(chunks, func(i, j int) bool {
return chunks[i].Start < chunks[j].Start
@@ -335,15 +346,8 @@ func mergeChunksForArtifact(ctx *ArtifactContext, chunks []*chunkFileItem, st st
mergedReader = io.TeeReader(mergedReader, hashSha256)
}
// if chunk is gzip, use gz as extension
// download-artifact action will use content-encoding header to decide if it should decompress the file
extension := "chunk"
if artifact.ContentEncoding == "gzip" {
extension = "chunk.gz"
}
// save merged file
storagePath := fmt.Sprintf("%d/%d/%d.%s", artifact.RunID%255, artifact.ID%255, time.Now().UnixNano(), extension)
storagePath := generateArtifactStoragePath(artifact)
written, err := st.Save(storagePath, mergedReader, artifact.FileCompressedSize)
if err != nil {
return fmt.Errorf("save merged file error: %v", err)

View File

@@ -89,10 +89,12 @@ import (
"crypto/hmac"
"crypto/sha256"
"encoding/base64"
"encoding/hex"
"encoding/xml"
"errors"
"fmt"
"io"
"mime"
"net/http"
"net/url"
"path"
@@ -100,8 +102,9 @@ import (
"strings"
"time"
"code.gitea.io/gitea/models/actions"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/actions"
"code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
@@ -113,12 +116,10 @@ import (
"google.golang.org/protobuf/encoding/protojson"
"google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/types/known/timestamppb"
"xorm.io/builder"
)
const (
ArtifactV4RouteBase = "/twirp/github.actions.results.api.v1.ArtifactService"
ArtifactV4ContentEncoding = "application/zip"
)
const ArtifactV4RouteBase = "/twirp/github.actions.results.api.v1.ArtifactService"
type artifactV4Routes struct {
prefix string
@@ -219,7 +220,7 @@ func parseChunkFileItemV4(st storage.ObjectStorage, artifactID int64, fpath stri
return &item, nil
}
func (r *artifactV4Routes) verifySignature(ctx *ArtifactContext, endp string) (*actions.ActionTask, string, bool) {
func (r *artifactV4Routes) verifySignature(ctx *ArtifactContext, endp string) (*actions_model.ActionTask, string, bool) {
rawTaskID := ctx.Req.URL.Query().Get("taskID")
rawArtifactID := ctx.Req.URL.Query().Get("artifactID")
sig := ctx.Req.URL.Query().Get("sig")
@@ -246,13 +247,13 @@ func (r *artifactV4Routes) verifySignature(ctx *ArtifactContext, endp string) (*
ctx.HTTPError(http.StatusUnauthorized, "Error link expired")
return nil, "", false
}
task, err := actions.GetTaskByID(ctx, taskID)
task, err := actions_model.GetTaskByID(ctx, taskID)
if err != nil {
log.Error("Error runner api getting task by ID: %v", err)
ctx.HTTPError(http.StatusInternalServerError, "Error runner api getting task by ID")
return nil, "", false
}
if task.Status != actions.StatusRunning {
if task.Status != actions_model.StatusRunning {
log.Error("Error runner api getting task: task is not running")
ctx.HTTPError(http.StatusInternalServerError, "Error runner api getting task: task is not running")
return nil, "", false
@@ -265,9 +266,9 @@ func (r *artifactV4Routes) verifySignature(ctx *ArtifactContext, endp string) (*
return task, artifactName, true
}
func (r *artifactV4Routes) getArtifactByName(ctx *ArtifactContext, runID int64, name string) (*actions.ActionArtifact, error) {
var art actions.ActionArtifact
has, err := db.GetEngine(ctx).Where("run_id = ? AND artifact_name = ? AND artifact_path = ? AND content_encoding = ?", runID, name, name+".zip", ArtifactV4ContentEncoding).Get(&art)
func (r *artifactV4Routes) getArtifactByName(ctx *ArtifactContext, runID int64, name string) (*actions_model.ActionArtifact, error) {
var art actions_model.ActionArtifact
has, err := db.GetEngine(ctx).Where(builder.Eq{"run_id": runID, "artifact_name": name}, builder.Like{"content_encoding", "%/%"}).Get(&art)
if err != nil {
return nil, err
} else if !has {
@@ -321,26 +322,59 @@ func (r *artifactV4Routes) createArtifact(ctx *ArtifactContext) {
if req.ExpiresAt != nil {
retentionDays = int64(time.Until(req.ExpiresAt.AsTime()).Hours() / 24)
}
encoding := req.GetMimeType().GetValue()
// Validate media type
if encoding != "" {
encoding, _, _ = mime.ParseMediaType(encoding)
}
fileName := artifactName
if !strings.Contains(encoding, "/") || strings.EqualFold(encoding, actions_model.ContentTypeZip) && !strings.HasSuffix(fileName, ".zip") {
encoding = actions_model.ContentTypeZip
fileName = artifactName + ".zip"
}
// create or get artifact with name and path
artifact, err := actions.CreateArtifact(ctx, ctx.ActionTask, artifactName, artifactName+".zip", retentionDays)
artifact, err := actions_model.CreateArtifact(ctx, ctx.ActionTask, artifactName, fileName, retentionDays)
if err != nil {
log.Error("Error create or get artifact: %v", err)
ctx.HTTPError(http.StatusInternalServerError, "Error create or get artifact")
return
}
artifact.ContentEncoding = ArtifactV4ContentEncoding
artifact.ContentEncodingOrType = encoding
artifact.FileSize = 0
artifact.FileCompressedSize = 0
if err := actions.UpdateArtifactByID(ctx, artifact.ID, artifact); err != nil {
var respData CreateArtifactResponse
if setting.Actions.ArtifactStorage.ServeDirect() && setting.Actions.ArtifactStorage.Type == setting.AzureBlobStorageType {
storagePath := generateArtifactStoragePath(artifact)
if artifact.StoragePath != "" {
_ = storage.ActionsArtifacts.Delete(artifact.StoragePath)
}
artifact.StoragePath = storagePath
artifact.Status = actions_model.ArtifactStatusUploadPending
u, err := storage.ActionsArtifacts.ServeDirectURL(artifact.StoragePath, artifact.ArtifactPath, http.MethodPut, nil)
if err != nil {
log.Error("Error ServeDirectURL: %v", err)
ctx.HTTPError(http.StatusInternalServerError, "Error ServeDirectURL")
return
}
respData = CreateArtifactResponse{
Ok: true,
SignedUploadUrl: u.String(),
}
} else {
respData = CreateArtifactResponse{
Ok: true,
SignedUploadUrl: r.buildArtifactURL(ctx, "UploadArtifact", artifactName, ctx.ActionTask.ID, artifact.ID),
}
}
if err := actions_model.UpdateArtifactByID(ctx, artifact.ID, artifact); err != nil {
log.Error("Error UpdateArtifactByID: %v", err)
ctx.HTTPError(http.StatusInternalServerError, "Error UpdateArtifactByID")
return
}
respData := CreateArtifactResponse{
Ok: true,
SignedUploadUrl: r.buildArtifactURL(ctx, "UploadArtifact", artifactName, ctx.ActionTask.ID, artifact.ID),
}
r.sendProtobufBody(ctx, &respData)
}
@@ -370,7 +404,7 @@ func (r *artifactV4Routes) uploadArtifact(ctx *ArtifactContext) {
}
artifact.FileCompressedSize += uploadedLength
artifact.FileSize += uploadedLength
if err := actions.UpdateArtifactByID(ctx, artifact.ID, artifact); err != nil {
if err := actions_model.UpdateArtifactByID(ctx, artifact.ID, artifact); err != nil {
log.Error("Error UpdateArtifactByID: %v", err)
ctx.HTTPError(http.StatusInternalServerError, "Error UpdateArtifactByID")
return
@@ -448,9 +482,27 @@ func (r *artifactV4Routes) finalizeArtifact(ctx *ArtifactContext) {
return
}
var chunks []*chunkFileItem
if setting.Actions.ArtifactStorage.ServeDirect() && setting.Actions.ArtifactStorage.Type == setting.AzureBlobStorageType {
r.finalizeAzureServeDirect(ctx, &req, artifact)
} else {
r.finalizeDefaultArtifact(ctx, &req, artifact, runID)
}
// Return on finalize error
if ctx.Written() {
return
}
respData := FinalizeArtifactResponse{
Ok: true,
ArtifactId: artifact.ID,
}
r.sendProtobufBody(ctx, &respData)
}
func (r *artifactV4Routes) finalizeDefaultArtifact(ctx *ArtifactContext, req *FinalizeArtifactRequest, artifact *actions_model.ActionArtifact, runID int64) {
blockList, blockListErr := r.readBlockList(runID, artifact.ID)
chunks, err = listOrderedChunksForArtifact(r.fs, runID, artifact.ID, blockList)
chunks, err := listOrderedChunksForArtifact(r.fs, runID, artifact.ID, blockList)
if err != nil {
log.Error("Error list chunks: %v", errors.Join(blockListErr, err))
ctx.HTTPError(http.StatusInternalServerError, "Error list chunks")
@@ -465,21 +517,63 @@ func (r *artifactV4Routes) finalizeArtifact(ctx *ArtifactContext) {
return
}
checksum := ""
if req.Hash != nil {
checksum = req.Hash.Value
}
if err := mergeChunksForArtifact(ctx, chunks, r.fs, artifact, checksum); err != nil {
if err := mergeChunksForArtifact(ctx, chunks, r.fs, artifact, req.GetHash().GetValue()); err != nil {
log.Error("Error merge chunks: %v", err)
ctx.HTTPError(http.StatusInternalServerError, "Error merge chunks")
return
}
}
respData := FinalizeArtifactResponse{
Ok: true,
ArtifactId: artifact.ID,
func (r *artifactV4Routes) finalizeAzureServeDirect(ctx *ArtifactContext, req *FinalizeArtifactRequest, artifact *actions_model.ActionArtifact) {
checksumValue, hasSha256Checksum := strings.CutPrefix(req.GetHash().GetValue(), "sha256:")
var actualLength int64
if hasSha256Checksum {
hashSha256 := sha256.New()
obj, err := storage.ActionsArtifacts.Open(artifact.StoragePath)
if err != nil {
log.Error("Error read block: %v", err)
ctx.HTTPError(http.StatusInternalServerError, "Error read block")
return
}
defer obj.Close()
actualLength, err = io.Copy(hashSha256, obj)
if err != nil {
log.Error("Error read block: %v", err)
ctx.HTTPError(http.StatusInternalServerError, "Error read block")
return
}
rawChecksum := hashSha256.Sum(nil)
actualChecksum := hex.EncodeToString(rawChecksum)
if checksumValue != actualChecksum {
log.Error("Error merge chunks: checksum mismatch")
ctx.HTTPError(http.StatusInternalServerError, "Error merge chunks: checksum mismatch")
return
}
} else {
fi, err := storage.ActionsArtifacts.Stat(artifact.StoragePath)
if err != nil {
log.Error("Error stat block: %v", err)
ctx.HTTPError(http.StatusInternalServerError, "Error stat block")
return
}
actualLength = fi.Size()
}
if req.Size != actualLength {
log.Error("Error merge chunks: length mismatch")
ctx.HTTPError(http.StatusInternalServerError, "Error merge chunks: length mismatch")
return
}
// Update artifact metadata and status now that the upload is confirmed.
artifact.FileSize = actualLength
artifact.FileCompressedSize = actualLength
artifact.Status = actions_model.ArtifactStatusUploadConfirmed
if err := actions_model.UpdateArtifactByID(ctx, artifact.ID, artifact); err != nil {
log.Error("Error UpdateArtifactByID: %v", err)
ctx.HTTPError(http.StatusInternalServerError, "Error UpdateArtifactByID")
return
}
r.sendProtobufBody(ctx, &respData)
}
func (r *artifactV4Routes) listArtifacts(ctx *ArtifactContext) {
@@ -493,9 +587,10 @@ func (r *artifactV4Routes) listArtifacts(ctx *ArtifactContext) {
return
}
artifacts, err := db.Find[actions.ActionArtifact](ctx, actions.FindArtifactsOptions{
RunID: runID,
Status: int(actions.ArtifactStatusUploadConfirmed),
artifacts, err := db.Find[actions_model.ActionArtifact](ctx, actions_model.FindArtifactsOptions{
RunID: runID,
Status: int(actions_model.ArtifactStatusUploadConfirmed),
FinalizedArtifactsV4: true,
})
if err != nil {
log.Error("Error getting artifacts: %v", err)
@@ -507,7 +602,7 @@ func (r *artifactV4Routes) listArtifacts(ctx *ArtifactContext) {
table := map[string]*ListArtifactsResponse_MonolithArtifact{}
for _, artifact := range artifacts {
if _, ok := table[artifact.ArtifactName]; ok || req.IdFilter != nil && artifact.ID != req.IdFilter.Value || req.NameFilter != nil && artifact.ArtifactName != req.NameFilter.Value || artifact.ArtifactName+".zip" != artifact.ArtifactPath || artifact.ContentEncoding != ArtifactV4ContentEncoding {
if _, ok := table[artifact.ArtifactName]; ok || req.IdFilter != nil && artifact.ID != req.IdFilter.Value || req.NameFilter != nil && artifact.ArtifactName != req.NameFilter.Value {
table[artifact.ArtifactName] = nil
continue
}
@@ -553,7 +648,7 @@ func (r *artifactV4Routes) getSignedArtifactURL(ctx *ArtifactContext) {
ctx.HTTPError(http.StatusNotFound, "Error artifact not found")
return
}
if artifact.Status != actions.ArtifactStatusUploadConfirmed {
if artifact.Status != actions_model.ArtifactStatusUploadConfirmed {
log.Error("Error artifact not found: %s", artifact.Status.ToString())
ctx.HTTPError(http.StatusNotFound, "Error artifact not found")
return
@@ -563,9 +658,9 @@ func (r *artifactV4Routes) getSignedArtifactURL(ctx *ArtifactContext) {
if setting.Actions.ArtifactStorage.ServeDirect() {
// DO NOT USE the http POST method coming from the getSignedArtifactURL endpoint
u, err := storage.ActionsArtifacts.ServeDirectURL(artifact.StoragePath, artifact.ArtifactPath, http.MethodGet, nil)
if u != nil && err == nil {
respData.SignedUrl = u.String()
u, err := actions.GetArtifactV4ServeDirectURL(artifact, http.MethodGet)
if err == nil {
respData.SignedUrl = u
}
}
if respData.SignedUrl == "" {
@@ -587,15 +682,17 @@ func (r *artifactV4Routes) downloadArtifact(ctx *ArtifactContext) {
ctx.HTTPError(http.StatusNotFound, "Error artifact not found")
return
}
if artifact.Status != actions.ArtifactStatusUploadConfirmed {
if artifact.Status != actions_model.ArtifactStatusUploadConfirmed {
log.Error("Error artifact not found: %s", artifact.Status.ToString())
ctx.HTTPError(http.StatusNotFound, "Error artifact not found")
return
}
file, _ := r.fs.Open(artifact.StoragePath)
_, _ = io.Copy(ctx.Resp, file)
err = actions.DownloadArtifactV4ReadStorage(ctx.Base, artifact)
if err != nil {
log.Error("Error serve artifact: %v", err)
ctx.HTTPError(http.StatusInternalServerError, "failed to download artifact")
}
}
func (r *artifactV4Routes) deleteArtifact(ctx *ArtifactContext) {
@@ -617,7 +714,7 @@ func (r *artifactV4Routes) deleteArtifact(ctx *ArtifactContext) {
return
}
err = actions.SetArtifactNeedDelete(ctx, runID, req.Name)
err = actions_model.SetArtifactNeedDelete(ctx, runID, req.Name)
if err != nil {
log.Error("Error deleting artifacts: %v", err)
ctx.HTTPError(http.StatusInternalServerError, err.Error())

View File

@@ -1784,7 +1784,7 @@ func buildDownloadRawEndpoint(repo *repo_model.Repository, artifactID int64) str
func buildSigURL(ctx go_context.Context, endPoint string, artifactID int64) string {
// endPoint is a path like "api/v1/repos/owner/repo/actions/artifacts/1/zip/raw"
expires := time.Now().Add(60 * time.Minute).Unix()
uploadURL := httplib.GuessCurrentAppURL(ctx) + endPoint + "?sig=" + base64.URLEncoding.EncodeToString(buildSignature(endPoint, expires, artifactID)) + "&expires=" + strconv.FormatInt(expires, 10)
uploadURL := httplib.GuessCurrentAppURL(ctx) + endPoint + "?sig=" + base64.RawURLEncoding.EncodeToString(buildSignature(endPoint, expires, artifactID)) + "&expires=" + strconv.FormatInt(expires, 10)
return uploadURL
}
@@ -1829,18 +1829,16 @@ func DownloadArtifact(ctx *context.APIContext) {
ctx.APIError(http.StatusNotFound, "Artifact has expired")
return
}
ctx.Resp.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%s.zip; filename*=UTF-8''%s.zip", url.PathEscape(art.ArtifactName), art.ArtifactName))
if actions.IsArtifactV4(art) {
ok, err := actions.DownloadArtifactV4ServeDirectOnly(ctx.Base, art)
if ok {
return
}
if err != nil {
ctx.APIErrorInternal(err)
// @actions/toolkit asserts that downloaded artifacts of a different runid return 302
// https://github.com/actions/toolkit/blob/44d43b5490b02998bd09b0c4ff369a4cc67876c2/packages/artifact/src/internal/download/download-artifact.ts#L203-L210
if actions.DownloadArtifactV4ServeDirect(ctx.Base, art) {
return
}
// @actions/toolkit asserts a 302 for the artifact download, so we have to build a signed URL and redirect to it
// TODO: a perma link to the code for reference
redirectURL := buildSigURL(ctx, buildDownloadRawEndpoint(ctx.Repo.Repository, art.ID), art.ID)
ctx.Redirect(redirectURL, http.StatusFound)
return
@@ -1868,7 +1866,7 @@ func DownloadArtifactRaw(ctx *context.APIContext) {
sigStr := ctx.Req.URL.Query().Get("sig")
expiresStr := ctx.Req.URL.Query().Get("expires")
sigBytes, _ := base64.URLEncoding.DecodeString(sigStr)
sigBytes, _ := base64.RawURLEncoding.DecodeString(sigStr)
expires, _ := strconv.ParseInt(expiresStr, 10, 64)
expectedSig := buildSignature(buildDownloadRawEndpoint(repo, art.ID), expires, art.ID)
@@ -1887,8 +1885,6 @@ func DownloadArtifactRaw(ctx *context.APIContext) {
ctx.APIError(http.StatusNotFound, "Artifact has expired")
return
}
ctx.Resp.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%s.zip; filename*=UTF-8''%s.zip", url.PathEscape(art.ArtifactName), art.ArtifactName))
if actions.IsArtifactV4(art) {
err := actions.DownloadArtifactV4(ctx.Base, art)
if err != nil {

View File

@@ -17,9 +17,9 @@ import (
git_model "code.gitea.io/gitea/models/git"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/httpcache"
"code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/lfs"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
api "code.gitea.io/gitea/modules/structs"
@@ -151,35 +151,18 @@ func GetRawFileOrLFS(ctx *context.APIContext) {
// OK, now the blob is known to have at most 1024 (lfs pointer max size) bytes,
// we can simply read this in one go (This saves reading it twice)
dataRc, err := blob.DataAsync()
lfsPointerBuf, err := blob.GetBlobBytes(lfs.MetaFileMaxSize)
if err != nil {
ctx.APIErrorInternal(err)
return
}
buf, err := io.ReadAll(dataRc)
if err != nil {
_ = dataRc.Close()
ctx.APIErrorInternal(err)
return
}
if err := dataRc.Close(); err != nil {
log.Error("Error whilst closing blob %s reader in %-v. Error: %v", blob.ID, ctx.Repo.Repository, err)
}
// Check if the blob represents a pointer
pointer, _ := lfs.ReadPointer(bytes.NewReader(buf))
pointer, _ := lfs.ReadPointerFromBuffer(lfsPointerBuf)
// if it's not a pointer, just serve the data directly
if !pointer.IsValid() {
// First handle caching for the blob
if httpcache.HandleGenericETagPrivateCache(ctx.Req, ctx.Resp, `"`+blob.ID.String()+`"`, lastModified) {
return
}
// If not cached - serve!
common.ServeContentByReader(ctx.Base, ctx.Repo.TreePath, blob.Size(), bytes.NewReader(buf))
_, _ = ctx.Resp.Write(lfsPointerBuf)
return
}
@@ -188,12 +171,7 @@ func GetRawFileOrLFS(ctx *context.APIContext) {
// If there isn't one, just serve the data directly
if errors.Is(err, git_model.ErrLFSObjectNotExist) {
// Handle caching for the blob SHA (not the LFS object OID)
if httpcache.HandleGenericETagPrivateCache(ctx.Req, ctx.Resp, `"`+blob.ID.String()+`"`, lastModified) {
return
}
common.ServeContentByReader(ctx.Base, ctx.Repo.TreePath, blob.Size(), bytes.NewReader(buf))
_, _ = ctx.Resp.Write(lfsPointerBuf)
return
} else if err != nil {
ctx.APIErrorInternal(err)
@@ -214,14 +192,13 @@ func GetRawFileOrLFS(ctx *context.APIContext) {
}
}
lfsDataRc, err := lfs.ReadMetaObject(meta.Pointer)
lfsDataFile, err := lfs.ReadMetaObject(meta.Pointer)
if err != nil {
ctx.APIErrorInternal(err)
return
}
defer lfsDataRc.Close()
common.ServeContentByReadSeeker(ctx.Base, ctx.Repo.TreePath, lastModified, lfsDataRc)
defer lfsDataFile.Close()
httplib.ServeUserContentByFile(ctx.Base.Req, ctx.Base.Resp, lfsDataFile, httplib.ServeHeaderOptions{Filename: ctx.Repo.TreePath})
}
func getBlobForEntry(ctx *context.APIContext) (blob *git.Blob, entry *git.TreeEntry, lastModified *time.Time) {

View File

@@ -10,6 +10,7 @@ import (
actions_model "code.gitea.io/gitea/models/actions"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/actions"
"code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/context"
)
@@ -60,9 +61,8 @@ func DownloadActionsRunJobLogs(ctx *context.Base, ctxRepo *repo_model.Repository
ctx.ServeContent(reader, &context.ServeHeaderOptions{
Filename: fmt.Sprintf("%v-%v-%v.log", workflowName, curJob.Name, task.ID),
ContentLength: &task.LogSize,
ContentType: "text/plain",
ContentTypeCharset: "utf-8",
Disposition: "attachment",
ContentType: "text/plain; charset=utf-8",
ContentDisposition: httplib.ContentDispositionAttachment,
})
return nil
}

View File

@@ -4,7 +4,6 @@
package common
import (
"io"
"path"
"time"
@@ -12,7 +11,6 @@ import (
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/httpcache"
"code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/services/context"
@@ -24,29 +22,24 @@ func ServeBlob(ctx *context.Base, repo *repo_model.Repository, filePath string,
return nil
}
if err := repo.LoadOwner(ctx); err != nil {
return err
}
dataRc, err := blob.DataAsync()
if err != nil {
return err
}
defer func() {
if err = dataRc.Close(); err != nil {
log.Error("ServeBlob: Close: %v", err)
}
}()
defer dataRc.Close()
_ = repo.LoadOwner(ctx)
httplib.ServeContentByReader(ctx.Req, ctx.Resp, blob.Size(), dataRc, &httplib.ServeHeaderOptions{
if lastModified == nil {
lastModified = new(time.Time)
}
httplib.ServeUserContentByReader(ctx.Req, ctx.Resp, blob.Size(), dataRc, httplib.ServeHeaderOptions{
Filename: path.Base(filePath),
CacheIsPublic: !repo.IsPrivate && repo.Owner != nil && repo.Owner.Visibility == structs.VisibleTypePublic,
CacheIsPublic: !repo.IsPrivate && repo.Owner.Visibility == structs.VisibleTypePublic,
CacheDuration: setting.StaticCacheTime,
LastModified: *lastModified,
})
return nil
}
func ServeContentByReader(ctx *context.Base, filePath string, size int64, reader io.Reader) {
httplib.ServeContentByReader(ctx.Req, ctx.Resp, size, reader, &httplib.ServeHeaderOptions{Filename: path.Base(filePath)})
}
func ServeContentByReadSeeker(ctx *context.Base, filePath string, modTime *time.Time, reader io.ReadSeeker) {
httplib.ServeContentByReadSeeker(ctx.Req, ctx.Resp, modTime, reader, &httplib.ServeHeaderOptions{Filename: path.Base(filePath)})
}

View File

@@ -18,10 +18,10 @@ import (
func MonitorDiagnosis(ctx *context.Context) {
seconds := min(max(ctx.FormInt64("seconds"), 1), 300)
httplib.ServeSetHeaders(ctx.Resp, &httplib.ServeHeaderOptions{
ContentType: "application/zip",
Disposition: "attachment",
Filename: fmt.Sprintf("gitea-diagnosis-%s.zip", time.Now().Format("20060102-150405")),
httplib.ServeSetHeaders(ctx.Resp, httplib.ServeHeaderOptions{
ContentType: "application/zip",
Filename: fmt.Sprintf("gitea-diagnosis-%s.zip", time.Now().Format("20060102-150405")),
ContentDisposition: httplib.ContentDispositionAttachment,
})
zipWriter := zip.NewWriter(ctx.Resp)

View File

@@ -24,6 +24,7 @@ import (
"code.gitea.io/gitea/modules/actions"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/templates"
@@ -716,8 +717,9 @@ func ArtifactsDownloadView(ctx *context_module.Context) {
}
}
ctx.Resp.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%s.zip; filename*=UTF-8''%s.zip", url.PathEscape(artifactName), artifactName))
// A v4 Artifact may only contain a single file
// Multiple files are uploaded as a single file archive
// All other cases fall back to the legacy v1v3 zip handling below
if len(artifacts) == 1 && actions.IsArtifactV4(artifacts[0]) {
err := actions.DownloadArtifactV4(ctx.Base, artifacts[0])
if err != nil {
@@ -729,34 +731,41 @@ func ArtifactsDownloadView(ctx *context_module.Context) {
// Artifacts using the v1-v3 backend are stored as multiple individual files per artifact on the backend
// Those need to be zipped for download
writer := zip.NewWriter(ctx.Resp)
defer writer.Close()
for _, art := range artifacts {
ctx.Resp.Header().Set("Content-Disposition", httplib.EncodeContentDispositionAttachment(artifactName+".zip"))
zipWriter := zip.NewWriter(ctx.Resp)
defer zipWriter.Close()
writeArtifactToZip := func(art *actions_model.ActionArtifact) error {
f, err := storage.ActionsArtifacts.Open(art.StoragePath)
if err != nil {
ctx.ServerError("ActionsArtifacts.Open", err)
return
return fmt.Errorf("ActionsArtifacts.Open: %w", err)
}
defer f.Close()
var r io.ReadCloser
if art.ContentEncoding == "gzip" {
var r io.ReadCloser = f
if art.ContentEncodingOrType == actions_model.ContentEncodingV3Gzip {
r, err = gzip.NewReader(f)
if err != nil {
ctx.ServerError("gzip.NewReader", err)
return
return fmt.Errorf("gzip.NewReader: %w", err)
}
} else {
r = f
}
defer r.Close()
w, err := writer.Create(art.ArtifactPath)
w, err := zipWriter.Create(art.ArtifactPath)
if err != nil {
ctx.ServerError("writer.Create", err)
return
return fmt.Errorf("zipWriter.Create: %w", err)
}
if _, err := io.Copy(w, r); err != nil {
ctx.ServerError("io.Copy", err)
_, err = io.Copy(w, r)
if err != nil {
return fmt.Errorf("io.Copy: %w", err)
}
return nil
}
for _, art := range artifacts {
err := writeArtifactToZip(art)
if err != nil {
ctx.ServerError("writeArtifactToZip", err)
return
}
}

View File

@@ -11,10 +11,10 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unit"
"code.gitea.io/gitea/modules/httpcache"
"code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/routers/common"
"code.gitea.io/gitea/services/attachment"
"code.gitea.io/gitea/services/context"
"code.gitea.io/gitea/services/context/upload"
@@ -199,7 +199,7 @@ func ServeAttachment(ctx *context.Context, uuid string) {
}
defer fr.Close()
common.ServeContentByReadSeeker(ctx.Base, attach.Name, new(attach.CreatedUnix.AsTime()), fr)
httplib.ServeUserContentByFile(ctx.Req, ctx.Resp, fr, httplib.ServeHeaderOptions{Filename: attach.Name})
}
// GetAttachment serve attachments

View File

@@ -10,8 +10,8 @@ import (
git_model "code.gitea.io/gitea/models/git"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/httpcache"
"code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/lfs"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/routers/common"
@@ -24,28 +24,15 @@ func ServeBlobOrLFS(ctx *context.Context, blob *git.Blob, lastModified *time.Tim
return nil
}
dataRc, err := blob.DataAsync()
lfsPointerBuf, err := blob.GetBlobBytes(lfs.MetaFileMaxSize)
if err != nil {
return err
}
closed := false
defer func() {
if closed {
return
}
if err = dataRc.Close(); err != nil {
log.Error("ServeBlobOrLFS: Close: %v", err)
}
}()
pointer, _ := lfs.ReadPointer(dataRc)
pointer, _ := lfs.ReadPointerFromBuffer(lfsPointerBuf)
if pointer.IsValid() {
meta, _ := git_model.GetLFSMetaObjectByOid(ctx, ctx.Repo.Repository.ID, pointer.Oid)
if meta == nil {
if err = dataRc.Close(); err != nil {
log.Error("ServeBlobOrLFS: Close: %v", err)
}
closed = true
return common.ServeBlob(ctx.Base, ctx.Repo.Repository, ctx.Repo.TreePath, blob, lastModified)
}
if httpcache.HandleGenericETagPrivateCache(ctx.Req, ctx.Resp, `"`+pointer.Oid+`"`, meta.UpdatedUnix.AsTimePtr()) {
@@ -61,22 +48,14 @@ func ServeBlobOrLFS(ctx *context.Context, blob *git.Blob, lastModified *time.Tim
}
}
lfsDataRc, err := lfs.ReadMetaObject(meta.Pointer)
lfsDataFile, err := lfs.ReadMetaObject(meta.Pointer)
if err != nil {
return err
}
defer func() {
if err = lfsDataRc.Close(); err != nil {
log.Error("ServeBlobOrLFS: Close: %v", err)
}
}()
common.ServeContentByReadSeeker(ctx.Base, ctx.Repo.TreePath, lastModified, lfsDataRc)
defer lfsDataFile.Close()
httplib.ServeUserContentByFile(ctx.Req, ctx.Resp, lfsDataFile, httplib.ServeHeaderOptions{Filename: ctx.Repo.TreePath})
return nil
}
if err = dataRc.Close(); err != nil {
log.Error("ServeBlobOrLFS: Close: %v", err)
}
closed = true
return common.ServeBlob(ctx.Base, ctx.Repo.Repository, ctx.Repo.TreePath, blob, lastModified)
}

View File

@@ -173,12 +173,12 @@ func (b *Base) Redirect(location string, status ...int) {
type ServeHeaderOptions httplib.ServeHeaderOptions
func (b *Base) SetServeHeaders(opt *ServeHeaderOptions) {
httplib.ServeSetHeaders(b.Resp, (*httplib.ServeHeaderOptions)(opt))
httplib.ServeSetHeaders(b.Resp, *(*httplib.ServeHeaderOptions)(opt))
}
// ServeContent serves content to http request
func (b *Base) ServeContent(r io.ReadSeeker, opts *ServeHeaderOptions) {
httplib.ServeSetHeaders(b.Resp, (*httplib.ServeHeaderOptions)(opts))
httplib.ServeSetHeaders(b.Resp, *(*httplib.ServeHeaderOptions)(opts))
http.ServeContent(b.Resp, b.Req, opts.Filename, opts.LastModified, r)
}

View File

@@ -172,7 +172,7 @@ func DownloadHandler(ctx *context.Context) {
if len(filename) > 0 {
decodedFilename, err := base64.RawURLEncoding.DecodeString(filename)
if err == nil {
ctx.Resp.Header().Set("Content-Disposition", "attachment; filename=\""+string(decodedFilename)+"\"")
ctx.Resp.Header().Set("Content-Disposition", httplib.EncodeContentDispositionAttachment(string(decodedFilename)))
ctx.Resp.Header().Set("Access-Control-Expose-Headers", "Content-Disposition")
}
}

View File

@@ -328,7 +328,7 @@ func ServeRepoArchive(ctx *gitea_context.Base, archiveReq *ArchiveRequest) error
if setting.Repository.StreamArchives || len(archiveReq.Paths) > 0 {
// the header must be set before starting streaming even an error would occur,
// because errors may happen in git command and such cases aren't in our control.
httplib.ServeSetHeaders(ctx.Resp, &httplib.ServeHeaderOptions{Filename: downloadName})
httplib.ServeSetHeaders(ctx.Resp, httplib.ServeHeaderOptions{Filename: downloadName})
if err := archiveReq.Stream(ctx, ctx.Resp); err != nil && !ctx.Written() {
if gitcmd.StderrHasPrefix(err, "fatal: pathspec") {
return util.NewInvalidArgumentErrorf("path doesn't exist or is invalid")

View File

@@ -11,23 +11,28 @@ import (
"encoding/xml"
"fmt"
"io"
"mime"
"net/http"
"strings"
"testing"
"time"
actions_model "code.gitea.io/gitea/models/actions"
auth_model "code.gitea.io/gitea/models/auth"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/test"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/routers/api/actions"
actions_service "code.gitea.io/gitea/services/actions"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"google.golang.org/protobuf/encoding/protojson"
"google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/types/known/timestamppb"
@@ -48,15 +53,18 @@ func TestActionsArtifactV4UploadSingleFile(t *testing.T) {
assert.NoError(t, err)
table := []struct {
name string
version int32
blockID bool
noLength bool
append int
name string
version int32
contentType string
blockID bool
noLength bool
append int
path string
}{
{
name: "artifact",
version: 4,
path: "artifact.zip",
},
{
name: "artifact2",
@@ -98,6 +106,23 @@ func TestActionsArtifactV4UploadSingleFile(t *testing.T) {
append: 4,
blockID: true,
},
{
name: "artifact9.json",
version: 7,
contentType: "application/json",
},
{
name: "artifact10",
version: 7,
contentType: "application/zip",
path: "artifact10.zip",
},
{
name: "artifact11.zip",
version: 7,
contentType: "application/zip",
path: "artifact11.zip",
},
}
for _, entry := range table {
@@ -108,6 +133,7 @@ func TestActionsArtifactV4UploadSingleFile(t *testing.T) {
Name: entry.name,
WorkflowRunBackendId: "792",
WorkflowJobRunBackendId: "193",
MimeType: util.Iif(entry.contentType != "", wrapperspb.String(entry.contentType), nil),
})).AddTokenAuth(token)
resp := MakeRequest(t, req, http.StatusOK)
var uploadResp actions.CreateArtifactResponse
@@ -120,9 +146,8 @@ func TestActionsArtifactV4UploadSingleFile(t *testing.T) {
blocks := make([]string, 0, util.Iif(entry.blockID, entry.append+1, 0))
// get upload url
idx := strings.Index(uploadResp.SignedUploadUrl, "/twirp/")
for i := range entry.append + 1 {
url := uploadResp.SignedUploadUrl[idx:]
url := uploadResp.SignedUploadUrl
// See https://learn.microsoft.com/en-us/rest/api/storageservices/append-block
// See https://learn.microsoft.com/en-us/rest/api/storageservices/put-block
if entry.blockID {
@@ -146,7 +171,7 @@ func TestActionsArtifactV4UploadSingleFile(t *testing.T) {
if entry.blockID && entry.append > 0 {
// https://learn.microsoft.com/en-us/rest/api/storageservices/put-block-list
blockListURL := uploadResp.SignedUploadUrl[idx:] + "&comp=blocklist"
blockListURL := uploadResp.SignedUploadUrl + "&comp=blocklist"
// upload artifact blockList
blockList := &actions.BlockList{
Latest: blocks,
@@ -174,6 +199,19 @@ func TestActionsArtifactV4UploadSingleFile(t *testing.T) {
var finalizeResp actions.FinalizeArtifactResponse
protojson.Unmarshal(resp.Body.Bytes(), &finalizeResp)
assert.True(t, finalizeResp.Ok)
artifact := unittest.AssertExistsAndLoadBean(t, &actions_model.ActionArtifact{ID: finalizeResp.ArtifactId})
if entry.contentType != "" {
assert.Equal(t, entry.contentType, artifact.ContentEncodingOrType)
} else {
assert.Equal(t, "application/zip", artifact.ContentEncodingOrType)
}
if entry.path != "" {
assert.Equal(t, entry.path, artifact.ArtifactPath)
}
assert.Equal(t, actions_model.ArtifactStatusUploadConfirmed, artifact.Status)
assert.Equal(t, int64(entry.append+1)*1024, artifact.FileSize)
assert.Equal(t, int64(entry.append+1)*1024, artifact.FileCompressedSize)
})
}
}
@@ -198,8 +236,7 @@ func TestActionsArtifactV4UploadSingleFileWrongChecksum(t *testing.T) {
assert.Contains(t, uploadResp.SignedUploadUrl, "/twirp/github.actions.results.api.v1.ArtifactService/UploadArtifact")
// get upload url
idx := strings.Index(uploadResp.SignedUploadUrl, "/twirp/")
url := uploadResp.SignedUploadUrl[idx:] + "&comp=block"
url := uploadResp.SignedUploadUrl + "&comp=block"
// upload artifact chunk
body := strings.Repeat("B", 1024)
@@ -243,8 +280,7 @@ func TestActionsArtifactV4UploadSingleFileWithRetentionDays(t *testing.T) {
assert.Contains(t, uploadResp.SignedUploadUrl, "/twirp/github.actions.results.api.v1.ArtifactService/UploadArtifact")
// get upload url
idx := strings.Index(uploadResp.SignedUploadUrl, "/twirp/")
url := uploadResp.SignedUploadUrl[idx:] + "&comp=block"
url := uploadResp.SignedUploadUrl + "&comp=block"
// upload artifact chunk
body := strings.Repeat("A", 1024)
@@ -290,9 +326,8 @@ func TestActionsArtifactV4UploadSingleFileWithPotentialHarmfulBlockID(t *testing
assert.Contains(t, uploadResp.SignedUploadUrl, "/twirp/github.actions.results.api.v1.ArtifactService/UploadArtifact")
// get upload urls
idx := strings.Index(uploadResp.SignedUploadUrl, "/twirp/")
url := uploadResp.SignedUploadUrl[idx:] + "&comp=block&blockid=%2f..%2fmyfile"
blockListURL := uploadResp.SignedUploadUrl[idx:] + "&comp=blocklist"
url := uploadResp.SignedUploadUrl + "&comp=block&blockid=%2f..%2fmyfile"
blockListURL := uploadResp.SignedUploadUrl + "&comp=blocklist"
// upload artifact chunk
body := strings.Repeat("A", 1024)
@@ -339,63 +374,126 @@ func TestActionsArtifactV4UploadSingleFileWithChunksOutOfOrder(t *testing.T) {
token, err := actions_service.CreateAuthorizationToken(48, 792, 193)
assert.NoError(t, err)
// acquire artifact upload url
req := NewRequestWithBody(t, "POST", "/twirp/github.actions.results.api.v1.ArtifactService/CreateArtifact", toProtoJSON(&actions.CreateArtifactRequest{
Version: 4,
Name: "artifactWithChunksOutOfOrder",
WorkflowRunBackendId: "792",
WorkflowJobRunBackendId: "193",
})).AddTokenAuth(token)
resp := MakeRequest(t, req, http.StatusOK)
var uploadResp actions.CreateArtifactResponse
protojson.Unmarshal(resp.Body.Bytes(), &uploadResp)
assert.True(t, uploadResp.Ok)
assert.Contains(t, uploadResp.SignedUploadUrl, "/twirp/github.actions.results.api.v1.ArtifactService/UploadArtifact")
// get upload urls
idx := strings.Index(uploadResp.SignedUploadUrl, "/twirp/")
block1URL := uploadResp.SignedUploadUrl[idx:] + "&comp=block&blockid=block1"
block2URL := uploadResp.SignedUploadUrl[idx:] + "&comp=block&blockid=block2"
blockListURL := uploadResp.SignedUploadUrl[idx:] + "&comp=blocklist"
// upload artifact chunks
bodyb := strings.Repeat("B", 1024)
req = NewRequestWithBody(t, "PUT", block2URL, strings.NewReader(bodyb))
MakeRequest(t, req, http.StatusCreated)
bodya := strings.Repeat("A", 1024)
req = NewRequestWithBody(t, "PUT", block1URL, strings.NewReader(bodya))
MakeRequest(t, req, http.StatusCreated)
// upload artifact blockList
blockList := &actions.BlockList{
Latest: []string{
"block1",
"block2",
},
table := []struct {
name string
artifactName string
serveDirect bool
contentType string
}{
{name: "Upload-Zip", artifactName: "artifact-v4-upload", contentType: ""},
{name: "Upload-Pdf", artifactName: "report-upload.pdf", contentType: "application/pdf"},
{name: "Upload-Html", artifactName: "report-upload.html", contentType: "application/html"},
{name: "ServeDirect-Zip", artifactName: "artifact-v4-upload-serve-direct", contentType: "", serveDirect: true},
{name: "ServeDirect-Pdf", artifactName: "report-upload-serve-direct.pdf", contentType: "application/pdf", serveDirect: true},
{name: "ServeDirect-Html", artifactName: "report-upload-serve-direct.html", contentType: "application/html", serveDirect: true},
}
rawBlockList, err := xml.Marshal(blockList)
assert.NoError(t, err)
req = NewRequestWithBody(t, "PUT", blockListURL, bytes.NewReader(rawBlockList))
MakeRequest(t, req, http.StatusCreated)
t.Logf("Create artifact confirm")
for _, entry := range table {
t.Run(entry.name, func(t *testing.T) {
// Only AzureBlobStorageType supports ServeDirect Uploads
switch setting.Actions.ArtifactStorage.Type {
case setting.AzureBlobStorageType:
defer test.MockVariableValue(&setting.Actions.ArtifactStorage.AzureBlobConfig.ServeDirect, entry.serveDirect)()
default:
if entry.serveDirect {
t.Skip()
}
}
// acquire artifact upload url
req := NewRequestWithBody(t, "POST", "/twirp/github.actions.results.api.v1.ArtifactService/CreateArtifact", toProtoJSON(&actions.CreateArtifactRequest{
Version: util.Iif[int32](entry.contentType != "", 7, 4),
Name: entry.artifactName,
WorkflowRunBackendId: "792",
WorkflowJobRunBackendId: "193",
MimeType: util.Iif(entry.contentType != "", wrapperspb.String(entry.contentType), nil),
})).AddTokenAuth(token)
resp := MakeRequest(t, req, http.StatusOK)
var uploadResp actions.CreateArtifactResponse
protojson.Unmarshal(resp.Body.Bytes(), &uploadResp)
assert.True(t, uploadResp.Ok)
if !entry.serveDirect {
assert.Contains(t, uploadResp.SignedUploadUrl, "/twirp/github.actions.results.api.v1.ArtifactService/UploadArtifact")
}
sha := sha256.Sum256([]byte(bodya + bodyb))
// get upload urls
block1URL := uploadResp.SignedUploadUrl + "&comp=block&blockid=" + base64.RawURLEncoding.EncodeToString([]byte("block1"))
block2URL := uploadResp.SignedUploadUrl + "&comp=block&blockid=" + base64.RawURLEncoding.EncodeToString([]byte("block2"))
blockListURL := uploadResp.SignedUploadUrl + "&comp=blocklist"
// confirm artifact upload
req = NewRequestWithBody(t, "POST", "/twirp/github.actions.results.api.v1.ArtifactService/FinalizeArtifact", toProtoJSON(&actions.FinalizeArtifactRequest{
Name: "artifactWithChunksOutOfOrder",
Size: 2048,
Hash: wrapperspb.String("sha256:" + hex.EncodeToString(sha[:])),
WorkflowRunBackendId: "792",
WorkflowJobRunBackendId: "193",
})).
AddTokenAuth(token)
resp = MakeRequest(t, req, http.StatusOK)
var finalizeResp actions.FinalizeArtifactResponse
protojson.Unmarshal(resp.Body.Bytes(), &finalizeResp)
assert.True(t, finalizeResp.Ok)
// upload artifact chunks
bodyb := strings.Repeat("B", 1024)
req = NewRequestWithBody(t, "PUT", block2URL, strings.NewReader(bodyb))
if entry.serveDirect {
req.Request.RequestURI = ""
nresp, err := http.DefaultClient.Do(req.Request)
require.NoError(t, err)
nresp.Body.Close()
require.Equal(t, http.StatusCreated, nresp.StatusCode)
} else {
MakeRequest(t, req, http.StatusCreated)
}
bodya := strings.Repeat("A", 1024)
req = NewRequestWithBody(t, "PUT", block1URL, strings.NewReader(bodya))
if entry.serveDirect {
req.Request.RequestURI = ""
nresp, err := http.DefaultClient.Do(req.Request)
require.NoError(t, err)
nresp.Body.Close()
require.Equal(t, http.StatusCreated, nresp.StatusCode)
} else {
MakeRequest(t, req, http.StatusCreated)
}
// upload artifact blockList
blockList := &actions.BlockList{
Latest: []string{
base64.RawURLEncoding.EncodeToString([]byte("block1")),
base64.RawURLEncoding.EncodeToString([]byte("block2")),
},
}
rawBlockList, err := xml.Marshal(blockList)
assert.NoError(t, err)
req = NewRequestWithBody(t, "PUT", blockListURL, bytes.NewReader(rawBlockList))
if entry.serveDirect {
req.Request.RequestURI = ""
nresp, err := http.DefaultClient.Do(req.Request)
require.NoError(t, err)
nresp.Body.Close()
require.Equal(t, http.StatusCreated, nresp.StatusCode)
} else {
MakeRequest(t, req, http.StatusCreated)
}
t.Logf("Create artifact confirm")
sha := sha256.Sum256([]byte(bodya + bodyb))
// confirm artifact upload
req = NewRequestWithBody(t, "POST", "/twirp/github.actions.results.api.v1.ArtifactService/FinalizeArtifact", toProtoJSON(&actions.FinalizeArtifactRequest{
Name: entry.artifactName,
Size: 2048,
Hash: wrapperspb.String("sha256:" + hex.EncodeToString(sha[:])),
WorkflowRunBackendId: "792",
WorkflowJobRunBackendId: "193",
})).
AddTokenAuth(token)
resp = MakeRequest(t, req, http.StatusOK)
var finalizeResp actions.FinalizeArtifactResponse
protojson.Unmarshal(resp.Body.Bytes(), &finalizeResp)
assert.True(t, finalizeResp.Ok)
artifact := unittest.AssertExistsAndLoadBean(t, &actions_model.ActionArtifact{ID: finalizeResp.ArtifactId})
if entry.contentType != "" {
assert.Equal(t, entry.contentType, artifact.ContentEncodingOrType)
} else {
assert.Equal(t, "application/zip", artifact.ContentEncodingOrType)
}
assert.Equal(t, actions_model.ArtifactStatusUploadConfirmed, artifact.Status)
assert.Equal(t, int64(2048), artifact.FileSize)
assert.Equal(t, int64(2048), artifact.FileCompressedSize)
})
}
}
func TestActionsArtifactV4DownloadSingle(t *testing.T) {
@@ -404,33 +502,97 @@ func TestActionsArtifactV4DownloadSingle(t *testing.T) {
token, err := actions_service.CreateAuthorizationToken(48, 792, 193)
assert.NoError(t, err)
// list artifacts by name
req := NewRequestWithBody(t, "POST", "/twirp/github.actions.results.api.v1.ArtifactService/ListArtifacts", toProtoJSON(&actions.ListArtifactsRequest{
NameFilter: wrapperspb.String("artifact-v4-download"),
WorkflowRunBackendId: "792",
WorkflowJobRunBackendId: "193",
})).AddTokenAuth(token)
resp := MakeRequest(t, req, http.StatusOK)
var listResp actions.ListArtifactsResponse
protojson.Unmarshal(resp.Body.Bytes(), &listResp)
assert.Len(t, listResp.Artifacts, 1)
table := []struct {
Name string
ArtifactName string
FileName string
ServeDirect bool
ContentType string
ContentDisposition string
}{
{Name: "Download-Zip", ArtifactName: "artifact-v4-download", FileName: "artifact-v4-download.zip", ContentType: "application/zip"},
{Name: "Download-Pdf", ArtifactName: "report.pdf", FileName: "report.pdf", ContentType: "application/pdf"},
{Name: "Download-Html", ArtifactName: "report.html", FileName: "report.html", ContentType: "application/html"},
{Name: "ServeDirect-Zip", ArtifactName: "artifact-v4-download", FileName: "artifact-v4-download.zip", ContentType: "application/zip", ServeDirect: true},
{Name: "ServeDirect-Pdf", ArtifactName: "report.pdf", FileName: "report.pdf", ContentType: "application/pdf", ServeDirect: true},
{Name: "ServeDirect-Html", ArtifactName: "report.html", FileName: "report.html", ContentType: "application/html", ServeDirect: true},
}
// acquire artifact download url
req = NewRequestWithBody(t, "POST", "/twirp/github.actions.results.api.v1.ArtifactService/GetSignedArtifactURL", toProtoJSON(&actions.GetSignedArtifactURLRequest{
Name: "artifact-v4-download",
WorkflowRunBackendId: "792",
WorkflowJobRunBackendId: "193",
})).
AddTokenAuth(token)
resp = MakeRequest(t, req, http.StatusOK)
var finalizeResp actions.GetSignedArtifactURLResponse
protojson.Unmarshal(resp.Body.Bytes(), &finalizeResp)
assert.NotEmpty(t, finalizeResp.SignedUrl)
for _, entry := range table {
t.Run(entry.Name, func(t *testing.T) {
switch setting.Actions.ArtifactStorage.Type {
case setting.AzureBlobStorageType:
defer test.MockVariableValue(&setting.Actions.ArtifactStorage.AzureBlobConfig.ServeDirect, entry.ServeDirect)()
case setting.MinioStorageType:
defer test.MockVariableValue(&setting.Actions.ArtifactStorage.MinioConfig.ServeDirect, entry.ServeDirect)()
default:
if entry.ServeDirect {
t.Skip()
}
}
req = NewRequest(t, "GET", finalizeResp.SignedUrl)
resp = MakeRequest(t, req, http.StatusOK)
body := strings.Repeat("D", 1024)
assert.Equal(t, body, resp.Body.String())
// list artifacts by name
req := NewRequestWithBody(t, "POST", "/twirp/github.actions.results.api.v1.ArtifactService/ListArtifacts", toProtoJSON(&actions.ListArtifactsRequest{
NameFilter: wrapperspb.String(entry.ArtifactName),
WorkflowRunBackendId: "792",
WorkflowJobRunBackendId: "193",
})).AddTokenAuth(token)
resp := MakeRequest(t, req, http.StatusOK)
var listResp actions.ListArtifactsResponse
require.NoError(t, protojson.Unmarshal(resp.Body.Bytes(), &listResp))
require.Len(t, listResp.Artifacts, 1)
// list artifacts by id
req = NewRequestWithBody(t, "POST", "/twirp/github.actions.results.api.v1.ArtifactService/ListArtifacts", toProtoJSON(&actions.ListArtifactsRequest{
IdFilter: wrapperspb.Int64(listResp.Artifacts[0].DatabaseId),
WorkflowRunBackendId: "792",
WorkflowJobRunBackendId: "193",
})).AddTokenAuth(token)
resp = MakeRequest(t, req, http.StatusOK)
require.NoError(t, protojson.Unmarshal(resp.Body.Bytes(), &listResp))
assert.Len(t, listResp.Artifacts, 1)
// acquire artifact download url
req = NewRequestWithBody(t, "POST", "/twirp/github.actions.results.api.v1.ArtifactService/GetSignedArtifactURL", toProtoJSON(&actions.GetSignedArtifactURLRequest{
Name: entry.ArtifactName,
WorkflowRunBackendId: "792",
WorkflowJobRunBackendId: "193",
})).
AddTokenAuth(token)
resp = MakeRequest(t, req, http.StatusOK)
var finalizeResp actions.GetSignedArtifactURLResponse
require.NoError(t, protojson.Unmarshal(resp.Body.Bytes(), &finalizeResp))
assert.NotEmpty(t, finalizeResp.SignedUrl)
body := strings.Repeat("D", 1024)
var contentDisposition string
if entry.ServeDirect {
externalReq, err := http.NewRequestWithContext(t.Context(), http.MethodGet, finalizeResp.SignedUrl, nil)
require.NoError(t, err)
externalResp, err := http.DefaultClient.Do(externalReq)
require.NoError(t, err)
assert.Equal(t, http.StatusOK, externalResp.StatusCode)
assert.Equal(t, entry.ContentType, externalResp.Header.Get("Content-Type"))
contentDisposition = externalResp.Header.Get("Content-Disposition")
buf := make([]byte, 1024)
n, err := io.ReadAtLeast(externalResp.Body, buf, len(buf))
externalResp.Body.Close()
require.NoError(t, err)
assert.Equal(t, len(buf), n)
assert.Equal(t, body, string(buf))
} else {
req = NewRequest(t, "GET", finalizeResp.SignedUrl)
resp = MakeRequest(t, req, http.StatusOK)
assert.Equal(t, entry.ContentType, resp.Header().Get("Content-Type"))
contentDisposition = resp.Header().Get("Content-Disposition")
assert.Equal(t, body, resp.Body.String())
}
disposition, param, err := mime.ParseMediaType(contentDisposition)
require.NoError(t, err)
assert.Equal(t, "inline", disposition)
assert.Equal(t, entry.FileName, param["filename"])
})
}
}
func TestActionsArtifactV4RunDownloadSinglePublicApi(t *testing.T) {
@@ -561,7 +723,7 @@ func TestActionsArtifactV4ListAndGetPublicApi(t *testing.T) {
for _, artifact := range listResp.Entries {
assert.Contains(t, artifact.URL, fmt.Sprintf("/api/v1/repos/%s/actions/artifacts/%d", repo.FullName(), artifact.ID))
assert.Contains(t, artifact.ArchiveDownloadURL, fmt.Sprintf("/api/v1/repos/%s/actions/artifacts/%d/zip", repo.FullName(), artifact.ID))
req = NewRequestWithBody(t, "GET", listResp.Entries[0].URL, nil).
req = NewRequestWithBody(t, "GET", artifact.URL, nil).
AddTokenAuth(token)
resp = MakeRequest(t, req, http.StatusOK)