author    GitLab Bot <gitlab-bot@gitlab.com>  2022-12-23 15:09:45 +0000
committer GitLab Bot <gitlab-bot@gitlab.com>  2022-12-23 15:09:45 +0000
commit    99c1dfd5e3f39868d65cb006078a8d091992fa54 (patch)
tree      18d970852272c6d5ba303911da13d16cbff2514c /workhorse
parent    23634aa773e10e7df79247fb6fddbce88b825909 (diff)
download  gitlab-ce-99c1dfd5e3f39868d65cb006078a8d091992fa54.tar.gz
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'workhorse')
-rw-r--r--  workhorse/internal/api/api.go                                 2
-rw-r--r--  workhorse/internal/upload/body_uploader_test.go              12
-rw-r--r--  workhorse/internal/upload/destination/destination.go          2
-rw-r--r--  workhorse/internal/upload/destination/destination_test.go    12
-rw-r--r--  workhorse/internal/upload/destination/multi_hash.go          39
-rw-r--r--  workhorse/internal/upload/destination/multi_hash_test.go     52
-rw-r--r--  workhorse/internal/upload/destination/upload_opts.go         25
-rw-r--r--  workhorse/internal/upload/uploads_test.go                      9
-rw-r--r--  workhorse/upload_test.go                                     163
9 files changed, 187 insertions, 129 deletions
diff --git a/workhorse/internal/api/api.go b/workhorse/internal/api/api.go
index 92e88ae3f70..8a7fb191ec4 100644
--- a/workhorse/internal/api/api.go
+++ b/workhorse/internal/api/api.go
@@ -163,6 +163,8 @@ type Response struct {
ProcessLsif bool
// The maximum accepted size in bytes of the upload
MaximumSize int64
+ // A list of permitted hash functions. If empty, then all available are permitted.
+ UploadHashFunctions []string
}
type GitalyServer struct {
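
The new field is populated from the JSON body of the Rails authorize response (the test-server changes in workhorse/upload_test.go further down send `"UploadHashFunctions": ["sha256","sha512"]`). A minimal, self-contained sketch of how such a body decodes, using a trimmed-down stand-in for api.Response rather than the real type:

    // Hypothetical illustration only: authorizeResponse is a stand-in for
    // api.Response that keeps just the fields relevant to this change.
    package main

    import (
    	"encoding/json"
    	"fmt"
    )

    type authorizeResponse struct {
    	TempPath            string
    	MaximumSize         int64
    	UploadHashFunctions []string
    }

    func main() {
    	// Body mirrors the shape used by the test server below; values are illustrative.
    	body := `{"TempPath":"/tmp/uploads","MaximumSize":1000000,"UploadHashFunctions":["sha256","sha512"]}`

    	var resp authorizeResponse
    	if err := json.Unmarshal([]byte(body), &resp); err != nil {
    		panic(err)
    	}
    	fmt.Println(resp.UploadHashFunctions) // prints [sha256 sha512]
    }
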
diff --git a/workhorse/internal/upload/body_uploader_test.go b/workhorse/internal/upload/body_uploader_test.go
index eff33757845..837d119e72e 100644
--- a/workhorse/internal/upload/body_uploader_test.go
+++ b/workhorse/internal/upload/body_uploader_test.go
@@ -92,11 +92,7 @@ func echoProxy(t *testing.T, expectedBodyLength int) http.Handler {
require.Equal(t, "application/x-www-form-urlencoded", r.Header.Get("Content-Type"), "Wrong Content-Type header")
- if destination.FIPSEnabled() {
- require.NotContains(t, r.PostForm, "file.md5")
- } else {
- require.Contains(t, r.PostForm, "file.md5")
- }
+ require.Contains(t, r.PostForm, "file.md5")
require.Contains(t, r.PostForm, "file.sha1")
require.Contains(t, r.PostForm, "file.sha256")
require.Contains(t, r.PostForm, "file.sha512")
@@ -123,11 +119,7 @@ func echoProxy(t *testing.T, expectedBodyLength int) http.Handler {
require.Contains(t, uploadFields, "remote_url")
require.Contains(t, uploadFields, "remote_id")
require.Contains(t, uploadFields, "size")
- if destination.FIPSEnabled() {
- require.NotContains(t, uploadFields, "md5")
- } else {
- require.Contains(t, uploadFields, "md5")
- }
+ require.Contains(t, uploadFields, "md5")
require.Contains(t, uploadFields, "sha1")
require.Contains(t, uploadFields, "sha256")
require.Contains(t, uploadFields, "sha512")
diff --git a/workhorse/internal/upload/destination/destination.go b/workhorse/internal/upload/destination/destination.go
index 0a7e2493cd6..a9fb81540d5 100644
--- a/workhorse/internal/upload/destination/destination.go
+++ b/workhorse/internal/upload/destination/destination.go
@@ -121,7 +121,7 @@ func Upload(ctx context.Context, reader io.Reader, size int64, name string, opts
}
uploadStartTime := time.Now()
defer func() { fh.uploadDuration = time.Since(uploadStartTime).Seconds() }()
- hashes := newMultiHash()
+ hashes := newMultiHash(opts.UploadHashFunctions)
reader = io.TeeReader(reader, hashes.Writer)
var clientMode string
diff --git a/workhorse/internal/upload/destination/destination_test.go b/workhorse/internal/upload/destination/destination_test.go
index 928ffdb9f5a..69dd02ca7c2 100644
--- a/workhorse/internal/upload/destination/destination_test.go
+++ b/workhorse/internal/upload/destination/destination_test.go
@@ -215,11 +215,7 @@ func TestUpload(t *testing.T) {
}
require.Equal(t, test.ObjectSize, fh.Size)
- if FIPSEnabled() {
- require.Empty(t, fh.MD5())
- } else {
- require.Equal(t, test.ObjectMD5, fh.MD5())
- }
+ require.Equal(t, test.ObjectMD5, fh.MD5())
require.Equal(t, test.ObjectSHA256, fh.SHA256())
require.Equal(t, expectedPuts, osStub.PutsCnt(), "ObjectStore PutObject count mismatch")
@@ -508,11 +504,7 @@ func checkFileHandlerWithFields(t *testing.T, fh *FileHandler, fields map[string
require.Equal(t, fh.RemoteURL, fields[key("remote_url")])
require.Equal(t, fh.RemoteID, fields[key("remote_id")])
require.Equal(t, strconv.FormatInt(test.ObjectSize, 10), fields[key("size")])
- if FIPSEnabled() {
- require.Empty(t, fields[key("md5")])
- } else {
- require.Equal(t, test.ObjectMD5, fields[key("md5")])
- }
+ require.Equal(t, test.ObjectMD5, fields[key("md5")])
require.Equal(t, test.ObjectSHA1, fields[key("sha1")])
require.Equal(t, test.ObjectSHA256, fields[key("sha256")])
require.Equal(t, test.ObjectSHA512, fields[key("sha512")])
diff --git a/workhorse/internal/upload/destination/multi_hash.go b/workhorse/internal/upload/destination/multi_hash.go
index 8d5bf4424a8..3f8b0cbd903 100644
--- a/workhorse/internal/upload/destination/multi_hash.go
+++ b/workhorse/internal/upload/destination/multi_hash.go
@@ -8,9 +8,6 @@ import (
"encoding/hex"
"hash"
"io"
- "os"
-
- "gitlab.com/gitlab-org/labkit/fips"
)
var hashFactories = map[string](func() hash.Hash){
@@ -20,39 +17,39 @@ var hashFactories = map[string](func() hash.Hash){
"sha512": sha512.New,
}
-var fipsHashFactories = map[string](func() hash.Hash){
- "sha1": sha1.New,
- "sha256": sha256.New,
- "sha512": sha512.New,
-}
-
func factories() map[string](func() hash.Hash) {
- if FIPSEnabled() {
- return fipsHashFactories
- }
-
return hashFactories
}
-func FIPSEnabled() bool {
- if fips.Enabled() {
+type multiHash struct {
+ io.Writer
+ hashes map[string]hash.Hash
+}
+
+func permittedHashFunction(hashFunctions []string, hash string) bool {
+ if len(hashFunctions) == 0 {
return true
}
- return os.Getenv("WORKHORSE_TEST_FIPS_ENABLED") == "1"
-}
+ for _, name := range hashFunctions {
+ if name == hash {
+ return true
+ }
+ }
-type multiHash struct {
- io.Writer
- hashes map[string]hash.Hash
+ return false
}
-func newMultiHash() (m *multiHash) {
+func newMultiHash(hashFunctions []string) (m *multiHash) {
m = &multiHash{}
m.hashes = make(map[string]hash.Hash)
var writers []io.Writer
for hash, hashFactory := range factories() {
+ if !permittedHashFunction(hashFunctions, hash) {
+ continue
+ }
+
writer := hashFactory()
m.hashes[hash] = writer
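
Taken together, permittedHashFunction and the updated newMultiHash mean an empty allowlist keeps the previous behaviour (every available hash is computed), while a non-empty list drops any hash not named in it. A standalone sketch of that behaviour, with helper names local to the sketch rather than taken from this package:

    package main

    import (
    	"crypto/sha256"
    	"crypto/sha512"
    	"encoding/hex"
    	"fmt"
    	"hash"
    	"io"
    	"strings"
    )

    // permitted mirrors the allowlist rule above: an empty list allows everything.
    func permitted(allowed []string, name string) bool {
    	if len(allowed) == 0 {
    		return true
    	}
    	for _, a := range allowed {
    		if a == name {
    			return true
    		}
    	}
    	return false
    }

    func main() {
    	factories := map[string]func() hash.Hash{
    		"sha256": sha256.New,
    		"sha512": sha512.New,
    	}
    	allowed := []string{"sha256"} // e.g. the value of apiResponse.UploadHashFunctions

    	// Wire only the permitted hashes into a single writer, as newMultiHash does.
    	hashes := map[string]hash.Hash{}
    	var writers []io.Writer
    	for name, factory := range factories {
    		if !permitted(allowed, name) {
    			continue
    		}
    		h := factory()
    		hashes[name] = h
    		writers = append(writers, h)
    	}

    	// Hash the body while it streams through, as Upload does with io.TeeReader.
    	body := io.TeeReader(strings.NewReader("upload body"), io.MultiWriter(writers...))
    	if _, err := io.Copy(io.Discard, body); err != nil {
    		panic(err)
    	}
    	for name, h := range hashes {
    		fmt.Println(name, hex.EncodeToString(h.Sum(nil))) // only sha256 is printed
    	}
    }
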
diff --git a/workhorse/internal/upload/destination/multi_hash_test.go b/workhorse/internal/upload/destination/multi_hash_test.go
new file mode 100644
index 00000000000..9a976f5d25d
--- /dev/null
+++ b/workhorse/internal/upload/destination/multi_hash_test.go
@@ -0,0 +1,52 @@
+package destination
+
+import (
+ "sort"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestNewMultiHash(t *testing.T) {
+ tests := []struct {
+ name string
+ allowedHashes []string
+ expectedHashes []string
+ }{
+ {
+ name: "default",
+ allowedHashes: nil,
+ expectedHashes: []string{"md5", "sha1", "sha256", "sha512"},
+ },
+ {
+ name: "blank",
+ allowedHashes: []string{},
+ expectedHashes: []string{"md5", "sha1", "sha256", "sha512"},
+ },
+ {
+ name: "no MD5",
+ allowedHashes: []string{"sha1", "sha256", "sha512"},
+ expectedHashes: []string{"sha1", "sha256", "sha512"},
+ },
+
+ {
+ name: "unlisted hash",
+ allowedHashes: []string{"sha1", "sha256", "sha512", "sha3-256"},
+ expectedHashes: []string{"sha1", "sha256", "sha512"},
+ },
+ }
+
+ for _, test := range tests {
+ mh := newMultiHash(test.allowedHashes)
+
+ require.Equal(t, len(test.expectedHashes), len(mh.hashes))
+
+ var keys []string
+ for key := range mh.hashes {
+ keys = append(keys, key)
+ }
+
+ sort.Strings(keys)
+ require.Equal(t, test.expectedHashes, keys)
+ }
+}
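
The new table-driven test can be run on its own; assuming a standard Go toolchain, something like the following from the workhorse/ directory should exercise it:

    go test ./internal/upload/destination/ -run TestNewMultiHash -v
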
diff --git a/workhorse/internal/upload/destination/upload_opts.go b/workhorse/internal/upload/destination/upload_opts.go
index d81fa10e97c..72efaebc16c 100644
--- a/workhorse/internal/upload/destination/upload_opts.go
+++ b/workhorse/internal/upload/destination/upload_opts.go
@@ -63,6 +63,8 @@ type UploadOpts struct {
PresignedCompleteMultipart string
// PresignedAbortMultipart is a presigned URL for AbortMultipartUpload
PresignedAbortMultipart string
+ // UploadHashFunctions contains a list of allowed hash functions (md5, sha1, etc.)
+ UploadHashFunctions []string
}
// UseWorkhorseClientEnabled checks if the options require direct access to object storage
@@ -92,17 +94,18 @@ func GetOpts(apiResponse *api.Response) (*UploadOpts, error) {
}
opts := UploadOpts{
- LocalTempPath: apiResponse.TempPath,
- RemoteID: apiResponse.RemoteObject.ID,
- RemoteURL: apiResponse.RemoteObject.GetURL,
- PresignedPut: apiResponse.RemoteObject.StoreURL,
- PresignedDelete: apiResponse.RemoteObject.DeleteURL,
- SkipDelete: apiResponse.RemoteObject.SkipDelete,
- PutHeaders: apiResponse.RemoteObject.PutHeaders,
- UseWorkhorseClient: apiResponse.RemoteObject.UseWorkhorseClient,
- RemoteTempObjectID: apiResponse.RemoteObject.RemoteTempObjectID,
- Deadline: time.Now().Add(timeout),
- MaximumSize: apiResponse.MaximumSize,
+ LocalTempPath: apiResponse.TempPath,
+ RemoteID: apiResponse.RemoteObject.ID,
+ RemoteURL: apiResponse.RemoteObject.GetURL,
+ PresignedPut: apiResponse.RemoteObject.StoreURL,
+ PresignedDelete: apiResponse.RemoteObject.DeleteURL,
+ SkipDelete: apiResponse.RemoteObject.SkipDelete,
+ PutHeaders: apiResponse.RemoteObject.PutHeaders,
+ UseWorkhorseClient: apiResponse.RemoteObject.UseWorkhorseClient,
+ RemoteTempObjectID: apiResponse.RemoteObject.RemoteTempObjectID,
+ Deadline: time.Now().Add(timeout),
+ MaximumSize: apiResponse.MaximumSize,
+ UploadHashFunctions: apiResponse.UploadHashFunctions,
}
if opts.LocalTempPath != "" && opts.RemoteID != "" {
diff --git a/workhorse/internal/upload/uploads_test.go b/workhorse/internal/upload/uploads_test.go
index cc786079e36..69baa2dab6e 100644
--- a/workhorse/internal/upload/uploads_test.go
+++ b/workhorse/internal/upload/uploads_test.go
@@ -24,7 +24,6 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/proxy"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upstream/roundtripper"
)
@@ -111,13 +110,7 @@ func TestUploadHandlerRewritingMultiPartData(t *testing.T) {
expectedLen := 12
- if destination.FIPSEnabled() {
- expectedLen--
- require.Empty(t, r.FormValue("file.md5"), "file hash md5")
- } else {
- require.Equal(t, "098f6bcd4621d373cade4e832627b4f6", r.FormValue("file.md5"), "file hash md5")
- }
-
+ require.Equal(t, "098f6bcd4621d373cade4e832627b4f6", r.FormValue("file.md5"), "file hash md5")
require.Len(t, r.MultipartForm.Value, expectedLen, "multipart form values")
w.WriteHeader(202)
diff --git a/workhorse/upload_test.go b/workhorse/upload_test.go
index 22abed25928..333301091c7 100644
--- a/workhorse/upload_test.go
+++ b/workhorse/upload_test.go
@@ -20,7 +20,6 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/secret"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)
type uploadArtifactsFunction func(url, contentType string, body io.Reader) (*http.Response, string, error)
@@ -66,13 +65,20 @@ func expectSignedRequest(t *testing.T, r *http.Request) {
require.NoError(t, err)
}
-func uploadTestServer(t *testing.T, authorizeTests func(r *http.Request), extraTests func(r *http.Request)) *httptest.Server {
+func uploadTestServer(t *testing.T, allowedHashFunctions []string, authorizeTests func(r *http.Request), extraTests func(r *http.Request)) *httptest.Server {
return testhelper.TestServerWithHandler(regexp.MustCompile(`.`), func(w http.ResponseWriter, r *http.Request) {
if strings.HasSuffix(r.URL.Path, "/authorize") || r.URL.Path == "/api/v4/internal/workhorse/authorize_upload" {
expectSignedRequest(t, r)
w.Header().Set("Content-Type", api.ResponseContentType)
- _, err := fmt.Fprintf(w, `{"TempPath":"%s"}`, scratchDir)
+ var err error
+
+ if len(allowedHashFunctions) == 0 {
+ _, err = fmt.Fprintf(w, `{"TempPath":"%s"}`, scratchDir)
+ } else {
+ _, err = fmt.Fprintf(w, `{"TempPath":"%s", "UploadHashFunctions": ["%s"]}`, scratchDir, strings.Join(allowedHashFunctions, `","`))
+ }
+
require.NoError(t, err)
if authorizeTests != nil {
@@ -83,15 +89,23 @@ func uploadTestServer(t *testing.T, authorizeTests func(r *http.Request), extraT
require.NoError(t, r.ParseMultipartForm(100000))
- var nValues int // file name, path, remote_url, remote_id, size, md5, sha1, sha256, sha512, upload_duration, gitlab-workhorse-upload for just the upload (no metadata because we are not POSTing a valid zip file)
- if destination.FIPSEnabled() {
- nValues = 10
+ nValues := len([]string{
+ "name",
+ "path",
+ "remote_url",
+ "remote_id",
+ "size",
+ "upload_duration",
+ "gitlab-workhorse-upload",
+ })
+
+ if n := len(allowedHashFunctions); n > 0 {
+ nValues += n
} else {
- nValues = 11
+ nValues += len([]string{"md5", "sha1", "sha256", "sha512"}) // Default hash functions
}
require.Len(t, r.MultipartForm.Value, nValues)
-
require.Empty(t, r.MultipartForm.File, "multipart form files")
if extraTests != nil {
@@ -104,7 +118,7 @@ func uploadTestServer(t *testing.T, authorizeTests func(r *http.Request), extraT
func signedUploadTestServer(t *testing.T, authorizeTests func(r *http.Request), extraTests func(r *http.Request)) *httptest.Server {
t.Helper()
- return uploadTestServer(t, authorizeTests, func(r *http.Request) {
+ return uploadTestServer(t, nil, authorizeTests, func(r *http.Request) {
expectSignedRequest(t, r)
if extraTests != nil {
@@ -167,67 +181,80 @@ func TestAcceleratedUpload(t *testing.T) {
{"POST", "/api/v4/projects/group%2Fsubgroup%2Fproject/packages/helm/api/stable/charts", true},
}
- for _, tt := range tests {
- t.Run(tt.resource, func(t *testing.T) {
- ts := uploadTestServer(t,
- func(r *http.Request) {
- if r.URL.Path == "/api/v4/internal/workhorse/authorize_upload" {
- // Nothing to validate: this is a hard coded URL
- return
- }
- resource := strings.TrimRight(tt.resource, "/")
- // Validate %2F characters haven't been unescaped
- require.Equal(t, resource+"/authorize", r.URL.String())
- },
- func(r *http.Request) {
- if tt.signedFinalization {
- expectSignedRequest(t, r)
- }
-
- token, err := jwt.ParseWithClaims(r.Header.Get(upload.RewrittenFieldsHeader), &upload.MultipartClaims{}, testhelper.ParseJWT)
- require.NoError(t, err)
-
- rewrittenFields := token.Claims.(*upload.MultipartClaims).RewrittenFields
- if len(rewrittenFields) != 1 || len(rewrittenFields["file"]) == 0 {
- t.Fatalf("Unexpected rewritten_fields value: %v", rewrittenFields)
- }
-
- token, jwtErr := jwt.ParseWithClaims(r.PostFormValue("file.gitlab-workhorse-upload"), &testhelper.UploadClaims{}, testhelper.ParseJWT)
- require.NoError(t, jwtErr)
-
- uploadFields := token.Claims.(*testhelper.UploadClaims).Upload
- require.Contains(t, uploadFields, "name")
- require.Contains(t, uploadFields, "path")
- require.Contains(t, uploadFields, "remote_url")
- require.Contains(t, uploadFields, "remote_id")
- require.Contains(t, uploadFields, "size")
- if destination.FIPSEnabled() {
- require.NotContains(t, uploadFields, "md5")
- } else {
- require.Contains(t, uploadFields, "md5")
- }
- require.Contains(t, uploadFields, "sha1")
- require.Contains(t, uploadFields, "sha256")
- require.Contains(t, uploadFields, "sha512")
- })
-
- defer ts.Close()
- ws := startWorkhorseServer(ts.URL)
- defer ws.Close()
-
- reqBody, contentType, err := multipartBodyWithFile()
- require.NoError(t, err)
-
- req, err := http.NewRequest(tt.method, ws.URL+tt.resource, reqBody)
- require.NoError(t, err)
+ allowedHashFunctions := map[string][]string{
+ "default": nil,
+ "sha2_only": {"sha256", "sha512"},
+ }
- req.Header.Set("Content-Type", contentType)
- resp, err := http.DefaultClient.Do(req)
- require.NoError(t, err)
- require.Equal(t, 200, resp.StatusCode)
+ for _, tt := range tests {
+ for hashSet, hashFunctions := range allowedHashFunctions {
+ t.Run(tt.resource, func(t *testing.T) {
+
+ ts := uploadTestServer(t,
+ hashFunctions,
+ func(r *http.Request) {
+ if r.URL.Path == "/api/v4/internal/workhorse/authorize_upload" {
+ // Nothing to validate: this is a hard coded URL
+ return
+ }
+ resource := strings.TrimRight(tt.resource, "/")
+ // Validate %2F characters haven't been unescaped
+ require.Equal(t, resource+"/authorize", r.URL.String())
+ },
+ func(r *http.Request) {
+ if tt.signedFinalization {
+ expectSignedRequest(t, r)
+ }
+
+ token, err := jwt.ParseWithClaims(r.Header.Get(upload.RewrittenFieldsHeader), &upload.MultipartClaims{}, testhelper.ParseJWT)
+ require.NoError(t, err)
+
+ rewrittenFields := token.Claims.(*upload.MultipartClaims).RewrittenFields
+ if len(rewrittenFields) != 1 || len(rewrittenFields["file"]) == 0 {
+ t.Fatalf("Unexpected rewritten_fields value: %v", rewrittenFields)
+ }
+
+ token, jwtErr := jwt.ParseWithClaims(r.PostFormValue("file.gitlab-workhorse-upload"), &testhelper.UploadClaims{}, testhelper.ParseJWT)
+ require.NoError(t, jwtErr)
+
+ uploadFields := token.Claims.(*testhelper.UploadClaims).Upload
+ require.Contains(t, uploadFields, "name")
+ require.Contains(t, uploadFields, "path")
+ require.Contains(t, uploadFields, "remote_url")
+ require.Contains(t, uploadFields, "remote_id")
+ require.Contains(t, uploadFields, "size")
+
+ if hashSet == "default" {
+ require.Contains(t, uploadFields, "md5")
+ require.Contains(t, uploadFields, "sha1")
+ require.Contains(t, uploadFields, "sha256")
+ require.Contains(t, uploadFields, "sha512")
+ } else {
+ require.NotContains(t, uploadFields, "md5")
+ require.NotContains(t, uploadFields, "sha1")
+ require.Contains(t, uploadFields, "sha256")
+ require.Contains(t, uploadFields, "sha512")
+ }
+ })
+
+ defer ts.Close()
+ ws := startWorkhorseServer(ts.URL)
+ defer ws.Close()
+
+ reqBody, contentType, err := multipartBodyWithFile()
+ require.NoError(t, err)
+
+ req, err := http.NewRequest(tt.method, ws.URL+tt.resource, reqBody)
+ require.NoError(t, err)
+
+ req.Header.Set("Content-Type", contentType)
+ resp, err := http.DefaultClient.Do(req)
+ require.NoError(t, err)
+ require.Equal(t, 200, resp.StatusCode)
- resp.Body.Close()
- })
+ resp.Body.Close()
+ })
+ }
}
}