From e65e8a1bcdfa9578c9db86f458eeddb08a5a8f05 Mon Sep 17 00:00:00 2001
From: Stan Hu <stanhu@gmail.com>
Date: Thu, 15 Dec 2022 00:24:59 -0800
Subject: [PATCH] Fix Maven packages not working on FIPS kernels

On a FIPS kernel with a non-FIPS GitLab build, Maven package uploads
previously would fail with 422 Unprocessable Entity errors. This
occurred because Workhorse was generating an MD5 sum for the
accelerated upload, but the Rails API rejects any use of MD5 in FIPS
mode.

Workhorse was generating an MD5 sum because its own check for FIPS
involves more conditions:

1. The binary has been compiled with the `fips` build tag.
2. The platform is amd64 running on a Linux runtime.
3. The kernel has FIPS enabled (e.g. `/proc/sys/crypto/fips_enabled` is 1).
4. A system OpenSSL can be dynamically loaded via dlopen().

On a standard GitLab installation, 1 is not true, so Workhorse
believed it was working in a standard environment. However, GitLab
Rails only checks whether the OpenSSL library is FIPS-enabled.

To resolve this discrepancy, we now:

1. Pass `UploadHashFunctions` to indicate which hashes should be used
   in the Workhorse `/authorize` response.
2. Workhorse will use this list to determine which hashes to compute.
   If the list is empty, Workhorse will assume all hashes can be used.

Relates to https://gitlab.com/gitlab-org/gitlab/-/issues/380559

Changelog: fixed
---
 .gitlab/ci/workhorse.gitlab-ci.yml            |   1 -
 app/uploaders/object_storage.rb               |   1 +
 spec/uploaders/object_storage_spec.rb         |  12 ++
 workhorse/internal/api/api.go                 |   2 +
 .../internal/upload/body_uploader_test.go     |  12 +-
 .../upload/destination/destination.go         |   2 +-
 .../upload/destination/destination_test.go    |  12 +-
 .../internal/upload/destination/multi_hash.go |  39 ++---
 .../upload/destination/multi_hash_test.go     |  52 ++++++
 .../upload/destination/upload_opts.go         |  25 +--
 workhorse/internal/upload/uploads_test.go     |   9 +-
 workhorse/upload_test.go                      | 163 ++++++++++--------
 12 files changed, 200 insertions(+), 130 deletions(-)
 create mode 100644 workhorse/internal/upload/destination/multi_hash_test.go

diff --git a/.gitlab/ci/workhorse.gitlab-ci.yml b/.gitlab/ci/workhorse.gitlab-ci.yml
index 5f58694a7bf84..389906dbbffa4 100644
--- a/.gitlab/ci/workhorse.gitlab-ci.yml
+++ b/.gitlab/ci/workhorse.gitlab-ci.yml
@@ -39,7 +39,6 @@ workhorse:test fips:
   extends: .workhorse:test
   image: registry.gitlab.com/gitlab-org/gitlab-omnibus-builder/ubuntu_20.04_fips:4.0.0
   variables:
-    WORKHORSE_TEST_FIPS_ENABLED: 1
     FIPS_MODE: 1
 
 workhorse:test race:
diff --git a/app/uploaders/object_storage.rb b/app/uploaders/object_storage.rb
index e74998ce4a8d7..1b47400d5e808 100644
--- a/app/uploaders/object_storage.rb
+++ b/app/uploaders/object_storage.rb
@@ -139,6 +139,7 @@ def workhorse_authorize(has_length:, maximum_size: nil)
             hash[:TempPath] = workhorse_local_upload_path
           end
 
+          hash[:UploadHashFunctions] = %w[sha1 sha256 sha512] if ::Gitlab::FIPS.enabled?
           hash[:MaximumSize] = maximum_size if maximum_size.present?
         end
       end
diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb
index a4f6116f7d719..5344dbeb51229 100644
--- a/spec/uploaders/object_storage_spec.rb
+++ b/spec/uploaders/object_storage_spec.rb
@@ -497,6 +497,18 @@ def when_file_is_in_use
 
     subject { uploader_class.workhorse_authorize(has_length: has_length, maximum_size: maximum_size) }
 
+    context 'when FIPS is enabled', :fips_mode do
+      it 'response enables FIPS' do
+        expect(subject[:UploadHashFunctions]).to match_array(%w[sha1 sha256 sha512])
+      end
+    end
+
+    context 'when FIPS is disabled' do
+      it 'response disables FIPS' do
+        expect(subject[:UploadHashFunctions]).to be nil
+      end
+    end
+
     shared_examples 'returns the maximum size given' do
       it "returns temporary path" do
         expect(subject[:MaximumSize]).to eq(maximum_size)
diff --git a/workhorse/internal/api/api.go b/workhorse/internal/api/api.go
index 92e88ae3f7058..8a7fb191ec49a 100644
--- a/workhorse/internal/api/api.go
+++ b/workhorse/internal/api/api.go
@@ -163,6 +163,8 @@ type Response struct {
 	ProcessLsif bool
 	// The maximum accepted size in bytes of the upload
 	MaximumSize int64
+	// A list of permitted hash functions. If empty, then all available are permitted.
+	UploadHashFunctions []string
 }
 
 type GitalyServer struct {
diff --git a/workhorse/internal/upload/body_uploader_test.go b/workhorse/internal/upload/body_uploader_test.go
index eff3375784560..837d119e72ea8 100644
--- a/workhorse/internal/upload/body_uploader_test.go
+++ b/workhorse/internal/upload/body_uploader_test.go
@@ -92,11 +92,7 @@ func echoProxy(t *testing.T, expectedBodyLength int) http.Handler {
 
 		require.Equal(t, "application/x-www-form-urlencoded", r.Header.Get("Content-Type"), "Wrong Content-Type header")
 
-		if destination.FIPSEnabled() {
-			require.NotContains(t, r.PostForm, "file.md5")
-		} else {
-			require.Contains(t, r.PostForm, "file.md5")
-		}
+		require.Contains(t, r.PostForm, "file.md5")
 		require.Contains(t, r.PostForm, "file.sha1")
 		require.Contains(t, r.PostForm, "file.sha256")
 		require.Contains(t, r.PostForm, "file.sha512")
@@ -123,11 +119,7 @@ func echoProxy(t *testing.T, expectedBodyLength int) http.Handler {
 		require.Contains(t, uploadFields, "remote_url")
 		require.Contains(t, uploadFields, "remote_id")
 		require.Contains(t, uploadFields, "size")
-		if destination.FIPSEnabled() {
-			require.NotContains(t, uploadFields, "md5")
-		} else {
-			require.Contains(t, uploadFields, "md5")
-		}
+		require.Contains(t, uploadFields, "md5")
 		require.Contains(t, uploadFields, "sha1")
 		require.Contains(t, uploadFields, "sha256")
 		require.Contains(t, uploadFields, "sha512")
diff --git a/workhorse/internal/upload/destination/destination.go b/workhorse/internal/upload/destination/destination.go
index 0a7e2493cd6e2..a9fb81540d5de 100644
--- a/workhorse/internal/upload/destination/destination.go
+++ b/workhorse/internal/upload/destination/destination.go
@@ -121,7 +121,7 @@ func Upload(ctx context.Context, reader io.Reader, size int64, name string, opts
 	}
 	uploadStartTime := time.Now()
 	defer func() { fh.uploadDuration = time.Since(uploadStartTime).Seconds() }()
-	hashes := newMultiHash()
+	hashes := newMultiHash(opts.UploadHashFunctions)
 	reader = io.TeeReader(reader, hashes.Writer)
 
 	var clientMode string
diff --git a/workhorse/internal/upload/destination/destination_test.go b/workhorse/internal/upload/destination/destination_test.go
index 928ffdb9f5a1c..69dd02ca7c2fd 100644
--- a/workhorse/internal/upload/destination/destination_test.go
+++ b/workhorse/internal/upload/destination/destination_test.go
@@ -215,11 +215,7 @@ func TestUpload(t *testing.T) {
 			}
 
 			require.Equal(t, test.ObjectSize, fh.Size)
-			if FIPSEnabled() {
-				require.Empty(t, fh.MD5())
-			} else {
-				require.Equal(t, test.ObjectMD5, fh.MD5())
-			}
+			require.Equal(t, test.ObjectMD5, fh.MD5())
 			require.Equal(t, test.ObjectSHA256, fh.SHA256())
 
 			require.Equal(t, expectedPuts, osStub.PutsCnt(), "ObjectStore PutObject count mismatch")
@@ -508,11 +504,7 @@ func checkFileHandlerWithFields(t *testing.T, fh *FileHandler, fields map[string
 	require.Equal(t, fh.RemoteURL, fields[key("remote_url")])
 	require.Equal(t, fh.RemoteID, fields[key("remote_id")])
 	require.Equal(t, strconv.FormatInt(test.ObjectSize, 10), fields[key("size")])
-	if FIPSEnabled() {
-		require.Empty(t, fields[key("md5")])
-	} else {
-		require.Equal(t, test.ObjectMD5, fields[key("md5")])
-	}
+	require.Equal(t, test.ObjectMD5, fields[key("md5")])
 	require.Equal(t, test.ObjectSHA1, fields[key("sha1")])
 	require.Equal(t, test.ObjectSHA256, fields[key("sha256")])
 	require.Equal(t, test.ObjectSHA512, fields[key("sha512")])
diff --git a/workhorse/internal/upload/destination/multi_hash.go b/workhorse/internal/upload/destination/multi_hash.go
index 8d5bf4424a8f1..3f8b0cbd9036d 100644
--- a/workhorse/internal/upload/destination/multi_hash.go
+++ b/workhorse/internal/upload/destination/multi_hash.go
@@ -8,9 +8,6 @@ import (
 	"encoding/hex"
 	"hash"
 	"io"
-	"os"
-
-	"gitlab.com/gitlab-org/labkit/fips"
 )
 
 var hashFactories = map[string](func() hash.Hash){
@@ -20,39 +17,39 @@ var hashFactories = map[string](func() hash.Hash){
 	"sha512": sha512.New,
 }
 
-var fipsHashFactories = map[string](func() hash.Hash){
-	"sha1":   sha1.New,
-	"sha256": sha256.New,
-	"sha512": sha512.New,
-}
-
 func factories() map[string](func() hash.Hash) {
-	if FIPSEnabled() {
-		return fipsHashFactories
-	}
-
 	return hashFactories
 }
 
-func FIPSEnabled() bool {
-	if fips.Enabled() {
+type multiHash struct {
+	io.Writer
+	hashes map[string]hash.Hash
+}
+
+func permittedHashFunction(hashFunctions []string, hash string) bool {
+	if len(hashFunctions) == 0 {
 		return true
 	}
 
-	return os.Getenv("WORKHORSE_TEST_FIPS_ENABLED") == "1"
-}
+	for _, name := range hashFunctions {
+		if name == hash {
+			return true
+		}
+	}
 
-type multiHash struct {
-	io.Writer
-	hashes map[string]hash.Hash
+	return false
 }
 
-func newMultiHash() (m *multiHash) {
+func newMultiHash(hashFunctions []string) (m *multiHash) {
 	m = &multiHash{}
 	m.hashes = make(map[string]hash.Hash)
 
 	var writers []io.Writer
 	for hash, hashFactory := range factories() {
+		if !permittedHashFunction(hashFunctions, hash) {
+			continue
+		}
+
 		writer := hashFactory()
 
 		m.hashes[hash] = writer
diff --git a/workhorse/internal/upload/destination/multi_hash_test.go b/workhorse/internal/upload/destination/multi_hash_test.go
new file mode 100644
index 0000000000000..9a976f5d25dc5
--- /dev/null
+++ b/workhorse/internal/upload/destination/multi_hash_test.go
@@ -0,0 +1,52 @@
+package destination
+
+import (
+	"sort"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestNewMultiHash(t *testing.T) {
+	tests := []struct {
+		name           string
+		allowedHashes  []string
+		expectedHashes []string
+	}{
+		{
+			name:           "default",
+			allowedHashes:  nil,
+			expectedHashes: []string{"md5", "sha1", "sha256", "sha512"},
+		},
+		{
+			name:           "blank",
+			allowedHashes:  []string{},
+			expectedHashes: []string{"md5", "sha1", "sha256", "sha512"},
+		},
+		{
+			name:           "no MD5",
+			allowedHashes:  []string{"sha1", "sha256", "sha512"},
+			expectedHashes: []string{"sha1", "sha256", "sha512"},
+		},
+
+		{
+			name:           "unlisted hash",
+			allowedHashes:  []string{"sha1", "sha256", "sha512", "sha3-256"},
+			expectedHashes: []string{"sha1", "sha256", "sha512"},
+		},
+	}
+
+	for _, test := range tests {
+		mh := newMultiHash(test.allowedHashes)
+
+		require.Equal(t, len(test.expectedHashes), len(mh.hashes))
+
+		var keys []string
+		for key := range mh.hashes {
+			keys = append(keys, key)
+		}
+
+		sort.Strings(keys)
+		require.Equal(t, test.expectedHashes, keys)
+	}
+}
diff --git a/workhorse/internal/upload/destination/upload_opts.go b/workhorse/internal/upload/destination/upload_opts.go
index d81fa10e97c86..72efaebc16c7d 100644
--- a/workhorse/internal/upload/destination/upload_opts.go
+++ b/workhorse/internal/upload/destination/upload_opts.go
@@ -63,6 +63,8 @@ type UploadOpts struct {
 	PresignedCompleteMultipart string
 	// PresignedAbortMultipart is a presigned URL for AbortMultipartUpload
 	PresignedAbortMultipart string
+	// UploadHashFunctions contains a list of allowed hash functions (md5, sha1, etc.)
+	UploadHashFunctions []string
 }
 
 // UseWorkhorseClientEnabled checks if the options require direct access to object storage
@@ -92,17 +94,18 @@ func GetOpts(apiResponse *api.Response) (*UploadOpts, error) {
 	}
 
 	opts := UploadOpts{
-		LocalTempPath:      apiResponse.TempPath,
-		RemoteID:           apiResponse.RemoteObject.ID,
-		RemoteURL:          apiResponse.RemoteObject.GetURL,
-		PresignedPut:       apiResponse.RemoteObject.StoreURL,
-		PresignedDelete:    apiResponse.RemoteObject.DeleteURL,
-		SkipDelete:         apiResponse.RemoteObject.SkipDelete,
-		PutHeaders:         apiResponse.RemoteObject.PutHeaders,
-		UseWorkhorseClient: apiResponse.RemoteObject.UseWorkhorseClient,
-		RemoteTempObjectID: apiResponse.RemoteObject.RemoteTempObjectID,
-		Deadline:           time.Now().Add(timeout),
-		MaximumSize:        apiResponse.MaximumSize,
+		LocalTempPath:       apiResponse.TempPath,
+		RemoteID:            apiResponse.RemoteObject.ID,
+		RemoteURL:           apiResponse.RemoteObject.GetURL,
+		PresignedPut:        apiResponse.RemoteObject.StoreURL,
+		PresignedDelete:     apiResponse.RemoteObject.DeleteURL,
+		SkipDelete:          apiResponse.RemoteObject.SkipDelete,
+		PutHeaders:          apiResponse.RemoteObject.PutHeaders,
+		UseWorkhorseClient:  apiResponse.RemoteObject.UseWorkhorseClient,
+		RemoteTempObjectID:  apiResponse.RemoteObject.RemoteTempObjectID,
+		Deadline:            time.Now().Add(timeout),
+		MaximumSize:         apiResponse.MaximumSize,
+		UploadHashFunctions: apiResponse.UploadHashFunctions,
 	}
 
 	if opts.LocalTempPath != "" && opts.RemoteID != "" {
diff --git a/workhorse/internal/upload/uploads_test.go b/workhorse/internal/upload/uploads_test.go
index cc786079e3663..69baa2dab6e07 100644
--- a/workhorse/internal/upload/uploads_test.go
+++ b/workhorse/internal/upload/uploads_test.go
@@ -24,7 +24,6 @@ import (
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/proxy"
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
-	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upstream/roundtripper"
 )
@@ -111,13 +110,7 @@ func TestUploadHandlerRewritingMultiPartData(t *testing.T) {
 
 		expectedLen := 12
 
-		if destination.FIPSEnabled() {
-			expectedLen--
-			require.Empty(t, r.FormValue("file.md5"), "file hash md5")
-		} else {
-			require.Equal(t, "098f6bcd4621d373cade4e832627b4f6", r.FormValue("file.md5"), "file hash md5")
-		}
-
+		require.Equal(t, "098f6bcd4621d373cade4e832627b4f6", r.FormValue("file.md5"), "file hash md5")
 		require.Len(t, r.MultipartForm.Value, expectedLen, "multipart form values")
 
 		w.WriteHeader(202)
diff --git a/workhorse/upload_test.go b/workhorse/upload_test.go
index 22abed25928a8..333301091c729 100644
--- a/workhorse/upload_test.go
+++ b/workhorse/upload_test.go
@@ -20,7 +20,6 @@ import (
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/secret"
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload"
-	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
 )
 
 type uploadArtifactsFunction func(url, contentType string, body io.Reader) (*http.Response, string, error)
@@ -66,13 +65,20 @@ func expectSignedRequest(t *testing.T, r *http.Request) {
 	require.NoError(t, err)
 }
 
-func uploadTestServer(t *testing.T, authorizeTests func(r *http.Request), extraTests func(r *http.Request)) *httptest.Server {
+func uploadTestServer(t *testing.T, allowedHashFunctions []string, authorizeTests func(r *http.Request), extraTests func(r *http.Request)) *httptest.Server {
 	return testhelper.TestServerWithHandler(regexp.MustCompile(`.`), func(w http.ResponseWriter, r *http.Request) {
 		if strings.HasSuffix(r.URL.Path, "/authorize") || r.URL.Path == "/api/v4/internal/workhorse/authorize_upload" {
 			expectSignedRequest(t, r)
 
 			w.Header().Set("Content-Type", api.ResponseContentType)
-			_, err := fmt.Fprintf(w, `{"TempPath":"%s"}`, scratchDir)
+			var err error
+
+			if len(allowedHashFunctions) == 0 {
+				_, err = fmt.Fprintf(w, `{"TempPath":"%s"}`, scratchDir)
+			} else {
+				_, err = fmt.Fprintf(w, `{"TempPath":"%s", "UploadHashFunctions": ["%s"]}`, scratchDir, strings.Join(allowedHashFunctions, `","`))
+			}
+
 			require.NoError(t, err)
 
 			if authorizeTests != nil {
@@ -83,15 +89,23 @@ func uploadTestServer(t *testing.T, authorizeTests func(r *http.Request), extraT
 
 		require.NoError(t, r.ParseMultipartForm(100000))
 
-		var nValues int // file name, path, remote_url, remote_id, size, md5, sha1, sha256, sha512, upload_duration, gitlab-workhorse-upload for just the upload (no metadata because we are not POSTing a valid zip file)
-		if destination.FIPSEnabled() {
-			nValues = 10
+		nValues := len([]string{
+			"name",
+			"path",
+			"remote_url",
+			"remote_id",
+			"size",
+			"upload_duration",
+			"gitlab-workhorse-upload",
+		})
+
+		if n := len(allowedHashFunctions); n > 0 {
+			nValues += n
 		} else {
-			nValues = 11
+			nValues += len([]string{"md5", "sha1", "sha256", "sha512"}) // Default hash functions
 		}
 
 		require.Len(t, r.MultipartForm.Value, nValues)
-
 		require.Empty(t, r.MultipartForm.File, "multipart form files")
 
 		if extraTests != nil {
@@ -104,7 +118,7 @@ func uploadTestServer(t *testing.T, authorizeTests func(r *http.Request), extraT
 func signedUploadTestServer(t *testing.T, authorizeTests func(r *http.Request), extraTests func(r *http.Request)) *httptest.Server {
 	t.Helper()
 
-	return uploadTestServer(t, authorizeTests, func(r *http.Request) {
+	return uploadTestServer(t, nil, authorizeTests, func(r *http.Request) {
 		expectSignedRequest(t, r)
 
 		if extraTests != nil {
@@ -167,67 +181,80 @@ func TestAcceleratedUpload(t *testing.T) {
 		{"POST", "/api/v4/projects/group%2Fsubgroup%2Fproject/packages/helm/api/stable/charts", true},
 	}
 
-	for _, tt := range tests {
-		t.Run(tt.resource, func(t *testing.T) {
-			ts := uploadTestServer(t,
-				func(r *http.Request) {
-					if r.URL.Path == "/api/v4/internal/workhorse/authorize_upload" {
-						// Nothing to validate: this is a hard coded URL
-						return
-					}
-					resource := strings.TrimRight(tt.resource, "/")
-					// Validate %2F characters haven't been unescaped
-					require.Equal(t, resource+"/authorize", r.URL.String())
-				},
-				func(r *http.Request) {
-					if tt.signedFinalization {
-						expectSignedRequest(t, r)
-					}
-
-					token, err := jwt.ParseWithClaims(r.Header.Get(upload.RewrittenFieldsHeader), &upload.MultipartClaims{}, testhelper.ParseJWT)
-					require.NoError(t, err)
-
-					rewrittenFields := token.Claims.(*upload.MultipartClaims).RewrittenFields
-					if len(rewrittenFields) != 1 || len(rewrittenFields["file"]) == 0 {
-						t.Fatalf("Unexpected rewritten_fields value: %v", rewrittenFields)
-					}
-
-					token, jwtErr := jwt.ParseWithClaims(r.PostFormValue("file.gitlab-workhorse-upload"), &testhelper.UploadClaims{}, testhelper.ParseJWT)
-					require.NoError(t, jwtErr)
-
-					uploadFields := token.Claims.(*testhelper.UploadClaims).Upload
-					require.Contains(t, uploadFields, "name")
-					require.Contains(t, uploadFields, "path")
-					require.Contains(t, uploadFields, "remote_url")
-					require.Contains(t, uploadFields, "remote_id")
-					require.Contains(t, uploadFields, "size")
-					if destination.FIPSEnabled() {
-						require.NotContains(t, uploadFields, "md5")
-					} else {
-						require.Contains(t, uploadFields, "md5")
-					}
-					require.Contains(t, uploadFields, "sha1")
-					require.Contains(t, uploadFields, "sha256")
-					require.Contains(t, uploadFields, "sha512")
-				})
-
-			defer ts.Close()
-			ws := startWorkhorseServer(ts.URL)
-			defer ws.Close()
-
-			reqBody, contentType, err := multipartBodyWithFile()
-			require.NoError(t, err)
-
-			req, err := http.NewRequest(tt.method, ws.URL+tt.resource, reqBody)
-			require.NoError(t, err)
+	allowedHashFunctions := map[string][]string{
+		"default":   nil,
+		"sha2_only": {"sha256", "sha512"},
+	}
 
-			req.Header.Set("Content-Type", contentType)
-			resp, err := http.DefaultClient.Do(req)
-			require.NoError(t, err)
-			require.Equal(t, 200, resp.StatusCode)
+	for _, tt := range tests {
+		for hashSet, hashFunctions := range allowedHashFunctions {
+			t.Run(tt.resource, func(t *testing.T) {
+
+				ts := uploadTestServer(t,
+					hashFunctions,
+					func(r *http.Request) {
+						if r.URL.Path == "/api/v4/internal/workhorse/authorize_upload" {
+							// Nothing to validate: this is a hard coded URL
+							return
+						}
+						resource := strings.TrimRight(tt.resource, "/")
+						// Validate %2F characters haven't been unescaped
+						require.Equal(t, resource+"/authorize", r.URL.String())
+					},
+					func(r *http.Request) {
+						if tt.signedFinalization {
+							expectSignedRequest(t, r)
+						}
+
+						token, err := jwt.ParseWithClaims(r.Header.Get(upload.RewrittenFieldsHeader), &upload.MultipartClaims{}, testhelper.ParseJWT)
+						require.NoError(t, err)
+
+						rewrittenFields := token.Claims.(*upload.MultipartClaims).RewrittenFields
+						if len(rewrittenFields) != 1 || len(rewrittenFields["file"]) == 0 {
+							t.Fatalf("Unexpected rewritten_fields value: %v", rewrittenFields)
+						}
+
+						token, jwtErr := jwt.ParseWithClaims(r.PostFormValue("file.gitlab-workhorse-upload"), &testhelper.UploadClaims{}, testhelper.ParseJWT)
+						require.NoError(t, jwtErr)
+
+						uploadFields := token.Claims.(*testhelper.UploadClaims).Upload
+						require.Contains(t, uploadFields, "name")
+						require.Contains(t, uploadFields, "path")
+						require.Contains(t, uploadFields, "remote_url")
+						require.Contains(t, uploadFields, "remote_id")
+						require.Contains(t, uploadFields, "size")
+
+						if hashSet == "default" {
+							require.Contains(t, uploadFields, "md5")
+							require.Contains(t, uploadFields, "sha1")
+							require.Contains(t, uploadFields, "sha256")
+							require.Contains(t, uploadFields, "sha512")
+						} else {
+							require.NotContains(t, uploadFields, "md5")
+							require.NotContains(t, uploadFields, "sha1")
+							require.Contains(t, uploadFields, "sha256")
+							require.Contains(t, uploadFields, "sha512")
+						}
+					})
+
+				defer ts.Close()
+				ws := startWorkhorseServer(ts.URL)
+				defer ws.Close()
+
+				reqBody, contentType, err := multipartBodyWithFile()
+				require.NoError(t, err)
+
+				req, err := http.NewRequest(tt.method, ws.URL+tt.resource, reqBody)
+				require.NoError(t, err)
+
+				req.Header.Set("Content-Type", contentType)
+				resp, err := http.DefaultClient.Do(req)
+				require.NoError(t, err)
+				require.Equal(t, 200, resp.StatusCode)
 
-			resp.Body.Close()
-		})
+				resp.Body.Close()
+			})
+		}
 	}
 }
 
-- 
GitLab