diff --git a/.gitlab/ci/workhorse.gitlab-ci.yml b/.gitlab/ci/workhorse.gitlab-ci.yml
index 5f58694a7bf84e242c77ace25009b857b0d7a60e..389906dbbffa465158db87ad5622bd9ac8cb67d3 100644
--- a/.gitlab/ci/workhorse.gitlab-ci.yml
+++ b/.gitlab/ci/workhorse.gitlab-ci.yml
@@ -39,7 +39,6 @@ workhorse:test fips:
   extends: .workhorse:test
   image: registry.gitlab.com/gitlab-org/gitlab-omnibus-builder/ubuntu_20.04_fips:4.0.0
   variables:
-    WORKHORSE_TEST_FIPS_ENABLED: 1
     FIPS_MODE: 1
 
 workhorse:test race:
diff --git a/app/uploaders/object_storage.rb b/app/uploaders/object_storage.rb
index e74998ce4a8d7143ab0db20ad5ef26e6c872c76b..1b47400d5e8081492e1ba1903431d1ef8fc48f50 100644
--- a/app/uploaders/object_storage.rb
+++ b/app/uploaders/object_storage.rb
@@ -139,6 +139,7 @@ def workhorse_authorize(has_length:, maximum_size: nil)
             hash[:TempPath] = workhorse_local_upload_path
           end
 
+          hash[:UploadHashFunctions] = %w[sha1 sha256 sha512] if ::Gitlab::FIPS.enabled?
           hash[:MaximumSize] = maximum_size if maximum_size.present?
         end
       end
diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb
index a4f6116f7d719afa7679ce10be31b40258def683..5344dbeb5122993d07aec5a2d5160d2579e37ed3 100644
--- a/spec/uploaders/object_storage_spec.rb
+++ b/spec/uploaders/object_storage_spec.rb
@@ -497,6 +497,18 @@ def when_file_is_in_use
 
     subject { uploader_class.workhorse_authorize(has_length: has_length, maximum_size: maximum_size) }
 
+    context 'when FIPS is enabled', :fips_mode do
+      it 'returns only FIPS-compliant hash functions' do
+        expect(subject[:UploadHashFunctions]).to match_array(%w[sha1 sha256 sha512])
+      end
+    end
+
+    context 'when FIPS is disabled' do
+      it 'does not restrict the permitted hash functions' do
+        expect(subject[:UploadHashFunctions]).to be nil
+      end
+    end
+
     shared_examples 'returns the maximum size given' do
       it "returns temporary path" do
         expect(subject[:MaximumSize]).to eq(maximum_size)
diff --git a/workhorse/internal/api/api.go b/workhorse/internal/api/api.go
index 92e88ae3f70585901f329a2375adf0e3574d4df3..8a7fb191ec49a7eddd1417dcb0215f9803a93e75 100644
--- a/workhorse/internal/api/api.go
+++ b/workhorse/internal/api/api.go
@@ -163,6 +163,8 @@ type Response struct {
 	ProcessLsif bool
 	// The maximum accepted size in bytes of the upload
 	MaximumSize int64
+	// A list of permitted hash functions. If empty, then all available are permitted.
+	UploadHashFunctions []string
 }
 
 type GitalyServer struct {
diff --git a/workhorse/internal/upload/body_uploader_test.go b/workhorse/internal/upload/body_uploader_test.go
index eff3375784560fe7986e1b1d244ab6ba184ef062..837d119e72ea8cc24f0bbc81cb397703ad2e35b9 100644
--- a/workhorse/internal/upload/body_uploader_test.go
+++ b/workhorse/internal/upload/body_uploader_test.go
@@ -92,11 +92,7 @@ func echoProxy(t *testing.T, expectedBodyLength int) http.Handler {
 
 		require.Equal(t, "application/x-www-form-urlencoded", r.Header.Get("Content-Type"), "Wrong Content-Type header")
 
-		if destination.FIPSEnabled() {
-			require.NotContains(t, r.PostForm, "file.md5")
-		} else {
-			require.Contains(t, r.PostForm, "file.md5")
-		}
+		require.Contains(t, r.PostForm, "file.md5")
 		require.Contains(t, r.PostForm, "file.sha1")
 		require.Contains(t, r.PostForm, "file.sha256")
 		require.Contains(t, r.PostForm, "file.sha512")
@@ -123,11 +119,7 @@ func echoProxy(t *testing.T, expectedBodyLength int) http.Handler {
 		require.Contains(t, uploadFields, "remote_url")
 		require.Contains(t, uploadFields, "remote_id")
 		require.Contains(t, uploadFields, "size")
-		if destination.FIPSEnabled() {
-			require.NotContains(t, uploadFields, "md5")
-		} else {
-			require.Contains(t, uploadFields, "md5")
-		}
+		require.Contains(t, uploadFields, "md5")
 		require.Contains(t, uploadFields, "sha1")
 		require.Contains(t, uploadFields, "sha256")
 		require.Contains(t, uploadFields, "sha512")
diff --git a/workhorse/internal/upload/destination/destination.go b/workhorse/internal/upload/destination/destination.go
index 0a7e2493cd6e293389d775b0f5e59491ca884d81..a9fb81540d5de79064bb81156c0a5f1ae5d636e9 100644
--- a/workhorse/internal/upload/destination/destination.go
+++ b/workhorse/internal/upload/destination/destination.go
@@ -121,7 +121,7 @@ func Upload(ctx context.Context, reader io.Reader, size int64, name string, opts
 	}
 	uploadStartTime := time.Now()
 	defer func() { fh.uploadDuration = time.Since(uploadStartTime).Seconds() }()
-	hashes := newMultiHash()
+	hashes := newMultiHash(opts.UploadHashFunctions)
 	reader = io.TeeReader(reader, hashes.Writer)
 
 	var clientMode string
diff --git a/workhorse/internal/upload/destination/destination_test.go b/workhorse/internal/upload/destination/destination_test.go
index 928ffdb9f5a1ce304d22a646c741ea6a3a399c7a..69dd02ca7c2fd3f8dca68012a90a88937f985755 100644
--- a/workhorse/internal/upload/destination/destination_test.go
+++ b/workhorse/internal/upload/destination/destination_test.go
@@ -215,11 +215,7 @@ func TestUpload(t *testing.T) {
 			}
 
 			require.Equal(t, test.ObjectSize, fh.Size)
-			if FIPSEnabled() {
-				require.Empty(t, fh.MD5())
-			} else {
-				require.Equal(t, test.ObjectMD5, fh.MD5())
-			}
+			require.Equal(t, test.ObjectMD5, fh.MD5())
 			require.Equal(t, test.ObjectSHA256, fh.SHA256())
 
 			require.Equal(t, expectedPuts, osStub.PutsCnt(), "ObjectStore PutObject count mismatch")
@@ -508,11 +504,7 @@ func checkFileHandlerWithFields(t *testing.T, fh *FileHandler, fields map[string
 	require.Equal(t, fh.RemoteURL, fields[key("remote_url")])
 	require.Equal(t, fh.RemoteID, fields[key("remote_id")])
 	require.Equal(t, strconv.FormatInt(test.ObjectSize, 10), fields[key("size")])
-	if FIPSEnabled() {
-		require.Empty(t, fields[key("md5")])
-	} else {
-		require.Equal(t, test.ObjectMD5, fields[key("md5")])
-	}
+	require.Equal(t, test.ObjectMD5, fields[key("md5")])
 	require.Equal(t, test.ObjectSHA1, fields[key("sha1")])
 	require.Equal(t, test.ObjectSHA256, fields[key("sha256")])
 	require.Equal(t, test.ObjectSHA512, fields[key("sha512")])
diff --git a/workhorse/internal/upload/destination/multi_hash.go b/workhorse/internal/upload/destination/multi_hash.go
index 8d5bf4424a8f126dd9dc4776260cfcd5ac408493..3f8b0cbd9036d28cefc8dc631594455653f917c4 100644
--- a/workhorse/internal/upload/destination/multi_hash.go
+++ b/workhorse/internal/upload/destination/multi_hash.go
@@ -8,9 +8,6 @@ import (
 	"encoding/hex"
 	"hash"
 	"io"
-	"os"
-
-	"gitlab.com/gitlab-org/labkit/fips"
 )
 
 var hashFactories = map[string](func() hash.Hash){
@@ -20,39 +17,39 @@ var hashFactories = map[string](func() hash.Hash){
 	"sha512": sha512.New,
 }
 
-var fipsHashFactories = map[string](func() hash.Hash){
-	"sha1":   sha1.New,
-	"sha256": sha256.New,
-	"sha512": sha512.New,
-}
-
 func factories() map[string](func() hash.Hash) {
-	if FIPSEnabled() {
-		return fipsHashFactories
-	}
-
 	return hashFactories
 }
 
-func FIPSEnabled() bool {
-	if fips.Enabled() {
+type multiHash struct {
+	io.Writer
+	hashes map[string]hash.Hash
+}
+
+func permittedHashFunction(hashFunctions []string, hash string) bool {
+	if len(hashFunctions) == 0 {
 		return true
 	}
 
-	return os.Getenv("WORKHORSE_TEST_FIPS_ENABLED") == "1"
-}
+	for _, name := range hashFunctions {
+		if name == hash {
+			return true
+		}
+	}
 
-type multiHash struct {
-	io.Writer
-	hashes map[string]hash.Hash
+	return false
 }
 
-func newMultiHash() (m *multiHash) {
+func newMultiHash(hashFunctions []string) (m *multiHash) {
 	m = &multiHash{}
 	m.hashes = make(map[string]hash.Hash)
 
 	var writers []io.Writer
 	for hash, hashFactory := range factories() {
+		if !permittedHashFunction(hashFunctions, hash) {
+			continue
+		}
+
 		writer := hashFactory()
 
 		m.hashes[hash] = writer
diff --git a/workhorse/internal/upload/destination/multi_hash_test.go b/workhorse/internal/upload/destination/multi_hash_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..9a976f5d25dc58a05324037d35b87a763bcd14b5
--- /dev/null
+++ b/workhorse/internal/upload/destination/multi_hash_test.go
@@ -0,0 +1,52 @@
+package destination
+
+import (
+	"sort"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestNewMultiHash(t *testing.T) {
+	tests := []struct {
+		name           string
+		allowedHashes  []string
+		expectedHashes []string
+	}{
+		{
+			name:           "default",
+			allowedHashes:  nil,
+			expectedHashes: []string{"md5", "sha1", "sha256", "sha512"},
+		},
+		{
+			name:           "blank",
+			allowedHashes:  []string{},
+			expectedHashes: []string{"md5", "sha1", "sha256", "sha512"},
+		},
+		{
+			name:           "no MD5",
+			allowedHashes:  []string{"sha1", "sha256", "sha512"},
+			expectedHashes: []string{"sha1", "sha256", "sha512"},
+		},
+
+		{
+			name:           "unlisted hash",
+			allowedHashes:  []string{"sha1", "sha256", "sha512", "sha3-256"},
+			expectedHashes: []string{"sha1", "sha256", "sha512"},
+		},
+	}
+
+	for _, test := range tests { // NOTE(review): test.name is unused — consider wrapping each case in t.Run(test.name, ...)
+		mh := newMultiHash(test.allowedHashes)
+
+		require.Equal(t, len(test.expectedHashes), len(mh.hashes))
+
+		var keys []string
+		for key := range mh.hashes {
+			keys = append(keys, key)
+		}
+
+		sort.Strings(keys)
+		require.Equal(t, test.expectedHashes, keys)
+	}
+}
diff --git a/workhorse/internal/upload/destination/upload_opts.go b/workhorse/internal/upload/destination/upload_opts.go
index d81fa10e97c86d427258e99c1edca3147d6a4b99..72efaebc16c7dd9cd17889af79aedd509aa76dfa 100644
--- a/workhorse/internal/upload/destination/upload_opts.go
+++ b/workhorse/internal/upload/destination/upload_opts.go
@@ -63,6 +63,8 @@ type UploadOpts struct {
 	PresignedCompleteMultipart string
 	// PresignedAbortMultipart is a presigned URL for AbortMultipartUpload
 	PresignedAbortMultipart string
+	// UploadHashFunctions contains a list of allowed hash functions (md5, sha1, etc.)
+	UploadHashFunctions []string
 }
 
 // UseWorkhorseClientEnabled checks if the options require direct access to object storage
@@ -92,17 +94,18 @@ func GetOpts(apiResponse *api.Response) (*UploadOpts, error) {
 	}
 
 	opts := UploadOpts{
-		LocalTempPath:      apiResponse.TempPath,
-		RemoteID:           apiResponse.RemoteObject.ID,
-		RemoteURL:          apiResponse.RemoteObject.GetURL,
-		PresignedPut:       apiResponse.RemoteObject.StoreURL,
-		PresignedDelete:    apiResponse.RemoteObject.DeleteURL,
-		SkipDelete:         apiResponse.RemoteObject.SkipDelete,
-		PutHeaders:         apiResponse.RemoteObject.PutHeaders,
-		UseWorkhorseClient: apiResponse.RemoteObject.UseWorkhorseClient,
-		RemoteTempObjectID: apiResponse.RemoteObject.RemoteTempObjectID,
-		Deadline:           time.Now().Add(timeout),
-		MaximumSize:        apiResponse.MaximumSize,
+		LocalTempPath:       apiResponse.TempPath,
+		RemoteID:            apiResponse.RemoteObject.ID,
+		RemoteURL:           apiResponse.RemoteObject.GetURL,
+		PresignedPut:        apiResponse.RemoteObject.StoreURL,
+		PresignedDelete:     apiResponse.RemoteObject.DeleteURL,
+		SkipDelete:          apiResponse.RemoteObject.SkipDelete,
+		PutHeaders:          apiResponse.RemoteObject.PutHeaders,
+		UseWorkhorseClient:  apiResponse.RemoteObject.UseWorkhorseClient,
+		RemoteTempObjectID:  apiResponse.RemoteObject.RemoteTempObjectID,
+		Deadline:            time.Now().Add(timeout),
+		MaximumSize:         apiResponse.MaximumSize,
+		UploadHashFunctions: apiResponse.UploadHashFunctions,
 	}
 
 	if opts.LocalTempPath != "" && opts.RemoteID != "" {
diff --git a/workhorse/internal/upload/uploads_test.go b/workhorse/internal/upload/uploads_test.go
index cc786079e36638d33264920158952320fbc2afb0..69baa2dab6e079b0c023cf94b58810882b956695 100644
--- a/workhorse/internal/upload/uploads_test.go
+++ b/workhorse/internal/upload/uploads_test.go
@@ -24,7 +24,6 @@ import (
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/proxy"
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
-	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upstream/roundtripper"
 )
@@ -111,13 +110,7 @@ func TestUploadHandlerRewritingMultiPartData(t *testing.T) {
 
 		expectedLen := 12
 
-		if destination.FIPSEnabled() {
-			expectedLen--
-			require.Empty(t, r.FormValue("file.md5"), "file hash md5")
-		} else {
-			require.Equal(t, "098f6bcd4621d373cade4e832627b4f6", r.FormValue("file.md5"), "file hash md5")
-		}
-
+		require.Equal(t, "098f6bcd4621d373cade4e832627b4f6", r.FormValue("file.md5"), "file hash md5")
 		require.Len(t, r.MultipartForm.Value, expectedLen, "multipart form values")
 
 		w.WriteHeader(202)
diff --git a/workhorse/upload_test.go b/workhorse/upload_test.go
index 22abed25928a830fa45f7f0ba433f7a74fdb1878..333301091c7299743dee228242c2715636ef209d 100644
--- a/workhorse/upload_test.go
+++ b/workhorse/upload_test.go
@@ -20,7 +20,6 @@ import (
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/secret"
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload"
-	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
 )
 
 type uploadArtifactsFunction func(url, contentType string, body io.Reader) (*http.Response, string, error)
@@ -66,13 +65,20 @@ func expectSignedRequest(t *testing.T, r *http.Request) {
 	require.NoError(t, err)
 }
 
-func uploadTestServer(t *testing.T, authorizeTests func(r *http.Request), extraTests func(r *http.Request)) *httptest.Server {
+func uploadTestServer(t *testing.T, allowedHashFunctions []string, authorizeTests func(r *http.Request), extraTests func(r *http.Request)) *httptest.Server {
 	return testhelper.TestServerWithHandler(regexp.MustCompile(`.`), func(w http.ResponseWriter, r *http.Request) {
 		if strings.HasSuffix(r.URL.Path, "/authorize") || r.URL.Path == "/api/v4/internal/workhorse/authorize_upload" {
 			expectSignedRequest(t, r)
 
 			w.Header().Set("Content-Type", api.ResponseContentType)
-			_, err := fmt.Fprintf(w, `{"TempPath":"%s"}`, scratchDir)
+			var err error
+
+			if len(allowedHashFunctions) == 0 {
+				_, err = fmt.Fprintf(w, `{"TempPath":"%s"}`, scratchDir)
+			} else {
+				_, err = fmt.Fprintf(w, `{"TempPath":"%s", "UploadHashFunctions": ["%s"]}`, scratchDir, strings.Join(allowedHashFunctions, `","`))
+			}
+
 			require.NoError(t, err)
 
 			if authorizeTests != nil {
@@ -83,15 +89,23 @@ func uploadTestServer(t *testing.T, authorizeTests func(r *http.Request), extraT
 
 		require.NoError(t, r.ParseMultipartForm(100000))
 
-		var nValues int // file name, path, remote_url, remote_id, size, md5, sha1, sha256, sha512, upload_duration, gitlab-workhorse-upload for just the upload (no metadata because we are not POSTing a valid zip file)
-		if destination.FIPSEnabled() {
-			nValues = 10
+		nValues := len([]string{
+			"name",
+			"path",
+			"remote_url",
+			"remote_id",
+			"size",
+			"upload_duration",
+			"gitlab-workhorse-upload",
+		})
+
+		if n := len(allowedHashFunctions); n > 0 {
+			nValues += n
 		} else {
-			nValues = 11
+			nValues += len([]string{"md5", "sha1", "sha256", "sha512"}) // Default hash functions
 		}
 
 		require.Len(t, r.MultipartForm.Value, nValues)
-
 		require.Empty(t, r.MultipartForm.File, "multipart form files")
 
 		if extraTests != nil {
@@ -104,7 +118,7 @@ func uploadTestServer(t *testing.T, authorizeTests func(r *http.Request), extraT
 func signedUploadTestServer(t *testing.T, authorizeTests func(r *http.Request), extraTests func(r *http.Request)) *httptest.Server {
 	t.Helper()
 
-	return uploadTestServer(t, authorizeTests, func(r *http.Request) {
+	return uploadTestServer(t, nil, authorizeTests, func(r *http.Request) {
 		expectSignedRequest(t, r)
 
 		if extraTests != nil {
@@ -167,67 +181,80 @@ func TestAcceleratedUpload(t *testing.T) {
 		{"POST", "/api/v4/projects/group%2Fsubgroup%2Fproject/packages/helm/api/stable/charts", true},
 	}
 
-	for _, tt := range tests {
-		t.Run(tt.resource, func(t *testing.T) {
-			ts := uploadTestServer(t,
-				func(r *http.Request) {
-					if r.URL.Path == "/api/v4/internal/workhorse/authorize_upload" {
-						// Nothing to validate: this is a hard coded URL
-						return
-					}
-					resource := strings.TrimRight(tt.resource, "/")
-					// Validate %2F characters haven't been unescaped
-					require.Equal(t, resource+"/authorize", r.URL.String())
-				},
-				func(r *http.Request) {
-					if tt.signedFinalization {
-						expectSignedRequest(t, r)
-					}
-
-					token, err := jwt.ParseWithClaims(r.Header.Get(upload.RewrittenFieldsHeader), &upload.MultipartClaims{}, testhelper.ParseJWT)
-					require.NoError(t, err)
-
-					rewrittenFields := token.Claims.(*upload.MultipartClaims).RewrittenFields
-					if len(rewrittenFields) != 1 || len(rewrittenFields["file"]) == 0 {
-						t.Fatalf("Unexpected rewritten_fields value: %v", rewrittenFields)
-					}
-
-					token, jwtErr := jwt.ParseWithClaims(r.PostFormValue("file.gitlab-workhorse-upload"), &testhelper.UploadClaims{}, testhelper.ParseJWT)
-					require.NoError(t, jwtErr)
-
-					uploadFields := token.Claims.(*testhelper.UploadClaims).Upload
-					require.Contains(t, uploadFields, "name")
-					require.Contains(t, uploadFields, "path")
-					require.Contains(t, uploadFields, "remote_url")
-					require.Contains(t, uploadFields, "remote_id")
-					require.Contains(t, uploadFields, "size")
-					if destination.FIPSEnabled() {
-						require.NotContains(t, uploadFields, "md5")
-					} else {
-						require.Contains(t, uploadFields, "md5")
-					}
-					require.Contains(t, uploadFields, "sha1")
-					require.Contains(t, uploadFields, "sha256")
-					require.Contains(t, uploadFields, "sha512")
-				})
-
-			defer ts.Close()
-			ws := startWorkhorseServer(ts.URL)
-			defer ws.Close()
-
-			reqBody, contentType, err := multipartBodyWithFile()
-			require.NoError(t, err)
-
-			req, err := http.NewRequest(tt.method, ws.URL+tt.resource, reqBody)
-			require.NoError(t, err)
+	allowedHashFunctions := map[string][]string{
+		"default":   nil,
+		"sha2_only": {"sha256", "sha512"},
+	}
 
-			req.Header.Set("Content-Type", contentType)
-			resp, err := http.DefaultClient.Do(req)
-			require.NoError(t, err)
-			require.Equal(t, 200, resp.StatusCode)
+	for _, tt := range tests {
+		for hashSet, hashFunctions := range allowedHashFunctions {
+			t.Run(tt.resource, func(t *testing.T) { // NOTE(review): include hashSet in the subtest name to disambiguate the two runs per resource
+
+				ts := uploadTestServer(t,
+					hashFunctions,
+					func(r *http.Request) {
+						if r.URL.Path == "/api/v4/internal/workhorse/authorize_upload" {
+							// Nothing to validate: this is a hard coded URL
+							return
+						}
+						resource := strings.TrimRight(tt.resource, "/")
+						// Validate %2F characters haven't been unescaped
+						require.Equal(t, resource+"/authorize", r.URL.String())
+					},
+					func(r *http.Request) {
+						if tt.signedFinalization {
+							expectSignedRequest(t, r)
+						}
+
+						token, err := jwt.ParseWithClaims(r.Header.Get(upload.RewrittenFieldsHeader), &upload.MultipartClaims{}, testhelper.ParseJWT)
+						require.NoError(t, err)
+
+						rewrittenFields := token.Claims.(*upload.MultipartClaims).RewrittenFields
+						if len(rewrittenFields) != 1 || len(rewrittenFields["file"]) == 0 {
+							t.Fatalf("Unexpected rewritten_fields value: %v", rewrittenFields)
+						}
+
+						token, jwtErr := jwt.ParseWithClaims(r.PostFormValue("file.gitlab-workhorse-upload"), &testhelper.UploadClaims{}, testhelper.ParseJWT)
+						require.NoError(t, jwtErr)
+
+						uploadFields := token.Claims.(*testhelper.UploadClaims).Upload
+						require.Contains(t, uploadFields, "name")
+						require.Contains(t, uploadFields, "path")
+						require.Contains(t, uploadFields, "remote_url")
+						require.Contains(t, uploadFields, "remote_id")
+						require.Contains(t, uploadFields, "size")
+
+						if hashSet == "default" {
+							require.Contains(t, uploadFields, "md5")
+							require.Contains(t, uploadFields, "sha1")
+							require.Contains(t, uploadFields, "sha256")
+							require.Contains(t, uploadFields, "sha512")
+						} else {
+							require.NotContains(t, uploadFields, "md5")
+							require.NotContains(t, uploadFields, "sha1")
+							require.Contains(t, uploadFields, "sha256")
+							require.Contains(t, uploadFields, "sha512")
+						}
+					})
+
+				defer ts.Close()
+				ws := startWorkhorseServer(ts.URL)
+				defer ws.Close()
+
+				reqBody, contentType, err := multipartBodyWithFile()
+				require.NoError(t, err)
+
+				req, err := http.NewRequest(tt.method, ws.URL+tt.resource, reqBody)
+				require.NoError(t, err)
+
+				req.Header.Set("Content-Type", contentType)
+				resp, err := http.DefaultClient.Do(req)
+				require.NoError(t, err)
+				require.Equal(t, 200, resp.StatusCode)
 
-			resp.Body.Close()
-		})
+				resp.Body.Close()
+			})
+		}
 	}
 }