diff --git a/GITLAB_WORKHORSE_VERSION b/GITLAB_WORKHORSE_VERSION
index 5239b90e3c2ce3c4e7a7416cf9c264ad1ed6d981..57b9fc187c0177df8efbe33f380e9805567c89b5 100644
--- a/GITLAB_WORKHORSE_VERSION
+++ b/GITLAB_WORKHORSE_VERSION
@@ -1 +1 @@
-8.30.0
+8.30.1
diff --git a/changelogs/unreleased/security-fix-issue-213139.yml b/changelogs/unreleased/security-fix-issue-213139.yml
new file mode 100644
index 0000000000000000000000000000000000000000..0a78d6bed6347942527a2f89f1a91ccbfbc99ec5
--- /dev/null
+++ b/changelogs/unreleased/security-fix-issue-213139.yml
@@ -0,0 +1,5 @@
+---
+title: Prevent filename bypass on artifact upload
+merge_request:
+author:
+type: security
diff --git a/ee/spec/lib/gitlab/middleware/multipart_spec.rb b/ee/spec/lib/gitlab/middleware/multipart_spec.rb
index fcdd037b089017b0a2b8b2bc6c860c6c839a557c..4e1465e4890a9d36e080ac2b46a2eb204be54a44 100644
--- a/ee/spec/lib/gitlab/middleware/multipart_spec.rb
+++ b/ee/spec/lib/gitlab/middleware/multipart_spec.rb
@@ -27,12 +27,18 @@
         end
       end
 
-      RSpec.shared_examples 'not allowing the multipart upload' do
+      RSpec.shared_examples 'not allowing the multipart upload when package upload path is used' do
         it 'does not allow files to be uploaded' do
           with_tmp_dir('tmp/uploads', storage_path) do |dir, env|
-            allow(Packages::PackageFileUploader).to receive(:root).and_return(File.join(dir, storage_path))
+            # with_tmp_dir sets the same workhorse_upload_path for all Uploaders,
+            # so we have to prevent JobArtifactUploader from allowing the tested path
+            allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return(Dir.tmpdir)
+
+            status, headers, body = middleware.call(env)
 
-            expect { middleware.call(env) }.to raise_error(UploadedFile::InvalidPathError)
+            expect(status).to eq(400)
+            expect(headers).to eq({ 'Content-Type' => 'text/plain' })
+            expect(body).to start_with('insecure path used')
           end
         end
       end
@@ -50,7 +56,7 @@
           expect(::Packages::PackageFileUploader).not_to receive(:workhorse_upload_path)
         end
 
-        it_behaves_like 'not allowing the multipart upload'
+        it_behaves_like 'not allowing the multipart upload when package upload path is used'
       end
 
       where(:object_storage_enabled, :direct_upload_enabled, :example_name) do
diff --git a/lib/api/runner.rb b/lib/api/runner.rb
index f97e28de628ddfa30b5e013eb0e81540cbfd9678..9095aba7340f27aae485a32016683b101782959e 100644
--- a/lib/api/runner.rb
+++ b/lib/api/runner.rb
@@ -251,21 +251,14 @@ class Runner < Grape::API
       end
       params do
         requires :id, type: Integer, desc: %q(Job's ID)
+        requires :file, type: ::API::Validations::Types::WorkhorseFile, desc: %(The artifact file to store (generated by Multipart middleware))
         optional :token, type: String, desc: %q(Job's authentication token)
         optional :expire_in, type: String, desc: %q(Specify when artifacts should expire)
         optional :artifact_type, type: String, desc: %q(The type of artifact),
                                  default: 'archive', values: Ci::JobArtifact.file_types.keys
         optional :artifact_format, type: String, desc: %q(The format of artifact),
                                    default: 'zip', values: Ci::JobArtifact.file_formats.keys
-        optional 'file.path', type: String, desc: %q(path to locally stored body (generated by Workhorse))
-        optional 'file.name', type: String, desc: %q(real filename as send in Content-Disposition (generated by Workhorse))
-        optional 'file.type', type: String, desc: %q(real content type as send in Content-Type (generated by Workhorse))
-        optional 'file.size', type: Integer, desc: %q(real size of file (generated by Workhorse))
-        optional 'file.sha256', type: String, desc: %q(sha256 checksum of the file (generated by Workhorse))
-        optional 'metadata.path', type: String, desc: %q(path to locally stored body (generated by Workhorse))
-        optional 'metadata.name', type: String, desc: %q(filename (generated by Workhorse))
-        optional 'metadata.size', type: Integer, desc: %q(real size of metadata (generated by Workhorse))
-        optional 'metadata.sha256', type: String, desc: %q(sha256 checksum of metadata (generated by Workhorse))
+        optional :metadata, type: ::API::Validations::Types::WorkhorseFile, desc: %(The artifact metadata to store (generated by Multipart middleware))
       end
       post '/:id/artifacts' do
         not_allowed! unless Gitlab.config.artifacts.enabled
@@ -274,10 +267,9 @@ class Runner < Grape::API
         job = authenticate_job!
         forbidden!('Job is not running!') unless job.running?
 
-        artifacts = UploadedFile.from_params(params, :file, JobArtifactUploader.workhorse_local_upload_path)
-        metadata = UploadedFile.from_params(params, :metadata, JobArtifactUploader.workhorse_local_upload_path)
+        artifacts = params[:file]
+        metadata = params[:metadata]
 
-        bad_request!('Missing artifacts file!') unless artifacts
         file_too_large! unless artifacts.size < max_artifacts_size(job)
 
         result = Ci::CreateJobArtifactsService.new(job.project).execute(job, artifacts, params, metadata_file: metadata)
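
With this change the endpoint no longer rebuilds UploadedFile objects from client-supplied 'file.path'/'file.name' params: params[:file] and params[:metadata] arrive already materialized by the Multipart middleware, and the WorkhorseFile type rejects anything else, which is why the explicit bad_request!('Missing artifacts file!') guard can be dropped. As a minimal sketch (not GitLab's actual class), a Grape custom type with this behaviour could look like the following, assuming Grape's parse/parsed? custom-type protocol and GitLab's ::UploadedFile class being loaded:

    # Hypothetical sketch of a WorkhorseFile-style Grape type.
    module API
      module Validations
        module Types
          class WorkhorseFile
            # Grape asks parsed? whether the value is already in its final form.
            def self.parsed?(value)
              value.is_a?(::UploadedFile)
            end

            # Anything else, e.g. a raw 'file.path' string sent by a client,
            # fails coercion, so Grape answers 400 before the endpoint body runs.
            def self.parse(value)
              raise ArgumentError, "#{value.class} is not an UploadedFile" unless parsed?(value)

              value
            end
          end
        end
      end
    end

Because requires :file now fails fast on missing or unrewritten input, the endpoint body only has to check the size limit and hand both files to Ci::CreateJobArtifactsService.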
diff --git a/lib/gitlab/middleware/multipart.rb b/lib/gitlab/middleware/multipart.rb
index cb4213d23a4afbebd2b2756d3d3a8918733b4206..c82c05e73199dd32b5ae67753bafe6d8fcd44089 100644
--- a/lib/gitlab/middleware/multipart.rb
+++ b/lib/gitlab/middleware/multipart.rb
@@ -107,6 +107,7 @@ def allowed_paths
           [
             ::FileUploader.root,
             Gitlab.config.uploads.storage_path,
+            JobArtifactUploader.workhorse_upload_path,
             File.join(Rails.root, 'public/uploads/tmp')
           ]
         end
@@ -125,6 +126,8 @@ def call(env)
         Handler.new(env, message).with_open_files do
           @app.call(env)
         end
+      rescue UploadedFile::InvalidPathError => e
+        [400, { 'Content-Type' => 'text/plain' }, e.message]
       end
     end
   end
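
Two things happen in this file: JobArtifactUploader.workhorse_upload_path joins the middleware's allowed_paths whitelist, and UploadedFile::InvalidPathError is converted into a plain 400 response instead of bubbling up as a 500 (the ee spec above asserts exactly this status, content type and 'insecure path used' message). A self-contained sketch of the same rescue-to-Rack-response pattern, using hypothetical class and path names rather than GitLab's own:

    require 'rack'

    # Stand-ins for UploadedFile::InvalidPathError and the allowed upload roots.
    class InsecurePathError < StandardError; end
    ALLOWED_ROOTS = ['/var/opt/gitlab/artifacts'].freeze

    class RejectInsecurePaths
      def initialize(app)
        @app = app
      end

      def call(env)
        path = Rack::Request.new(env).params['file.path']
        # Naive prefix check, enough for the sketch.
        if path && ALLOWED_ROOTS.none? { |root| File.expand_path(path).start_with?(root) }
          raise InsecurePathError, "insecure path used '#{path}'"
        end

        @app.call(env)
      rescue InsecurePathError => e
        # Same shape as the patched middleware: [status, headers, body].
        [400, { 'Content-Type' => 'text/plain' }, [e.message]]
      end
    end

The sketch wraps the message in an array because a Rack body is expected to respond to each; the patch returns the bare string, which is what the specs match with start_with.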
diff --git a/spec/lib/gitlab/middleware/multipart_spec.rb b/spec/lib/gitlab/middleware/multipart_spec.rb
index 1d3ad3afb5682801c01de5e582a4894e3d746a3f..ec153e25d44d53bf1b178514e255f08b613e5333 100644
--- a/spec/lib/gitlab/middleware/multipart_spec.rb
+++ b/spec/lib/gitlab/middleware/multipart_spec.rb
@@ -89,6 +89,17 @@ def expect_uploaded_file(tempfile, path, remote: false)
     end
   end
 
+  it 'allows files in the job artifact upload path' do
+    with_tmp_dir('artifacts') do |dir, env|
+      expect(JobArtifactUploader).to receive(:workhorse_upload_path).and_return(File.join(dir, 'artifacts'))
+      expect(app).to receive(:call) do |env|
+        expect(get_params(env)['file']).to be_a(::UploadedFile)
+      end
+
+      middleware.call(env)
+    end
+  end
+
   it 'allows symlinks for uploads dir' do
     Tempfile.open('two-levels') do |tempfile|
       symlinked_dir = '/some/dir/uploads'
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index a67c1a48ad40ab00b5997a43a39686c96c384f17..bc2da8a2b9afbe0f3a78bed0f1d3d24c1f25c031 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -1422,8 +1422,8 @@ def force_patch_the_trace
 
     describe 'artifacts' do
       let(:job) { create(:ci_build, :pending, user: user, project: project, pipeline: pipeline, runner_id: runner.id) }
-      let(:jwt_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
-      let(:headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => jwt_token } }
+      let(:jwt) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
+      let(:headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => jwt } }
       let(:headers_with_token) { headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.token) }
       let(:file_upload) { fixture_file_upload('spec/fixtures/banana_sample.gif', 'image/gif') }
       let(:file_upload2) { fixture_file_upload('spec/fixtures/dk.png', 'image/gif') }
@@ -1703,12 +1703,12 @@ def authorize_artifacts_with_token_in_headers(params = {}, request_headers = hea
             it 'fails to post artifacts without GitLab-Workhorse' do
               post api("/jobs/#{job.id}/artifacts"), params: { token: job.token }, headers: {}
 
-              expect(response).to have_gitlab_http_status(:forbidden)
+              expect(response).to have_gitlab_http_status(:bad_request)
             end
           end
 
           context 'Is missing GitLab Workhorse token headers' do
-            let(:jwt_token) { JWT.encode({ 'iss' => 'invalid-header' }, Gitlab::Workhorse.secret, 'HS256') }
+            let(:jwt) { JWT.encode({ 'iss' => 'invalid-header' }, Gitlab::Workhorse.secret, 'HS256') }
 
             it 'fails to post artifacts without GitLab-Workhorse' do
               expect(Gitlab::ErrorTracking).to receive(:track_exception).once
@@ -1722,15 +1722,14 @@ def authorize_artifacts_with_token_in_headers(params = {}, request_headers = hea
           context 'when setting an expire date' do
             let(:default_artifacts_expire_in) {}
             let(:post_data) do
-              { 'file.path' => file_upload.path,
-                'file.name' => file_upload.original_filename,
-                'expire_in' => expire_in }
+              { file: file_upload,
+                expire_in: expire_in }
             end
 
             before do
               stub_application_setting(default_artifacts_expire_in: default_artifacts_expire_in)
 
-              post(api("/jobs/#{job.id}/artifacts"), params: post_data, headers: headers_with_token)
+              upload_artifacts(file_upload, headers_with_token, post_data)
             end
 
             context 'when an expire_in is given' do
@@ -1783,20 +1782,22 @@ def authorize_artifacts_with_token_in_headers(params = {}, request_headers = hea
             let(:stored_artifacts_size) { job.reload.artifacts_size }
             let(:stored_artifacts_sha256) { job.reload.job_artifacts_archive.file_sha256 }
             let(:stored_metadata_sha256) { job.reload.job_artifacts_metadata.file_sha256 }
+            let(:file_keys) { post_data.keys }
+            let(:send_rewritten_field) { true }
 
             before do
-              post(api("/jobs/#{job.id}/artifacts"), params: post_data, headers: headers_with_token)
+              workhorse_finalize_with_multiple_files(
+                api("/jobs/#{job.id}/artifacts"),
+                method: :post,
+                file_keys: file_keys,
+                params: post_data,
+                headers: headers_with_token,
+                send_rewritten_field: send_rewritten_field
+              )
             end
 
             context 'when posts data accelerated by workhorse is correct' do
-              let(:post_data) do
-                { 'file.path' => artifacts.path,
-                  'file.name' => artifacts.original_filename,
-                  'file.sha256' => artifacts_sha256,
-                  'metadata.path' => metadata.path,
-                  'metadata.name' => metadata.original_filename,
-                  'metadata.sha256' => metadata_sha256 }
-              end
+              let(:post_data) { { file: artifacts, metadata: metadata } }
 
               it 'stores artifacts and artifacts metadata' do
                 expect(response).to have_gitlab_http_status(:created)
@@ -1808,9 +1809,30 @@ def authorize_artifacts_with_token_in_headers(params = {}, request_headers = hea
               end
             end
 
+            context 'with a malicious file.path param' do
+              let(:post_data) { {} }
+              let(:tmp_file) { Tempfile.new('crafted.file.path') }
+              let(:url) { "/jobs/#{job.id}/artifacts?file.path=#{tmp_file.path}" }
+
+              it 'rejects the request' do
+                expect(response).to have_gitlab_http_status(:bad_request)
+                expect(stored_artifacts_size).to be_nil
+              end
+            end
+
+            context 'when workhorse header is missing' do
+              let(:post_data) { { file: artifacts, metadata: metadata } }
+              let(:send_rewritten_field) { false }
+
+              it 'rejects the request' do
+                expect(response).to have_gitlab_http_status(:bad_request)
+                expect(stored_artifacts_size).to be_nil
+              end
+            end
+
             context 'when there is no artifacts file in post data' do
               let(:post_data) do
-                { 'metadata' => metadata }
+                { metadata: metadata }
               end
 
               it 'is expected to respond with bad request' do
@@ -2053,7 +2075,8 @@ def upload_artifacts(file, headers = {}, params = {})
             method: :post,
             file_key: :file,
             params: params.merge(file: file),
-            headers: headers
+            headers: headers,
+            send_rewritten_field: true
           )
         end
       end
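
Note the helper change here: upload_artifacts now passes send_rewritten_field: true, because after this patch the middleware only turns 'file.path' style params into UploadedFile objects when Workhorse has signed them, and the new 'when workhorse header is missing' context checks the rejection case. A sketch of how such a signed header could be built, mirroring the jwt let above; the 'rewritten_fields' claim and the header name are assumptions based on the workhorse_rewritten_fields_header helper used in these specs:

    require 'jwt'

    # Shared secret that both Workhorse and Rails already hold.
    secret = Gitlab::Workhorse.secret

    rewritten = {
      'file'     => '/artifacts/tmp/uploads/artifacts.zip', # hypothetical rewritten paths
      'metadata' => '/artifacts/tmp/uploads/metadata.gz'
    }

    token = JWT.encode(
      { 'iss' => 'gitlab-workhorse', 'rewritten_fields' => rewritten },
      secret,
      'HS256'
    )

    # Assumed header name; merged into the request headers alongside the job token.
    headers = { 'Gitlab-Workhorse-Multipart-Fields' => token }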
diff --git a/spec/support/helpers/workhorse_helpers.rb b/spec/support/helpers/workhorse_helpers.rb
index de232da3c8cfdf01597c3409fd0a632c3e13db68..53b36b3dd457c5f2a225ea78b90d2a4ea4422933 100644
--- a/spec/support/helpers/workhorse_helpers.rb
+++ b/spec/support/helpers/workhorse_helpers.rb
@@ -33,22 +33,36 @@ def workhorse_post_with_file(url, file_key:, params:)
   # workhorse_finalize will transform file_key inside params as if it was the finalize call of an inline object storage upload.
   # note that based on the content of the params it can simulate a disc acceleration or an object storage upload
   def workhorse_finalize(url, method: :post, file_key:, params:, headers: {}, send_rewritten_field: false)
-    workhorse_request_with_file(method, url,
-                                file_key: file_key,
-                                params: params,
-                                extra_headers: headers,
-                                send_rewritten_field: send_rewritten_field
+    workhorse_finalize_with_multiple_files(url, method: method, file_keys: file_key, params: params, headers: headers, send_rewritten_field: send_rewritten_field)
+  end
+
+  def workhorse_finalize_with_multiple_files(url, method: :post, file_keys:, params:, headers: {}, send_rewritten_field: false)
+    workhorse_request_with_multiple_files(method, url,
+                                          file_keys: file_keys,
+                                          params: params,
+                                          extra_headers: headers,
+                                          send_rewritten_field: send_rewritten_field
     )
   end
 
   def workhorse_request_with_file(method, url, file_key:, params:, env: {}, extra_headers: {}, send_rewritten_field:)
+    workhorse_request_with_multiple_files(method, url, file_keys: file_key, params: params, env: env, extra_headers: extra_headers, send_rewritten_field: send_rewritten_field)
+  end
+
+  def workhorse_request_with_multiple_files(method, url, file_keys:, params:, env: {}, extra_headers: {}, send_rewritten_field:)
     workhorse_params = params.dup
-    file = workhorse_params.delete(file_key)
 
-    workhorse_params = workhorse_disk_accelerated_file_params(file_key, file).merge(workhorse_params)
+    file_keys = Array(file_keys)
+    rewritten_fields = {}
+
+    file_keys.each do |key|
+      file = workhorse_params.delete(key)
+      rewritten_fields[key] = file.path if file
+      workhorse_params = workhorse_disk_accelerated_file_params(key, file).merge(workhorse_params)
+    end
 
     headers = if send_rewritten_field
-                workhorse_rewritten_fields_header(file_key => file.path)
+                workhorse_rewritten_fields_header(rewritten_fields)
               else
                 {}
               end
@@ -75,7 +89,11 @@ def workhorse_disk_accelerated_file_params(key, file)
       "#{key}.name" => file.original_filename,
       "#{key}.size" => file.size
     }.tap do |params|
-      params["#{key}.path"] = file.path if file.path
+      if file.path
+        params["#{key}.path"] = file.path
+        params["#{key}.sha256"] = Digest::SHA256.file(file.path).hexdigest
+      end
+
       params["#{key}.remote_id"] = file.remote_id if file.respond_to?(:remote_id) && file.remote_id.present?
     end
   end
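
For reference, the extended helper now emits a checksum next to every local path, so a single file run through workhorse_disk_accelerated_file_params produces the same 'file.*' params the runner endpoint used to declare one by one. A usage sketch, assuming the same fixture helpers used in runner_spec.rb above:

    file = fixture_file_upload('spec/fixtures/banana_sample.gif', 'image/gif')

    workhorse_disk_accelerated_file_params(:file, file)
    # => {
    #      "file.name"   => "banana_sample.gif",
    #      "file.size"   => file.size,
    #      "file.path"   => file.path,
    #      "file.sha256" => Digest::SHA256.file(file.path).hexdigest
    #    }

The "#{key}.remote_id" entry is only added when the file responds to remote_id and it is present, i.e. when the helper simulates an object storage finalization instead of a disk-accelerated upload.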