diff --git a/CHANGELOG b/CHANGELOG
index 3006ff4049da2d4cbcb825fc4b0b9ace44a88546..d58864442b375cbbe37f4fc45cb6b0786a2b9768 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -5,6 +5,7 @@ v 7.4.0
   - Refactor test coverage tools usage. Use SIMPLECOV=true to generate it locally
   - Increase unicorn timeout to 60 seconds
   - Sort search autocomplete projects by stars count so most popular go first
+  - Support for backup uploads to remote storage
 
 v 7.3.1
   - Fix ref parsing in Gitlab::GitAccess
diff --git a/Gemfile b/Gemfile
index 9686bf9610f98c7c4b03e08e7e0f0c59eedc641c..d4cca8c23c6218cf26868200b3be936a11b92cef 100644
--- a/Gemfile
+++ b/Gemfile
@@ -70,8 +70,8 @@ gem "carrierwave"
 gem 'dropzonejs-rails'
 
 # for aws storage
-gem "fog", "~> 1.14", group: :aws
-gem "unf", group: :aws
+gem "fog", "~> 1.14"
+gem "unf"
 
 # Authorization
 gem "six"
diff --git a/config/gitlab.yml.example b/config/gitlab.yml.example
index 3092ebf345049e1600def2f53ff2a55e14254f98..ad1db4b4f433774610952778f27ce61aa441b3db 100644
--- a/config/gitlab.yml.example
+++ b/config/gitlab.yml.example
@@ -212,6 +212,14 @@ production: &base
   backup:
     path: "tmp/backups"   # Relative paths are relative to Rails.root (default: tmp/backups/)
     # keep_time: 604800   # default: 0 (forever) (in seconds)
+    # upload:
+    #   # Fog storage connection settings, see http://fog.io/storage/ .
+    #   connection:
+    #     provider: AWS
+    #     aws_access_key_id: AKIAKIAKI
+    #     aws_secret_access_key: 'secret123'
+    #   # The remote 'directory' to store your backups. For S3, this would be the bucket name.
+    #   remote_directory: 'my.s3.bucket'
 
   ## GitLab Shell settings
   gitlab_shell:
diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb
index 5b7e69fbc6af909432e0abc5df5faf278506313a..caf4ef25cdd503e7a826039f30ddf9a9a29d17c4 100644
--- a/config/initializers/1_settings.rb
+++ b/config/initializers/1_settings.rb
@@ -129,6 +129,11 @@ def verify_constant(modul, current, default)
 Settings['backup'] ||= Settingslogic.new({})
 Settings.backup['keep_time']  ||= 0
 Settings.backup['path']         = File.expand_path(Settings.backup['path'] || "tmp/backups/", Rails.root)
+Settings.backup['upload'] ||= Settingslogic.new({'remote_directory' => nil, 'connection' => nil})
+# Convert upload connection settings to use symbol keys, to make Fog happy
+if Settings.backup['upload']['connection']
+  Settings.backup['upload']['connection'] = Hash[Settings.backup['upload']['connection'].map { |k, v| [k.to_sym, v] }]
+end
 
 #
 # Git
diff --git a/doc/raketasks/backup_restore.md b/doc/raketasks/backup_restore.md
index beb2c33a67b494480e7900f672fa6a531b10fb98..06ec88c3d5868614ddc6487ce2e0879731976522 100644
--- a/doc/raketasks/backup_restore.md
+++ b/doc/raketasks/backup_restore.md
@@ -46,6 +46,105 @@ Deleting tmp directories...[DONE]
 Deleting old backups... [SKIPPING]
 ```
 
+## Upload backups to remote (cloud) storage
+
+Starting with GitLab 7.4 you can let the backup script upload the '.tar' file
+it creates to cloud storage using [Fog](http://fog.io/storage/). In the example
+below we use Amazon S3 for storage, but Fog also lets you use other storage
+backends; see http://fog.io/storage/ .
+
+For omnibus packages:
+
+```ruby
+gitlab_rails['backup_upload_connection'] = {
+  'provider' => 'AWS',
+  'aws_access_key_id' => 'AKIAKIAKI',
+  'aws_secret_access_key' => 'secret123'
+}
+gitlab_rails['backup_upload_remote_directory'] = 'my.s3.bucket'
+```
+
+For installations from source:
+
+```yaml
+  backup:
+    # snip
+    upload:
+      # Fog storage connection settings, see http://fog.io/storage/ .
+      connection:
+        provider: AWS
+        aws_access_key_id: AKIAKIAKI
+        aws_secret_access_key: 'secret123'
+      # The remote 'directory' to store your backups. For S3, this would be the bucket name.
+      remote_directory: 'my.s3.bucket'
+```
+
+If you are uploading your backups to S3 you will probably want to create a new
+IAM user with restricted access rights. To give the upload user access only for
+uploading backups, create the following three policies, replacing `my.s3.bucket`
+with the name of your bucket:
+
+```json
+{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Sid": "Stmt1411994999",
+      "Effect": "Allow",
+      "Action": [
+        "s3:AbortMultipartUpload",
+        "s3:GetBucketAcl",
+        "s3:GetBucketLocation",
+        "s3:GetObject",
+        "s3:GetObjectAcl",
+        "s3:ListMultipartUploadParts",
+        "s3:PutObject",
+        "s3:PutObjectAcl"
+      ],
+      "Resource": [
+        "arn:aws:s3:::my.s3.bucket/*"
+      ]
+    }
+  ]
+}
+```
+
+```json
+{
+  "Version": "2014-09-29",
+  "Statement": [
+    {
+      "Sid": "Stmt1411995081",
+      "Effect": "Allow",
+      "Action": [
+        "s3:ListAllMyBuckets", "s3:GetBucketLocation"
+      ],
+      "Resource": [
+        "*"
+      ]
+    }
+  ]
+}
+```
+
+```json
+{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Sid": "Stmt1411995608",
+      "Effect": "Allow",
+      "Action": [
+        "s3:ListBucket"
+      ],
+      "Resource": [
+        "arn:aws:s3:::my.s3.bucket"
+      ]
+    }
+  ]
+}
+```
+
 ## Storing configuration files
 
 Please be informed that a backup does not store your configuration files.
diff --git a/lib/backup/manager.rb b/lib/backup/manager.rb
index 28e323fe30d148f04b70a9ac7f6a0e68aa1818f3..908f10a41380462ffb5e3313e8d125a8c69bb6aa 100644
--- a/lib/backup/manager.rb
+++ b/lib/backup/manager.rb
@@ -9,6 +9,7 @@ def pack
       s[:backup_created_at]  = Time.now
       s[:gitlab_version]     = Gitlab::VERSION
       s[:tar_version]        = tar_version
+      tar_file = "#{s[:backup_created_at].to_i}_gitlab_backup.tar"
 
       Dir.chdir(Gitlab.config.backup.path)
 
@@ -17,8 +18,29 @@ def pack
       end
 
       # create archive
-      print "Creating backup archive: #{s[:backup_created_at].to_i}_gitlab_backup.tar ... "
-      if Kernel.system('tar', '-cf', "#{s[:backup_created_at].to_i}_gitlab_backup.tar", *BACKUP_CONTENTS)
+      print "Creating backup archive: #{tar_file} ... "
+      if Kernel.system('tar', '-cf', tar_file, *BACKUP_CONTENTS)
+        puts "done".green
+      else
+        puts "failed".red
+      end
+
+      upload(tar_file)
+    end
+
+    def upload(tar_file)
+      remote_directory = Gitlab.config.backup.upload.remote_directory
+      print "Uploading backup archive to remote storage #{remote_directory} ... "
+
+      connection_settings = Gitlab.config.backup.upload.connection
+      if connection_settings.blank?
+        puts "skipped".yellow
+        return
+      end
+
+      connection = ::Fog::Storage.new(connection_settings)
+      directory = connection.directories.get(remote_directory)
+      if directory.files.create(key: tar_file, body: File.open(tar_file), public: false)
         puts "done".green
       else
         puts "failed".red