From bca885eb0e8a7db84ad888e39899dcfa033355eb Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Thiago=20Figueir=C3=B3?= <tfigueiro@gitlab.com>
Date: Mon, 19 Sep 2022 02:38:12 +0000
Subject: [PATCH] Fix Layout/FirstArrayElementIndentation offenses (part 8)

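Resolve existing offenses for the Layout/FirstArrayElementIndentation
cop and remove the corresponding entries from
.rubocop_todo/layout/first_array_element_indentation.yml.

As an illustration of the rewrite applied throughout, here is a
simplified sketch with made-up names (items, first_item, second_item);
it is not a hunk taken from this patch. Code of this shape:

    # hypothetical example, not from this patch
    expect(items).to match_array([
      first_item,
      second_item
    ])

becomes:

    expect(items).to match_array(
      [
        first_item,
        second_item
      ])

so the array literal starts on its own line and its elements take a
single indentation step from the start of the statement, matching the
layout applied in the hunks below.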
---
 .../first_array_element_indentation.yml       |  16 -
 ee/lib/ee/api/helpers/award_emoji.rb          |   4 +-
 .../escalation_policy/create_spec.rb          |   9 +-
 .../bulk_epic_aggregate_loader_spec.rb        |   6 +-
 ee/spec/models/snippet_repository_spec.rb     |  41 +--
 .../base_service_spec.rb                      |   6 +-
 ee/spec/services/search_service_spec.rb       |  16 +-
 .../ingestion/tasks/hooks_execution_spec.rb   |   9 +-
 ...process_scan_result_policy_service_spec.rb |   6 +-
 .../store_findings_metadata_service_spec.rb   |  35 ++-
 .../services/timebox_report_service_spec.rb   | 296 +++++++++---------
 .../user_permissions/export_service_spec.rb   |   7 +-
 .../services/search_notes_shared_examples.rb  |   4 +-
 .../geo/scheduler/scheduler_worker_spec.rb    |  22 +-
 lib/event_filter.rb                           |  25 +-
 lib/gitlab/database/migration_helpers.rb      |  15 +-
 .../message/in_product_marketing/team.rb      |  14 +-
 17 files changed, 267 insertions(+), 264 deletions(-)

diff --git a/.rubocop_todo/layout/first_array_element_indentation.yml b/.rubocop_todo/layout/first_array_element_indentation.yml
index b0d405482d4a6..f82321c518f59 100644
--- a/.rubocop_todo/layout/first_array_element_indentation.yml
+++ b/.rubocop_todo/layout/first_array_element_indentation.yml
@@ -2,22 +2,6 @@
 # Cop supports --auto-correct.
 Layout/FirstArrayElementIndentation:
   Exclude:
-    - 'ee/lib/ee/api/helpers/award_emoji.rb'
-    - 'ee/spec/graphql/mutations/incident_management/escalation_policy/create_spec.rb'
-    - 'ee/spec/lib/gitlab/graphql/loaders/bulk_epic_aggregate_loader_spec.rb'
-    - 'ee/spec/models/snippet_repository_spec.rb'
-    - 'ee/spec/services/protected_environments/base_service_spec.rb'
-    - 'ee/spec/services/search_service_spec.rb'
-    - 'ee/spec/services/security/ingestion/tasks/hooks_execution_spec.rb'
-    - 'ee/spec/services/security/security_orchestration_policies/process_scan_result_policy_service_spec.rb'
-    - 'ee/spec/services/security/store_findings_metadata_service_spec.rb'
-    - 'ee/spec/services/timebox_report_service_spec.rb'
-    - 'ee/spec/services/user_permissions/export_service_spec.rb'
-    - 'ee/spec/support/shared_examples/services/search_notes_shared_examples.rb'
-    - 'ee/spec/workers/geo/scheduler/scheduler_worker_spec.rb'
-    - 'lib/event_filter.rb'
-    - 'lib/gitlab/database/migration_helpers.rb'
-    - 'lib/gitlab/email/message/in_product_marketing/team.rb'
     - 'lib/gitlab/email/message/in_product_marketing/trial.rb'
     - 'lib/gitlab/email/message/in_product_marketing/verify.rb'
     - 'lib/gitlab/import_export/base/relation_factory.rb'
diff --git a/ee/lib/ee/api/helpers/award_emoji.rb b/ee/lib/ee/api/helpers/award_emoji.rb
index e9649fd42034e..70311511a6352 100644
--- a/ee/lib/ee/api/helpers/award_emoji.rb
+++ b/ee/lib/ee/api/helpers/award_emoji.rb
@@ -13,8 +13,8 @@ module AwardEmoji
           override :awardables
           def awardables
             super.concat([
-              { type: 'epic', resource: :groups, find_by: :iid, feature_category: :portfolio_management }
-            ])
+                           { type: 'epic', resource: :groups, find_by: :iid, feature_category: :portfolio_management }
+                         ])
           end
 
           override :awardable_id_desc
diff --git a/ee/spec/graphql/mutations/incident_management/escalation_policy/create_spec.rb b/ee/spec/graphql/mutations/incident_management/escalation_policy/create_spec.rb
index 4c36ab1e8b1ba..f8f6060fa045a 100644
--- a/ee/spec/graphql/mutations/incident_management/escalation_policy/create_spec.rb
+++ b/ee/spec/graphql/mutations/incident_management/escalation_policy/create_spec.rb
@@ -70,10 +70,11 @@
           rules = resolve[:escalation_policy].rules
 
           expect(rules.size).to eq(2)
-          expect(rules).to match_array([
-            have_attributes(oncall_schedule_id: oncall_schedule.id, user: nil, elapsed_time_seconds: 300, status: 'acknowledged'),
-            have_attributes(oncall_schedule_id: nil, user: current_user, elapsed_time_seconds: 600, status: 'resolved')
-          ])
+          expect(rules).to match_array(
+            [
+              have_attributes(oncall_schedule_id: oncall_schedule.id, user: nil, elapsed_time_seconds: 300, status: 'acknowledged'),
+              have_attributes(oncall_schedule_id: nil, user: current_user, elapsed_time_seconds: 600, status: 'resolved')
+            ])
         end
 
         context 'rules are missing' do
diff --git a/ee/spec/lib/gitlab/graphql/loaders/bulk_epic_aggregate_loader_spec.rb b/ee/spec/lib/gitlab/graphql/loaders/bulk_epic_aggregate_loader_spec.rb
index 72adfd615bde7..7a18203811949 100644
--- a/ee/spec/lib/gitlab/graphql/loaders/bulk_epic_aggregate_loader_spec.rb
+++ b/ee/spec/lib/gitlab/graphql/loaders/bulk_epic_aggregate_loader_spec.rb
@@ -62,9 +62,9 @@
     it 'sums all the weights, even confidential, or in private groups' do
       expected_result = {
         parent_epic.id => [
-            result_for(parent_epic, issues_state: OPENED_ISSUE_STATE, issues_count: 5, issues_weight_sum: 4),
-            result_for(parent_epic, issues_state: CLOSED_ISSUE_STATE, issues_count: 1, issues_weight_sum: 1)
-          ],
+          result_for(parent_epic, issues_state: OPENED_ISSUE_STATE, issues_count: 5, issues_weight_sum: 4),
+          result_for(parent_epic, issues_state: CLOSED_ISSUE_STATE, issues_count: 1, issues_weight_sum: 1)
+        ],
         epic_with_issues.id => [
           result_for(epic_with_issues, issues_state: OPENED_ISSUE_STATE, issues_count: 5, issues_weight_sum: 4),
           result_for(epic_with_issues, issues_state: CLOSED_ISSUE_STATE, issues_count: 1, issues_weight_sum: 1)
diff --git a/ee/spec/models/snippet_repository_spec.rb b/ee/spec/models/snippet_repository_spec.rb
index 3fd4fd4f3ec13..77c56973e9e63 100644
--- a/ee/spec/models/snippet_repository_spec.rb
+++ b/ee/spec/models/snippet_repository_spec.rb
@@ -78,25 +78,27 @@
 
     describe '#replicables_for_current_secondary' do
       it 'returns all snippet_repositories without selective sync' do
-        expect(described_class.replicables_for_current_secondary(1..described_class.last.id)).to match_array([
-          snippet_repository_1,
-          snippet_repository_2,
-          snippet_repository_3,
-          snippet_repository_4,
-          snippet_repository_5
-        ])
+        expect(described_class.replicables_for_current_secondary(1..described_class.last.id)).to match_array(
+          [
+            snippet_repository_1,
+            snippet_repository_2,
+            snippet_repository_3,
+            snippet_repository_4,
+            snippet_repository_5
+          ])
       end
 
       context 'with selective sync by namespace' do
         it 'returns snippet_repositories that belong to the namespaces + personal snippets' do
           node.update!(selective_sync_type: 'namespaces', namespaces: [group_1])
 
-          expect(described_class.replicables_for_current_secondary(1..described_class.last.id)).to match_array([
-            snippet_repository_1,
-            snippet_repository_2,
-            snippet_repository_3,
-            snippet_repository_4
-          ])
+          expect(described_class.replicables_for_current_secondary(1..described_class.last.id)).to match_array(
+            [
+              snippet_repository_1,
+              snippet_repository_2,
+              snippet_repository_3,
+              snippet_repository_4
+            ])
         end
       end
 
@@ -104,12 +106,13 @@
         it 'returns snippet_repositories that belong to the shards' do
           node.update!(selective_sync_type: 'shards', selective_sync_shards: ['default'])
 
-          expect(described_class.replicables_for_current_secondary(1..described_class.last.id)).to match_array([
-            snippet_repository_1,
-            snippet_repository_2,
-            snippet_repository_4,
-            snippet_repository_5
-          ])
+          expect(described_class.replicables_for_current_secondary(1..described_class.last.id)).to match_array(
+            [
+              snippet_repository_1,
+              snippet_repository_2,
+              snippet_repository_4,
+              snippet_repository_5
+            ])
         end
       end
 
diff --git a/ee/spec/services/protected_environments/base_service_spec.rb b/ee/spec/services/protected_environments/base_service_spec.rb
index 7bffc12640dc8..e35c1721db1c2 100644
--- a/ee/spec/services/protected_environments/base_service_spec.rb
+++ b/ee/spec/services/protected_environments/base_service_spec.rb
@@ -68,13 +68,13 @@
           it 'includes invited groups' do
             is_expected.to eq(
               deploy_access_levels_attributes: [
-               { group_id: group.id },
-               { group_id: linked_group.id }
+                { group_id: group.id },
+                { group_id: linked_group.id }
               ],
               approval_rules_attributes: [
                 { group_id: group.id },
                 { group_id: linked_group.id }
-               ]
+              ]
             )
           end
         end
diff --git a/ee/spec/services/search_service_spec.rb b/ee/spec/services/search_service_spec.rb
index 8a50c32232468..a0f0cadfa2617 100644
--- a/ee/spec/services/search_service_spec.rb
+++ b/ee/spec/services/search_service_spec.rb
@@ -125,9 +125,10 @@
                   message: "redacted_search_results",
                   current_user_id: user.id,
                   query: 'some-search-string',
-                  filtered: array_including([
-                    { class_name: "Project", id: unauthorized_project.id, ability: :read_project }
-                  ])))
+                  filtered: array_including(
+                    [
+                      { class_name: "Project", id: unauthorized_project.id, ability: :read_project }
+                    ])))
 
         expect(subject).to be_kind_of(Kaminari::PaginatableArray)
         expect(subject).to contain_exactly(project)
@@ -154,10 +155,11 @@
                   message: "redacted_search_results",
                   current_user_id: user.id,
                   query: 'some-search-string',
-                  filtered: array_including([
-                    { class_name: "Issue", id: issue1_in_unauthorized_project.id, ability: :read_issue },
-                    { class_name: "Issue", id: issue2_in_unauthorized_project.id, ability: :read_issue }
-                  ])))
+                  filtered: array_including(
+                    [
+                      { class_name: "Issue", id: issue1_in_unauthorized_project.id, ability: :read_issue },
+                      { class_name: "Issue", id: issue2_in_unauthorized_project.id, ability: :read_issue }
+                    ])))
 
         expect(subject).to be_kind_of(Kaminari::PaginatableArray)
         expect(subject).to contain_exactly(issue_in_project)
diff --git a/ee/spec/services/security/ingestion/tasks/hooks_execution_spec.rb b/ee/spec/services/security/ingestion/tasks/hooks_execution_spec.rb
index 75e157bb7aafc..d41f955a7d14a 100644
--- a/ee/spec/services/security/ingestion/tasks/hooks_execution_spec.rb
+++ b/ee/spec/services/security/ingestion/tasks/hooks_execution_spec.rb
@@ -21,10 +21,11 @@
         allow(vulnerability).to receive(:execute_hooks)
       end
 
-      allow(Vulnerability).to receive(:where).with(id: vulnerabilities[0..1].map(&:id)).and_return([
-        vulnerabilities[0],
-        vulnerabilities[1]
-      ])
+      allow(Vulnerability).to receive(:where).with(id: vulnerabilities[0..1].map(&:id)).and_return(
+        [
+          vulnerabilities[0],
+          vulnerabilities[1]
+        ])
 
       ingest_finding_remediations
     end
diff --git a/ee/spec/services/security/security_orchestration_policies/process_scan_result_policy_service_spec.rb b/ee/spec/services/security/security_orchestration_policies/process_scan_result_policy_service_spec.rb
index e03b59f648cab..a6531b0f05e1e 100644
--- a/ee/spec/services/security/security_orchestration_policies/process_scan_result_policy_service_spec.rb
+++ b/ee/spec/services/security/security_orchestration_policies/process_scan_result_policy_service_spec.rb
@@ -126,9 +126,9 @@
 
       where(:rules_count, :expected_rules_count) do
         [
-         [Security::ScanResultPolicy::LIMIT - 1, Security::ScanResultPolicy::LIMIT - 1],
-         [Security::ScanResultPolicy::LIMIT, Security::ScanResultPolicy::LIMIT],
-         [Security::ScanResultPolicy::LIMIT + 1, Security::ScanResultPolicy::LIMIT]
+          [Security::ScanResultPolicy::LIMIT - 1, Security::ScanResultPolicy::LIMIT - 1],
+          [Security::ScanResultPolicy::LIMIT, Security::ScanResultPolicy::LIMIT],
+          [Security::ScanResultPolicy::LIMIT + 1, Security::ScanResultPolicy::LIMIT]
         ]
       end
 
diff --git a/ee/spec/services/security/store_findings_metadata_service_spec.rb b/ee/spec/services/security/store_findings_metadata_service_spec.rb
index e271487d7abe0..ac25fd9c83681 100644
--- a/ee/spec/services/security/store_findings_metadata_service_spec.rb
+++ b/ee/spec/services/security/store_findings_metadata_service_spec.rb
@@ -48,23 +48,24 @@
         store_findings
 
         expect(security_scan.findings.reload.as_json(only: [:partition_number, :uuid, :deduplicated]))
-          .to match_array([
-            {
-              "partition_number" => findings_partition_number,
-              "uuid" => security_finding_1.uuid,
-              "deduplicated" => true
-            },
-            {
-              "partition_number" => findings_partition_number,
-              "uuid" => security_finding_2.uuid,
-              "deduplicated" => false
-            },
-            {
-              "partition_number" => findings_partition_number,
-              "uuid" => security_finding_3.uuid,
-              "deduplicated" => true
-            }
-          ])
+          .to match_array(
+            [
+              {
+                "partition_number" => findings_partition_number,
+                "uuid" => security_finding_1.uuid,
+                "deduplicated" => true
+              },
+              {
+                "partition_number" => findings_partition_number,
+                "uuid" => security_finding_2.uuid,
+                "deduplicated" => false
+              },
+              {
+                "partition_number" => findings_partition_number,
+                "uuid" => security_finding_3.uuid,
+                "deduplicated" => true
+              }
+            ])
       end
 
       context 'when the scanners already exist in the database' do
diff --git a/ee/spec/services/timebox_report_service_spec.rb b/ee/spec/services/timebox_report_service_spec.rb
index 969b2aea511b6..9f66a432d2776 100644
--- a/ee/spec/services/timebox_report_service_spec.rb
+++ b/ee/spec/services/timebox_report_service_spec.rb
@@ -65,15 +65,16 @@
         total: { count: 4, weight: 10 }
       })
 
-      expect(response.payload[:burnup_time_series]).to eq([
-        {
-          date: timebox_start_date,
-          scope_count: 4,
-          scope_weight: 10,
-          completed_count: 2,
-          completed_weight: 7
-        }
-      ])
+      expect(response.payload[:burnup_time_series]).to eq(
+        [
+          {
+            date: timebox_start_date,
+            scope_count: 4,
+            scope_weight: 10,
+            completed_count: 2,
+            completed_weight: 7
+          }
+        ])
     end
 
     context 'when events have the same timestamp for created_at', :aggregate_failures do
@@ -99,15 +100,16 @@
           total: { count: 1, weight: 0 }
         })
 
-        expect(response.payload[:burnup_time_series]).to eq([
-          {
-            date: timebox_start_date,
-            scope_count: 1,
-            scope_weight: 0,
-            completed_count: 0,
-            completed_weight: 0
-          }
-        ])
+        expect(response.payload[:burnup_time_series]).to eq(
+          [
+            {
+              date: timebox_start_date,
+              scope_count: 1,
+              scope_weight: 0,
+              completed_count: 0,
+              completed_weight: 0
+            }
+          ])
       end
     end
 
@@ -147,36 +149,37 @@
         incomplete: { count: 1, weight: 0 },
         total: { count: 1, weight: 0 }
       })
-      expect(response.payload[:burnup_time_series]).to eq([
-        {
-          date: timebox_start_date + 4.days,
-          scope_count: 2,
-          scope_weight: 2,
-          completed_count: 0,
-          completed_weight: 0
-        },
-        {
-          date: timebox_start_date + 5.days,
-          scope_count: 3,
-          scope_weight: 5,
-          completed_count: 1,
-          completed_weight: 3
-        },
-        {
-          date: timebox_start_date + 6.days,
-          scope_count: 2,
-          scope_weight: 3,
-          completed_count: 1,
-          completed_weight: 3
-        },
-        {
-          date: timebox_start_date + 7.days,
-          scope_count: 1,
-          scope_weight: 0,
-          completed_count: 0,
-          completed_weight: 0
-        }
-      ])
+      expect(response.payload[:burnup_time_series]).to eq(
+        [
+          {
+            date: timebox_start_date + 4.days,
+            scope_count: 2,
+            scope_weight: 2,
+            completed_count: 0,
+            completed_weight: 0
+          },
+          {
+            date: timebox_start_date + 5.days,
+            scope_count: 3,
+            scope_weight: 5,
+            completed_count: 1,
+            completed_weight: 3
+          },
+          {
+            date: timebox_start_date + 6.days,
+            scope_count: 2,
+            scope_weight: 3,
+            completed_count: 1,
+            completed_weight: 3
+          },
+          {
+            date: timebox_start_date + 7.days,
+            scope_count: 1,
+            scope_weight: 0,
+            completed_count: 0,
+            completed_weight: 0
+          }
+        ])
     end
 
     it 'updates the completed counts when issue state is changed' do
@@ -213,57 +216,58 @@
         incomplete: { count: 1, weight: 2 },
         total: { count: 1, weight: 2 }
       })
-      expect(response.payload[:burnup_time_series]).to eq([
-        {
-          date: timebox_start_date,
-          scope_count: 1,
-          scope_weight: 2,
-          completed_count: 0,
-          completed_weight: 0
-        },
-        {
-          date: timebox_start_date + 1.day,
-          scope_count: 1,
-          scope_weight: 2,
-          completed_count: 1,
-          completed_weight: 2
-        },
-        {
-          date: timebox_start_date + 3.days,
-          scope_count: 1,
-          scope_weight: 2,
-          completed_count: 0,
-          completed_weight: 0
-        },
-        {
-          date: timebox_start_date + 4.days,
-          scope_count: 2,
-          scope_weight: 5,
-          completed_count: 0,
-          completed_weight: 0
-        },
-        {
-          date: timebox_start_date + 5.days,
-          scope_count: 2,
-          scope_weight: 5,
-          completed_count: 0,
-          completed_weight: 0
-        },
-        {
-          date: timebox_start_date + 7.days,
-          scope_count: 2,
-          scope_weight: 5,
-          completed_count: 1,
-          completed_weight: 3
-        },
-        {
-          date: timebox_start_date + 8.days,
-          scope_count: 1,
-          scope_weight: 2,
-          completed_count: 0,
-          completed_weight: 0
-        }
-      ])
+      expect(response.payload[:burnup_time_series]).to eq(
+        [
+          {
+            date: timebox_start_date,
+            scope_count: 1,
+            scope_weight: 2,
+            completed_count: 0,
+            completed_weight: 0
+          },
+          {
+            date: timebox_start_date + 1.day,
+            scope_count: 1,
+            scope_weight: 2,
+            completed_count: 1,
+            completed_weight: 2
+          },
+          {
+            date: timebox_start_date + 3.days,
+            scope_count: 1,
+            scope_weight: 2,
+            completed_count: 0,
+            completed_weight: 0
+          },
+          {
+            date: timebox_start_date + 4.days,
+            scope_count: 2,
+            scope_weight: 5,
+            completed_count: 0,
+            completed_weight: 0
+          },
+          {
+            date: timebox_start_date + 5.days,
+            scope_count: 2,
+            scope_weight: 5,
+            completed_count: 0,
+            completed_weight: 0
+          },
+          {
+            date: timebox_start_date + 7.days,
+            scope_count: 2,
+            scope_weight: 5,
+            completed_count: 1,
+            completed_weight: 3
+          },
+          {
+            date: timebox_start_date + 8.days,
+            scope_count: 1,
+            scope_weight: 2,
+            completed_count: 0,
+            completed_weight: 0
+          }
+        ])
     end
 
     it 'updates the weight totals when issue weight is changed' do
@@ -289,43 +293,44 @@
         incomplete: { count: 0, weight: 0 },
         total: { count: 1, weight: 1 }
       })
-      expect(response.payload[:burnup_time_series]).to eq([
-        {
-          date: timebox_start_date,
-          scope_count: 1,
-          scope_weight: 0,
-          completed_count: 0,
-          completed_weight: 0
-        },
-        {
-          date: timebox_start_date + 1.day,
-          scope_count: 1,
-          scope_weight: 2,
-          completed_count: 0,
-          completed_weight: 0
-        },
-        {
-          date: timebox_start_date + 2.days,
-          scope_count: 2,
-          scope_weight: 7,
-          completed_count: 1,
-          completed_weight: 5
-        },
-        {
-          date: timebox_start_date + 3.days,
-          scope_count: 2,
-          scope_weight: 3,
-          completed_count: 1,
-          completed_weight: 1
-        },
-        {
-          date: timebox_start_date + 4.days,
-          scope_count: 1,
-          scope_weight: 1,
-          completed_count: 1,
-          completed_weight: 1
-        }
-      ])
+      expect(response.payload[:burnup_time_series]).to eq(
+        [
+          {
+            date: timebox_start_date,
+            scope_count: 1,
+            scope_weight: 0,
+            completed_count: 0,
+            completed_weight: 0
+          },
+          {
+            date: timebox_start_date + 1.day,
+            scope_count: 1,
+            scope_weight: 2,
+            completed_count: 0,
+            completed_weight: 0
+          },
+          {
+            date: timebox_start_date + 2.days,
+            scope_count: 2,
+            scope_weight: 7,
+            completed_count: 1,
+            completed_weight: 5
+          },
+          {
+            date: timebox_start_date + 3.days,
+            scope_count: 2,
+            scope_weight: 3,
+            completed_count: 1,
+            completed_weight: 1
+          },
+          {
+            date: timebox_start_date + 4.days,
+            scope_count: 1,
+            scope_weight: 1,
+            completed_count: 1,
+            completed_weight: 1
+          }
+        ])
     end
 
     context 'when timebox is removed and then added back' do
@@ -405,15 +410,16 @@
             total: { count: expected_count, weight: expected_weight }
           })
 
-          expect(response.payload[:burnup_time_series]).to eq([
-            {
-              date: timebox_start_date,
-              scope_count: expected_count,
-              scope_weight: expected_weight,
-              completed_count: 0,
-              completed_weight: 0
-            }
-          ])
+          expect(response.payload[:burnup_time_series]).to eq(
+            [
+              {
+                date: timebox_start_date,
+                scope_count: expected_count,
+                scope_weight: expected_weight,
+                completed_count: 0,
+                completed_weight: 0
+              }
+            ])
         end
       end
     end
diff --git a/ee/spec/services/user_permissions/export_service_spec.rb b/ee/spec/services/user_permissions/export_service_spec.rb
index 393892f0c20c9..59fc527c74034 100644
--- a/ee/spec/services/user_permissions/export_service_spec.rb
+++ b/ee/spec/services/user_permissions/export_service_spec.rb
@@ -66,9 +66,10 @@
     end
 
     it 'includes the appropriate headers' do
-      expect(csv.headers).to eq([
-        'Username', 'Email', 'Type', 'Path', 'Access Level', 'Last Activity'
-      ])
+      expect(csv.headers).to eq(
+        [
+          'Username', 'Email', 'Type', 'Path', 'Access Level', 'Last Activity'
+        ])
     end
 
     specify 'Username' do
diff --git a/ee/spec/support/shared_examples/services/search_notes_shared_examples.rb b/ee/spec/support/shared_examples/services/search_notes_shared_examples.rb
index 8b108ba3a2ef9..024ec81537819 100644
--- a/ee/spec/support/shared_examples/services/search_notes_shared_examples.rb
+++ b/ee/spec/support/shared_examples/services/search_notes_shared_examples.rb
@@ -73,8 +73,8 @@
             ensure_elasticsearch_index!
 
             expected_objects = [
-                not_confidential_note, nil_confidential_note, confidential_note,
-                not_confidential_note_on_confidential_issue, confidential_note_on_confidential_issue
+              not_confidential_note, nil_confidential_note, confidential_note,
+              not_confidential_note_on_confidential_issue, confidential_note_on_confidential_issue
             ]
             expect_search_results(user, 'notes', expected_objects: expected_objects) do |user|
               described_class.new(user, search: 'note').execute
diff --git a/ee/spec/workers/geo/scheduler/scheduler_worker_spec.rb b/ee/spec/workers/geo/scheduler/scheduler_worker_spec.rb
index 151193dc0e3b2..af8f5a8997286 100644
--- a/ee/spec/workers/geo/scheduler/scheduler_worker_spec.rb
+++ b/ee/spec/workers/geo/scheduler/scheduler_worker_spec.rb
@@ -20,21 +20,23 @@
       it 'returns a batch of jobs' do
         expect(subject).to receive(:db_retrieve_batch_size).and_return(4)
 
-        expect(subject.send(:take_batch, a, b, c)).to eq([
-          [2, :lfs],
-          [3, :job_artifact],
-          [3, :lfs],
-          [8, :job_artifact]
-        ])
+        expect(subject.send(:take_batch, a, b, c)).to eq(
+          [
+            [2, :lfs],
+            [3, :job_artifact],
+            [3, :lfs],
+            [8, :job_artifact]
+          ])
       end
     end
 
     context 'with batch_size' do
       it 'returns a batch of jobs' do
-        expect(subject.send(:take_batch, a, b, c, batch_size: 2)).to eq([
-          [2, :lfs],
-          [3, :job_artifact]
-        ])
+        expect(subject.send(:take_batch, a, b, c, batch_size: 2)).to eq(
+          [
+            [2, :lfs],
+            [3, :job_artifact]
+          ])
       end
     end
   end
diff --git a/lib/event_filter.rb b/lib/event_filter.rb
index 8c3377fdb80e3..f14b0a6b9e7d7 100644
--- a/lib/event_filter.rb
+++ b/lib/event_filter.rb
@@ -131,18 +131,19 @@ def in_operator_params(array_data:, scope: nil, in_column: nil, in_values: nil,
     finder_query = -> (id_expression) { Event.where(Event.arel_table[:id].eq(id_expression)) }
 
     if order_hint_column.present?
-      order = Gitlab::Pagination::Keyset::Order.build([
-        Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
-          attribute_name: order_hint_column,
-          order_expression: Event.arel_table[order_hint_column].desc,
-          nullable: :nulls_last,
-          distinct: false
-        ),
-        Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
-          attribute_name: :id,
-          order_expression: Event.arel_table[:id].desc
-        )
-      ])
+      order = Gitlab::Pagination::Keyset::Order.build(
+        [
+          Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+            attribute_name: order_hint_column,
+            order_expression: Event.arel_table[order_hint_column].desc,
+            nullable: :nulls_last,
+            distinct: false
+          ),
+          Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+            attribute_name: :id,
+            order_expression: Event.arel_table[:id].desc
+          )
+        ])
 
       finder_query = -> (_order_hint, id_expression) { Event.where(Event.arel_table[:id].eq(id_expression)) }
     end
diff --git a/lib/gitlab/database/migration_helpers.rb b/lib/gitlab/database/migration_helpers.rb
index db39524f4f644..e574422ce11ac 100644
--- a/lib/gitlab/database/migration_helpers.rb
+++ b/lib/gitlab/database/migration_helpers.rb
@@ -936,13 +936,14 @@ def backfill_conversion_of_integer_to_bigint(
       def revert_backfill_conversion_of_integer_to_bigint(table, columns, primary_key: :id)
         columns = Array.wrap(columns)
 
-        conditions = ActiveRecord::Base.sanitize_sql([
-          'job_class_name = :job_class_name AND table_name = :table_name AND column_name = :column_name AND job_arguments = :job_arguments',
-          job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
-          table_name: table,
-          column_name: primary_key,
-          job_arguments: [columns, columns.map { |column| convert_to_bigint_column(column) }].to_json
-        ])
+        conditions = ActiveRecord::Base.sanitize_sql(
+          [
+            'job_class_name = :job_class_name AND table_name = :table_name AND column_name = :column_name AND job_arguments = :job_arguments',
+            job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
+            table_name: table,
+            column_name: primary_key,
+            job_arguments: [columns, columns.map { |column| convert_to_bigint_column(column) }].to_json
+          ])
 
         execute("DELETE FROM batched_background_migrations WHERE #{conditions}")
       end
diff --git a/lib/gitlab/email/message/in_product_marketing/team.rb b/lib/gitlab/email/message/in_product_marketing/team.rb
index 6a0471ef9c578..ca99dd12c8e92 100644
--- a/lib/gitlab/email/message/in_product_marketing/team.rb
+++ b/lib/gitlab/email/message/in_product_marketing/team.rb
@@ -42,18 +42,18 @@ def body_line1
               [
                 s_('InProductMarketing|Did you know teams that use GitLab are far more efficient?'),
                 list([
-                  s_('InProductMarketing|Goldman Sachs went from 1 build every two weeks to thousands of builds a day'),
-                  s_('InProductMarketing|Ticketmaster decreased their CI build time by 15X')
-                ])
+                       s_('InProductMarketing|Goldman Sachs went from 1 build every two weeks to thousands of builds a day'),
+                       s_('InProductMarketing|Ticketmaster decreased their CI build time by 15X')
+                     ])
               ].join("\n"),
               s_("InProductMarketing|We know a thing or two about efficiency and we don't want to keep that to ourselves. Sign up for a free trial of GitLab Ultimate and your teams will be on it from day one."),
               [
                 s_('InProductMarketing|Stop wondering and use GitLab to answer questions like:'),
                 list([
-                  s_('InProductMarketing|How long does it take us to close issues/MRs by types like feature requests, bugs, tech debt, security?'),
-                  s_('InProductMarketing|How many days does it take our team to complete various tasks?'),
-                  s_('InProductMarketing|What does our value stream timeline look like from product to development to review and production?')
-                ])
+                       s_('InProductMarketing|How long does it take us to close issues/MRs by types like feature requests, bugs, tech debt, security?'),
+                       s_('InProductMarketing|How many days does it take our team to complete various tasks?'),
+                       s_('InProductMarketing|What does our value stream timeline look like from product to development to review and production?')
+                     ])
               ].join("\n")
             ][series]
           end
-- 
GitLab