From c4d5c77c87494bb320fa7fdf19b0e4d7d52af1d1 Mon Sep 17 00:00:00 2001
From: Kassio Borges <kborges@gitlab.com>
Date: Mon, 24 Apr 2023 22:18:47 +0000
Subject: [PATCH] Remove Settingslogic gem

- For better backward compatibility, use string keys on
  GitlabSettings::Options
- Remove all Settingslogic references from code (but not from comments)
- Ensure GitlabSettings::Options is used when pushing new options to
  Array values
- Instead of inheriting from Hash, wrap a hash in
  GitlabSettings::Options and make it respond to everything that is
  required (a sketch of this approach follows the list)
    * PROS: This gives us more autonomy over what needs to be done
    * CONS: Some places were checking for Hash; we need to tackle
      those scenarios
- Ensure deep_merge returns nested Options
- Pass the _raw_ version of the options to Fog::Storage
- Ensure the required active_support libraries are loaded
- Show the option name in the GitlabSettings::MissingSetting error
- Add a `source` option holding the path of `gitlab.yml`
- Fix GitlabSettings::Options#deep_merge and
  GitlabSettings::Options#merge to return a new GitlabSettings::Options
  object with the options deep-merged/merged
- Add GitlabSettings#reload! for backward compatibility
- Accept file/namespace as initializer arguments
- Avoid mutating Settings in sidekiq_cluster/cli tests
- Fix configuration stubbing
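
A minimal sketch of the wrapping approach described above (method and
class names follow the bullet points; the real code lives in
lib/gitlab_settings/options.rb and may differ in detail):

    # Illustrative only -- not the actual implementation.
    require 'active_support/core_ext/hash/keys'
    require 'active_support/core_ext/hash/deep_merge'

    module GitlabSettings
      MissingSetting = Class.new(StandardError)

      class Options
        # Build an Options object from a plain hash, normalizing to
        # string keys for backward compatibility with Settingslogic.
        def self.build(attributes)
          new(attributes.to_h.deep_stringify_keys)
        end

        def initialize(options)
          @options = options
        end

        def [](key)
          @options[key.to_s]
        end

        def []=(key, value)
          @options[key.to_s] = value
        end

        # deep_merge wraps the result in a new Options object so that
        # nested values keep behaving like settings.
        def deep_merge(other)
          self.class.build(@options.deep_merge(other.to_h))
        end

        def to_h
          @options
        end
        alias_method :to_hash, :to_h

        # Unknown readers fall through to the wrapped hash; a missing
        # key raises MissingSetting naming the option.
        def method_missing(name, *args)
          key = name.to_s.delete_suffix('=')

          if name.to_s.end_with?('=')
            @options[key] = args.first
          elsif @options.key?(key)
            value = @options[key]
            value.is_a?(Hash) ? self.class.build(value) : value
          else
            raise MissingSetting, "option '#{key}' not defined"
          end
        end

        def respond_to_missing?(_name, _include_all = false)
          true
        end
      end
    end

For example, GitlabSettings::Options.build('gitlab' => { 'host' => 'example.com' })
responds to both #gitlab.host and ['gitlab']['host'], and raises
GitlabSettings::MissingSetting for an unknown option.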

Related to: https://gitlab.com/gitlab-org/gitlab/-/issues/286873
---
 Gemfile                                       |   3 -
 Gemfile.lock                                  |   2 -
 config/initializers/1_settings.rb             | 386 +++++++++---------
 config/object_store_settings.rb               |  22 +-
 config/settings.rb                            | 354 ++++++++--------
 config/smime_signature_settings.rb            |   2 +-
 lib/banzai/filter/asset_proxy_filter.rb       |   2 +-
 lib/gitlab/consul/internal.rb                 |   2 +-
 lib/gitlab/omniauth_initializer.rb            |   2 +-
 lib/gitlab/prometheus/internal.rb             |   4 +-
 lib/gitlab/sidekiq_config.rb                  |   7 +-
 lib/gitlab/tracking.rb                        |   2 +-
 .../tracking/destinations/snowplow_micro.rb   |   2 +-
 lib/gitlab/url_blocker.rb                     |   3 +-
 lib/gitlab_settings.rb                        |  17 +
 lib/gitlab_settings/options.rb                |  80 ++++
 lib/gitlab_settings/settings.rb               |  37 ++
 lib/object_storage/direct_upload.rb           |   2 +-
 metrics_server/metrics_server.rb              |   2 +-
 .../metrics_server/metrics_server_spec.rb     |   2 +-
 spec/commands/sidekiq_cluster/cli_spec.rb     |  46 +--
 spec/config/object_store_settings_spec.rb     |  16 +-
 spec/config/settings_spec.rb                  |   2 +-
 spec/config/smime_signature_settings_spec.rb  |   6 +-
 spec/initializers/settings_spec.rb            |   7 +-
 spec/lib/gitlab/auth/o_auth/provider_spec.rb  |   2 +-
 .../ci/components/instance_path_spec.rb       |   2 +-
 spec/lib/gitlab/consul/internal_spec.rb       |   2 +-
 .../project/create_service_spec.rb            |   2 +-
 .../legacy_github_import/client_spec.rb       |   4 +-
 spec/lib/gitlab/prometheus/internal_spec.rb   |   4 +-
 spec/lib/gitlab/sidekiq_config_spec.rb        |  21 +
 .../destinations/snowplow_micro_spec.rb       |   2 +-
 spec/lib/gitlab/tracking_spec.rb              |   2 +-
 spec/lib/gitlab/url_blocker_spec.rb           |   9 +-
 spec/lib/gitlab_settings/options_spec.rb      | 155 +++++++
 spec/lib/gitlab_settings/settings_spec.rb     |  53 +++
 spec/metrics_server/metrics_server_spec.rb    |   4 +-
 spec/models/instance_configuration_spec.rb    |   2 +-
 spec/support/helpers/stub_configuration.rb    |   6 +-
 spec/support/helpers/stub_object_storage.rb   |   4 +-
 spec/uploaders/object_storage/cdn_spec.rb     |   2 +-
 spec/uploaders/object_storage_spec.rb         |   4 +-
 43 files changed, 802 insertions(+), 488 deletions(-)
 create mode 100644 lib/gitlab_settings.rb
 create mode 100644 lib/gitlab_settings/options.rb
 create mode 100644 lib/gitlab_settings/settings.rb
 create mode 100644 spec/lib/gitlab_settings/options_spec.rb
 create mode 100644 spec/lib/gitlab_settings/settings_spec.rb

diff --git a/Gemfile b/Gemfile
index 232205824da7d..cc8f1fc3fd081 100644
--- a/Gemfile
+++ b/Gemfile
@@ -241,9 +241,6 @@ gem 'rainbow', '~> 3.0'
 # Progress bar
 gem 'ruby-progressbar', '~> 1.10'
 
-# GitLab settings
-gem 'settingslogic', '~> 2.0.9'
-
 # Linear-time regex library for untrusted regular expressions
 gem 're2', '~> 1.6.0'
 
diff --git a/Gemfile.lock b/Gemfile.lock
index a40830271f739..d4da63e996273 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -1418,7 +1418,6 @@ GEM
       sentry-ruby (~> 5.8.0)
       sidekiq (>= 3.0)
     set (1.0.1)
-    settingslogic (2.0.9)
     sexp_processor (4.16.1)
     shellany (0.0.1)
     shoulda-matchers (5.1.0)
@@ -1919,7 +1918,6 @@ DEPENDENCIES
   sentry-raven (~> 3.1)
   sentry-ruby (~> 5.8.0)
   sentry-sidekiq (~> 5.8.0)
-  settingslogic (~> 2.0.9)
   shoulda-matchers (~> 5.1.0)
   sidekiq (~> 6.5.7)
   sidekiq-cron (~> 1.8.0)
diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb
index 2b31c6c55d79c..5b2d7e6793de0 100644
--- a/config/initializers/1_settings.rb
+++ b/config/initializers/1_settings.rb
@@ -5,16 +5,16 @@
 require_relative '../smime_signature_settings'
 
 # Default settings
-Settings['shared'] ||= Settingslogic.new({})
+Settings['shared'] ||= {}
 Settings.shared['path'] = Settings.absolute(Settings.shared['path'] || "shared")
 
-Settings['encrypted_settings'] ||= Settingslogic.new({})
+Settings['encrypted_settings'] ||= {}
 Settings.encrypted_settings['path'] ||= File.join(Settings.shared['path'], "encrypted_settings")
 Settings.encrypted_settings['path'] = Settings.absolute(Settings.encrypted_settings['path'])
 
-Settings['ldap'] ||= Settingslogic.new({})
+Settings['ldap'] ||= {}
 Settings.ldap['enabled'] = false if Settings.ldap['enabled'].nil?
-Settings.ldap['servers'] ||= Settingslogic.new({})
+Settings.ldap['servers'] ||= {}
 Settings.ldap['prevent_ldap_sign_in'] = false if Settings.ldap['prevent_ldap_sign_in'].blank?
 Settings.ldap['secret_file'] = Settings.absolute(Settings.ldap['secret_file'] || File.join(Settings.encrypted_settings['path'], "ldap.yaml.enc"))
 
@@ -37,8 +37,6 @@
   end
 
   Settings.ldap['servers'].each do |key, server|
-    server = Settingslogic.new(server)
-
     server['label'] ||= 'LDAP'
     server['timeout'] ||= 10.seconds
     server['block_auto_created_users'] = false if server['block_auto_created_users'].nil?
@@ -87,7 +85,7 @@
   end
 end
 
-Settings['omniauth'] ||= Settingslogic.new({})
+Settings['omniauth'] ||= {}
 Settings.omniauth['enabled'] = true if Settings.omniauth['enabled'].nil?
 Settings.omniauth['auto_sign_in_with_provider'] = false if Settings.omniauth['auto_sign_in_with_provider'].nil?
 Settings.omniauth['allow_single_sign_on'] = false if Settings.omniauth['allow_single_sign_on'].nil?
@@ -114,9 +112,9 @@
 end
 
 Settings.omniauth['providers'] ||= []
-Settings.omniauth['cas3'] ||= Settingslogic.new({})
+Settings.omniauth['cas3'] ||= {}
 Settings.omniauth.cas3['session_duration'] ||= 8.hours
-Settings.omniauth['session_tickets'] ||= Settingslogic.new({})
+Settings.omniauth['session_tickets'] ||= {}
 Settings.omniauth.session_tickets['cas3'] = 'ticket'
 
 # Handle backward compatibility with the renamed kerberos_spnego provider
@@ -152,7 +150,7 @@
     github_settings['url'] = github_default_url
   end
 
-  github_settings["args"] ||= Settingslogic.new({})
+  github_settings["args"] ||= {}
 
   github_settings["args"]["client_options"] =
     if github_settings["url"].include?(github_default_url)
@@ -172,7 +170,7 @@
 end
 
 if Gitlab.ee? && Rails.env.test? && !saml_provider_enabled
-  Settings.omniauth.providers << Settingslogic.new({ 'name' => 'group_saml' })
+  Settings.omniauth.providers << GitlabSettings::Options.build({ 'name' => 'group_saml' })
 end
 
 Settings['issues_tracker'] ||= {}
@@ -180,7 +178,7 @@
 #
 # GitLab
 #
-Settings['gitlab'] ||= Settingslogic.new({})
+Settings['gitlab'] ||= {}
 Settings.gitlab['default_project_creation'] ||= ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS
 Settings.gitlab['default_project_deletion_protection'] ||= false
 Settings.gitlab['default_projects_limit'] ||= 100000
@@ -254,7 +252,7 @@
 # Elasticseacrh
 #
 Gitlab.ee do
-  Settings['elasticsearch'] ||= Settingslogic.new({})
+  Settings['elasticsearch'] ||= {}
   Settings.elasticsearch['enabled'] = false if Settings.elasticsearch['enabled'].nil?
   Settings.elasticsearch['url'] = ENV['ELASTIC_URL'] || "http://localhost:9200"
   Settings.elasticsearch['indexer_path'] ||= Gitlab::Utils.which('gitlab-elasticsearch-indexer')
@@ -263,7 +261,7 @@
 #
 # CI
 #
-Settings['gitlab_ci'] ||= Settingslogic.new({})
+Settings['gitlab_ci'] ||= {}
 Settings.gitlab_ci['shared_runners_enabled'] = true if Settings.gitlab_ci['shared_runners_enabled'].nil?
 Settings.gitlab_ci['builds_path']           = Settings.absolute(Settings.gitlab_ci['builds_path'] || "builds/")
 Settings.gitlab_ci['url']                 ||= Settings.__send__(:build_gitlab_ci_url)
@@ -272,7 +270,7 @@
 #
 # CI Secure Files
 #
-Settings['ci_secure_files'] ||= Settingslogic.new({})
+Settings['ci_secure_files'] ||= {}
 Settings.ci_secure_files['enabled']      = true if Settings.ci_secure_files['enabled'].nil?
 Settings.ci_secure_files['storage_path'] = Settings.absolute(Settings.ci_secure_files['storage_path'] || File.join(Settings.shared['path'], "ci_secure_files"))
 Settings.ci_secure_files['object_store'] = ObjectStoreSettings.legacy_parse(Settings.ci_secure_files['object_store'], 'secure_files')
@@ -280,7 +278,7 @@
 #
 # Reply by email
 #
-Settings['incoming_email'] ||= Settingslogic.new({})
+Settings['incoming_email'] ||= {}
 Settings.incoming_email['enabled'] = false if Settings.incoming_email['enabled'].nil?
 Settings.incoming_email['inbox_method'] ||= 'imap'
 Settings.incoming_email['encrypted_secret_file'] = Settings.absolute(Settings.incoming_email['encrypted_secret_file'] || File.join(Settings.encrypted_settings['path'], "incoming_email.yaml.enc"))
@@ -288,14 +286,14 @@
 #
 # Service desk email
 #
-Settings['service_desk_email'] ||= Settingslogic.new({})
+Settings['service_desk_email'] ||= {}
 Settings.service_desk_email['enabled'] = false if Settings.service_desk_email['enabled'].nil?
 Settings.service_desk_email['encrypted_secret_file'] = Settings.absolute(Settings.service_desk_email['encrypted_secret_file'] || File.join(Settings.encrypted_settings['path'], "service_desk_email.yaml.enc"))
 
 #
 # Build Artifacts
 #
-Settings['artifacts'] ||= Settingslogic.new({})
+Settings['artifacts'] ||= {}
 Settings.artifacts['enabled']      = true if Settings.artifacts['enabled'].nil?
 Settings.artifacts['storage_path'] = Settings.absolute(Settings.artifacts.values_at('path', 'storage_path').compact.first || File.join(Settings.shared['path'], "artifacts"))
 # Settings.artifact['path'] is deprecated, use `storage_path` instead
@@ -306,7 +304,7 @@
 #
 # Registry
 #
-Settings['registry'] ||= Settingslogic.new({})
+Settings['registry'] ||= {}
 Settings.registry['enabled'] ||= false
 Settings.registry['host'] ||= "example.com"
 Settings.registry['port'] ||= nil
@@ -320,7 +318,7 @@
 #
 # Error Reporting and Logging with Sentry
 #
-Settings['sentry'] ||= Settingslogic.new({})
+Settings['sentry'] ||= {}
 Settings.sentry['enabled'] ||= false
 Settings.sentry['dsn'] ||= nil
 Settings.sentry['environment'] ||= nil
@@ -329,7 +327,7 @@
 #
 # Pages
 #
-Settings['pages'] ||= Settingslogic.new({})
+Settings['pages'] ||= {}
 Settings['pages'] = ::Gitlab::Pages::Settings.new(Settings.pages) # For path access detection https://gitlab.com/gitlab-org/gitlab/-/issues/230702
 Settings.pages['enabled']           = false if Settings.pages['enabled'].nil?
 Settings.pages['access_control']    = false if Settings.pages['access_control'].nil?
@@ -347,14 +345,14 @@
 # this will allow us to easier migrate existing instances with NFS
 Settings.pages['storage_path']      = Settings.pages['path']
 Settings.pages['object_store']      = ObjectStoreSettings.legacy_parse(Settings.pages['object_store'], 'pages')
-Settings.pages['local_store'] ||= Settingslogic.new({})
+Settings.pages['local_store'] ||= {}
 Settings.pages['local_store']['path'] = Settings.absolute(Settings.pages['local_store']['path'] || File.join(Settings.shared['path'], "pages"))
 Settings.pages['local_store']['enabled'] = true if Settings.pages['local_store']['enabled'].nil?
 
 #
 # GitLab documentation
 #
-Settings['gitlab_docs'] ||= Settingslogic.new({})
+Settings['gitlab_docs'] ||= {}
 Settings.gitlab_docs['enabled'] ||= false
 Settings.gitlab_docs['host'] = nil unless Settings.gitlab_docs.enabled
 
@@ -362,28 +360,28 @@
 # Geo
 #
 Gitlab.ee do
-  Settings['geo'] ||= Settingslogic.new({})
+  Settings['geo'] ||= {}
   # For backwards compatibility, default to gitlab_url and if so, ensure it ends with "/"
   Settings.geo['node_name'] = Settings.geo['node_name'].presence || Settings.gitlab['url'].chomp('/').concat('/')
 
   #
   # Registry replication
   #
-  Settings.geo['registry_replication'] ||= Settingslogic.new({})
+  Settings.geo['registry_replication'] ||= {}
   Settings.geo.registry_replication['enabled'] ||= false
 end
 
 #
 # Unleash
 #
-Settings['feature_flags'] ||= Settingslogic.new({})
-Settings.feature_flags['unleash'] ||= Settingslogic.new({})
+Settings['feature_flags'] ||= {}
+Settings.feature_flags['unleash'] ||= {}
 Settings.feature_flags.unleash['enabled'] = false if Settings.feature_flags.unleash['enabled'].nil?
 
 #
 # External merge request diffs
 #
-Settings['external_diffs'] ||= Settingslogic.new({})
+Settings['external_diffs'] ||= {}
 Settings.external_diffs['enabled']      = false if Settings.external_diffs['enabled'].nil?
 Settings.external_diffs['when']         = 'always' if Settings.external_diffs['when'].nil?
 Settings.external_diffs['storage_path'] = Settings.absolute(Settings.external_diffs['storage_path'] || File.join(Settings.shared['path'], 'external-diffs'))
@@ -392,7 +390,7 @@
 #
 # Git LFS
 #
-Settings['lfs'] ||= Settingslogic.new({})
+Settings['lfs'] ||= {}
 Settings.lfs['enabled']      = true if Settings.lfs['enabled'].nil?
 Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] || File.join(Settings.shared['path'], "lfs-objects"))
 Settings.lfs['object_store'] = ObjectStoreSettings.legacy_parse(Settings.lfs['object_store'], 'lfs')
@@ -400,7 +398,7 @@
 #
 # Uploads
 #
-Settings['uploads'] ||= Settingslogic.new({})
+Settings['uploads'] ||= {}
 Settings.uploads['storage_path'] = Settings.absolute(Settings.uploads['storage_path'] || 'public')
 Settings.uploads['base_dir'] = Settings.uploads['base_dir'] || 'uploads/-/system'
 Settings.uploads['object_store'] = ObjectStoreSettings.legacy_parse(Settings.uploads['object_store'], 'uploads')
@@ -409,7 +407,7 @@
 #
 # Packages
 #
-Settings['packages'] ||= Settingslogic.new({})
+Settings['packages'] ||= {}
 Settings.packages['enabled']       = true if Settings.packages['enabled'].nil?
 Settings.packages['dpkg_deb_path'] = '/usr/bin/dpkg-deb' if Settings.packages['dpkg_deb_path'].nil?
 Settings.packages['storage_path']  = Settings.absolute(Settings.packages['storage_path'] || File.join(Settings.shared['path'], "packages"))
@@ -418,7 +416,7 @@
 #
 # Dependency Proxy
 #
-Settings['dependency_proxy'] ||= Settingslogic.new({})
+Settings['dependency_proxy'] ||= {}
 Settings.dependency_proxy['enabled']      = true if Settings.dependency_proxy['enabled'].nil?
 Settings.dependency_proxy['storage_path'] = Settings.absolute(Settings.dependency_proxy['storage_path'] || File.join(Settings.shared['path'], "dependency_proxy"))
 Settings.dependency_proxy['object_store'] = ObjectStoreSettings.legacy_parse(Settings.dependency_proxy['object_store'], 'dependency_proxy')
@@ -432,7 +430,7 @@
 #
 # Terraform state
 #
-Settings['terraform_state'] ||= Settingslogic.new({})
+Settings['terraform_state'] ||= {}
 Settings.terraform_state['enabled']      = true if Settings.terraform_state['enabled'].nil?
 Settings.terraform_state['storage_path'] = Settings.absolute(Settings.terraform_state['storage_path'] || File.join(Settings.shared['path'], "terraform_state"))
 Settings.terraform_state['object_store'] = ObjectStoreSettings.legacy_parse(Settings.terraform_state['object_store'], 'terraform_state')
@@ -440,14 +438,14 @@
 #
 # Mattermost
 #
-Settings['mattermost'] ||= Settingslogic.new({})
+Settings['mattermost'] ||= {}
 Settings.mattermost['enabled'] = false if Settings.mattermost['enabled'].nil?
 Settings.mattermost['host'] = nil unless Settings.mattermost.enabled
 
 #
 # Jira Connect (GitLab for Jira Cloud App)
 #
-Settings['jira_connect'] ||= Settingslogic.new({})
+Settings['jira_connect'] ||= {}
 
 Settings.jira_connect['atlassian_js_url'] ||= 'https://connect-cdn.atl-paas.net/all.js'
 Settings.jira_connect['enforce_jira_base_url_https'] = true if Settings.jira_connect['enforce_jira_base_url_https'].nil?
@@ -456,7 +454,7 @@
 #
 # Gravatar
 #
-Settings['gravatar'] ||= Settingslogic.new({})
+Settings['gravatar'] ||= {}
 Settings.gravatar['enabled']      = true if Settings.gravatar['enabled'].nil?
 Settings.gravatar['plain_url']  ||= 'https://www.gravatar.com/avatar/%{hash}?s=%{size}&d=identicon'
 Settings.gravatar['ssl_url']    ||= 'https://secure.gravatar.com/avatar/%{hash}?s=%{size}&d=identicon'
@@ -465,395 +463,395 @@
 #
 # Cron Jobs
 #
-Settings['cron_jobs'] ||= Settingslogic.new({})
+Settings['cron_jobs'] ||= {}
 
 if Gitlab.ee? && Settings['ee_cron_jobs']
   Settings.cron_jobs.merge!(Settings.ee_cron_jobs)
 end
 
 Settings.cron_jobs['poll_interval'] ||= nil
-Settings.cron_jobs['stuck_ci_jobs_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['stuck_ci_jobs_worker'] ||= {}
 Settings.cron_jobs['stuck_ci_jobs_worker']['cron'] ||= '0 * * * *'
 Settings.cron_jobs['stuck_ci_jobs_worker']['job_class'] = 'StuckCiJobsWorker'
-Settings.cron_jobs['pipeline_schedule_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['pipeline_schedule_worker'] ||= {}
 Settings.cron_jobs['pipeline_schedule_worker']['cron'] ||= '3-59/10 * * * *'
 Settings.cron_jobs['pipeline_schedule_worker']['job_class'] = 'PipelineScheduleWorker'
-Settings.cron_jobs['expire_build_artifacts_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['expire_build_artifacts_worker'] ||= {}
 Settings.cron_jobs['expire_build_artifacts_worker']['cron'] ||= '*/7 * * * *'
 Settings.cron_jobs['expire_build_artifacts_worker']['job_class'] = 'ExpireBuildArtifactsWorker'
-Settings.cron_jobs['update_locked_unknown_artifacts_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['update_locked_unknown_artifacts_worker'] ||= {}
 Settings.cron_jobs['update_locked_unknown_artifacts_worker']['cron'] ||= '*/7 * * * *'
 Settings.cron_jobs['update_locked_unknown_artifacts_worker']['job_class'] = 'Ci::UpdateLockedUnknownArtifactsWorker'
-Settings.cron_jobs['ci_pipelines_expire_artifacts_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['ci_pipelines_expire_artifacts_worker'] ||= {}
 Settings.cron_jobs['ci_pipelines_expire_artifacts_worker']['cron'] ||= '*/23 * * * *'
 Settings.cron_jobs['ci_pipelines_expire_artifacts_worker']['job_class'] = 'Ci::PipelineArtifacts::ExpireArtifactsWorker'
-Settings.cron_jobs['ci_schedule_delete_objects_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['ci_schedule_delete_objects_worker'] ||= {}
 Settings.cron_jobs['ci_schedule_delete_objects_worker']['cron'] ||= '*/16 * * * *'
 Settings.cron_jobs['ci_schedule_delete_objects_worker']['job_class'] = 'Ci::ScheduleDeleteObjectsCronWorker'
-Settings.cron_jobs['environments_auto_stop_cron_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['environments_auto_stop_cron_worker'] ||= {}
 Settings.cron_jobs['environments_auto_stop_cron_worker']['cron'] ||= '24 * * * *'
 Settings.cron_jobs['environments_auto_stop_cron_worker']['job_class'] = 'Environments::AutoStopCronWorker'
-Settings.cron_jobs['environments_auto_delete_cron_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['environments_auto_delete_cron_worker'] ||= {}
 Settings.cron_jobs['environments_auto_delete_cron_worker']['cron'] ||= '34 * * * *'
 Settings.cron_jobs['environments_auto_delete_cron_worker']['job_class'] = 'Environments::AutoDeleteCronWorker'
-Settings.cron_jobs['repository_check_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['repository_check_worker'] ||= {}
 Settings.cron_jobs['repository_check_worker']['cron'] ||= '20 * * * *'
 Settings.cron_jobs['repository_check_worker']['job_class'] = 'RepositoryCheck::DispatchWorker'
-Settings.cron_jobs['admin_email_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['admin_email_worker'] ||= {}
 Settings.cron_jobs['admin_email_worker']['cron'] ||= '0 0 * * 0'
 Settings.cron_jobs['admin_email_worker']['job_class'] = 'AdminEmailWorker'
-Settings.cron_jobs['personal_access_tokens_expiring_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['personal_access_tokens_expiring_worker'] ||= {}
 Settings.cron_jobs['personal_access_tokens_expiring_worker']['cron'] ||= '0 1 * * *'
 Settings.cron_jobs['personal_access_tokens_expiring_worker']['job_class'] = 'PersonalAccessTokens::ExpiringWorker'
-Settings.cron_jobs['personal_access_tokens_expired_notification_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['personal_access_tokens_expired_notification_worker'] ||= {}
 Settings.cron_jobs['personal_access_tokens_expired_notification_worker']['cron'] ||= '0 2 * * *'
 Settings.cron_jobs['personal_access_tokens_expired_notification_worker']['job_class'] = 'PersonalAccessTokens::ExpiredNotificationWorker'
-Settings.cron_jobs['repository_archive_cache_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['repository_archive_cache_worker'] ||= {}
 Settings.cron_jobs['repository_archive_cache_worker']['cron'] ||= '0 * * * *'
 Settings.cron_jobs['repository_archive_cache_worker']['job_class'] = 'RepositoryArchiveCacheWorker'
-Settings.cron_jobs['import_export_project_cleanup_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['import_export_project_cleanup_worker'] ||= {}
 Settings.cron_jobs['import_export_project_cleanup_worker']['cron'] ||= '0 * * * *'
 Settings.cron_jobs['import_export_project_cleanup_worker']['job_class'] = 'ImportExportProjectCleanupWorker'
-Settings.cron_jobs['ci_archive_traces_cron_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['ci_archive_traces_cron_worker'] ||= {}
 Settings.cron_jobs['ci_archive_traces_cron_worker']['cron'] ||= '17 * * * *'
 Settings.cron_jobs['ci_archive_traces_cron_worker']['job_class'] = 'Ci::ArchiveTracesCronWorker'
-Settings.cron_jobs['remove_expired_members_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['remove_expired_members_worker'] ||= {}
 Settings.cron_jobs['remove_expired_members_worker']['cron'] ||= '10 0 * * *'
 Settings.cron_jobs['remove_expired_members_worker']['job_class'] = 'RemoveExpiredMembersWorker'
-Settings.cron_jobs['remove_expired_group_links_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['remove_expired_group_links_worker'] ||= {}
 Settings.cron_jobs['remove_expired_group_links_worker']['cron'] ||= '10 0 * * *'
 Settings.cron_jobs['remove_expired_group_links_worker']['job_class'] = 'RemoveExpiredGroupLinksWorker'
-Settings.cron_jobs['remove_unaccepted_member_invites_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['remove_unaccepted_member_invites_worker'] ||= {}
 Settings.cron_jobs['remove_unaccepted_member_invites_worker']['cron'] ||= '10 15 * * *'
 Settings.cron_jobs['remove_unaccepted_member_invites_worker']['job_class'] = 'RemoveUnacceptedMemberInvitesWorker'
-Settings.cron_jobs['prune_old_events_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['prune_old_events_worker'] ||= {}
 Settings.cron_jobs['prune_old_events_worker']['cron'] ||= '0 */6 * * *'
 Settings.cron_jobs['prune_old_events_worker']['job_class'] = 'PruneOldEventsWorker'
-Settings.cron_jobs['gitlab_export_prune_project_export_jobs_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['gitlab_export_prune_project_export_jobs_worker'] ||= {}
 Settings.cron_jobs['gitlab_export_prune_project_export_jobs_worker']['cron'] ||= '30 3 * * */7'
 Settings.cron_jobs['gitlab_export_prune_project_export_jobs_worker']['job_class'] = 'Gitlab::Export::PruneProjectExportJobsWorker'
-Settings.cron_jobs['trending_projects_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['trending_projects_worker'] ||= {}
 Settings.cron_jobs['trending_projects_worker']['cron'] = '0 1 * * *'
 Settings.cron_jobs['trending_projects_worker']['job_class'] = 'TrendingProjectsWorker'
-Settings.cron_jobs['remove_unreferenced_lfs_objects_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['remove_unreferenced_lfs_objects_worker'] ||= {}
 Settings.cron_jobs['remove_unreferenced_lfs_objects_worker']['cron'] ||= '20 0 * * *'
 Settings.cron_jobs['remove_unreferenced_lfs_objects_worker']['job_class'] = 'RemoveUnreferencedLfsObjectsWorker'
-Settings.cron_jobs['bulk_imports_stuck_import_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['bulk_imports_stuck_import_worker'] ||= {}
 Settings.cron_jobs['bulk_imports_stuck_import_worker']['cron'] ||= '0 */4 * * *'
 Settings.cron_jobs['bulk_imports_stuck_import_worker']['job_class'] = 'BulkImports::StuckImportWorker'
-Settings.cron_jobs['import_stuck_project_import_jobs'] ||= Settingslogic.new({})
+Settings.cron_jobs['import_stuck_project_import_jobs'] ||= {}
 Settings.cron_jobs['import_stuck_project_import_jobs']['cron'] ||= '15 * * * *'
 Settings.cron_jobs['import_stuck_project_import_jobs']['job_class'] = 'Gitlab::Import::StuckProjectImportJobsWorker'
-Settings.cron_jobs['jira_import_stuck_jira_import_jobs'] ||= Settingslogic.new({})
+Settings.cron_jobs['jira_import_stuck_jira_import_jobs'] ||= {}
 Settings.cron_jobs['jira_import_stuck_jira_import_jobs']['cron'] ||= '* 0/15 * * *'
 Settings.cron_jobs['jira_import_stuck_jira_import_jobs']['job_class'] = 'Gitlab::JiraImport::StuckJiraImportJobsWorker'
-Settings.cron_jobs['stuck_export_jobs_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['stuck_export_jobs_worker'] ||= {}
 Settings.cron_jobs['stuck_export_jobs_worker']['cron'] ||= '30 * * * *'
 Settings.cron_jobs['stuck_export_jobs_worker']['job_class'] = 'StuckExportJobsWorker'
-Settings.cron_jobs['gitlab_service_ping_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['gitlab_service_ping_worker'] ||= {}
 Settings.cron_jobs['gitlab_service_ping_worker']['cron'] ||= nil # This is dynamically loaded in the sidekiq initializer
 Settings.cron_jobs['gitlab_service_ping_worker']['job_class'] = 'GitlabServicePingWorker'
-Settings.cron_jobs['stuck_merge_jobs_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['stuck_merge_jobs_worker'] ||= {}
 Settings.cron_jobs['stuck_merge_jobs_worker']['cron'] ||= '0 */2 * * *'
 Settings.cron_jobs['stuck_merge_jobs_worker']['job_class'] = 'StuckMergeJobsWorker'
-Settings.cron_jobs['pages_domain_verification_cron_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['pages_domain_verification_cron_worker'] ||= {}
 Settings.cron_jobs['pages_domain_verification_cron_worker']['cron'] ||= '*/15 * * * *'
 Settings.cron_jobs['pages_domain_verification_cron_worker']['job_class'] = 'PagesDomainVerificationCronWorker'
-Settings.cron_jobs['pages_domain_removal_cron_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['pages_domain_removal_cron_worker'] ||= {}
 Settings.cron_jobs['pages_domain_removal_cron_worker']['cron'] ||= '47 0 * * *'
 Settings.cron_jobs['pages_domain_removal_cron_worker']['job_class'] = 'PagesDomainRemovalCronWorker'
-Settings.cron_jobs['pages_domain_ssl_renewal_cron_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['pages_domain_ssl_renewal_cron_worker'] ||= {}
 Settings.cron_jobs['pages_domain_ssl_renewal_cron_worker']['cron'] ||= '*/10 * * * *'
 Settings.cron_jobs['pages_domain_ssl_renewal_cron_worker']['job_class'] = 'PagesDomainSslRenewalCronWorker'
-Settings.cron_jobs['issue_due_scheduler_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['issue_due_scheduler_worker'] ||= {}
 Settings.cron_jobs['issue_due_scheduler_worker']['cron'] ||= '50 00 * * *'
 Settings.cron_jobs['issue_due_scheduler_worker']['job_class'] = 'IssueDueSchedulerWorker'
-Settings.cron_jobs['metrics_dashboard_schedule_annotations_prune_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['metrics_dashboard_schedule_annotations_prune_worker'] ||= {}
 Settings.cron_jobs['metrics_dashboard_schedule_annotations_prune_worker']['cron'] ||= '0 1 * * *'
 Settings.cron_jobs['metrics_dashboard_schedule_annotations_prune_worker']['job_class'] = 'Metrics::Dashboard::ScheduleAnnotationsPruneWorker'
-Settings.cron_jobs['schedule_migrate_external_diffs_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['schedule_migrate_external_diffs_worker'] ||= {}
 Settings.cron_jobs['schedule_migrate_external_diffs_worker']['cron'] ||= '15 * * * *'
 Settings.cron_jobs['schedule_migrate_external_diffs_worker']['job_class'] = 'ScheduleMigrateExternalDiffsWorker'
-Settings.cron_jobs['namespaces_prune_aggregation_schedules_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['namespaces_prune_aggregation_schedules_worker'] ||= {}
 Settings.cron_jobs['namespaces_prune_aggregation_schedules_worker']['cron'] ||= '5 1 * * *'
 Settings.cron_jobs['namespaces_prune_aggregation_schedules_worker']['job_class'] = 'Namespaces::PruneAggregationSchedulesWorker'
-Settings.cron_jobs['container_expiration_policy_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['container_expiration_policy_worker'] ||= {}
 Settings.cron_jobs['container_expiration_policy_worker']['cron'] ||= '50 * * * *'
 Settings.cron_jobs['container_expiration_policy_worker']['job_class'] = 'ContainerExpirationPolicyWorker'
-Settings.cron_jobs['container_registry_migration_guard_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['container_registry_migration_guard_worker'] ||= {}
 Settings.cron_jobs['container_registry_migration_guard_worker']['cron'] ||= '*/10 * * * *'
 Settings.cron_jobs['container_registry_migration_guard_worker']['job_class'] = 'ContainerRegistry::Migration::GuardWorker'
-Settings.cron_jobs['container_registry_migration_observer_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['container_registry_migration_observer_worker'] ||= {}
 Settings.cron_jobs['container_registry_migration_observer_worker']['cron'] ||= '*/30 * * * *'
 Settings.cron_jobs['container_registry_migration_observer_worker']['job_class'] = 'ContainerRegistry::Migration::ObserverWorker'
-Settings.cron_jobs['container_registry_migration_enqueuer_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['container_registry_migration_enqueuer_worker'] ||= {}
 Settings.cron_jobs['container_registry_migration_enqueuer_worker']['cron'] ||= '15,45 */1 * * *'
 Settings.cron_jobs['container_registry_migration_enqueuer_worker']['job_class'] = 'ContainerRegistry::Migration::EnqueuerWorker'
-Settings.cron_jobs['cleanup_container_registry_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['cleanup_container_registry_worker'] ||= {}
 Settings.cron_jobs['cleanup_container_registry_worker']['cron'] ||= '*/5 * * * *'
 Settings.cron_jobs['cleanup_container_registry_worker']['job_class'] = 'ContainerRegistry::CleanupWorker'
-Settings.cron_jobs['image_ttl_group_policy_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['image_ttl_group_policy_worker'] ||= {}
 Settings.cron_jobs['image_ttl_group_policy_worker']['cron'] ||= '40 0 * * *'
 Settings.cron_jobs['image_ttl_group_policy_worker']['job_class'] = 'DependencyProxy::ImageTtlGroupPolicyWorker'
-Settings.cron_jobs['cleanup_dependency_proxy_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['cleanup_dependency_proxy_worker'] ||= {}
 Settings.cron_jobs['cleanup_dependency_proxy_worker']['cron'] ||= '20 3,15 * * *'
 Settings.cron_jobs['cleanup_dependency_proxy_worker']['job_class'] = 'DependencyProxy::CleanupDependencyProxyWorker'
-Settings.cron_jobs['cleanup_package_registry_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['cleanup_package_registry_worker'] ||= {}
 Settings.cron_jobs['cleanup_package_registry_worker']['cron'] ||= '20 * * * *'
 Settings.cron_jobs['cleanup_package_registry_worker']['job_class'] = 'Packages::CleanupPackageRegistryWorker'
-Settings.cron_jobs['x509_issuer_crl_check_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['x509_issuer_crl_check_worker'] ||= {}
 Settings.cron_jobs['x509_issuer_crl_check_worker']['cron'] ||= '30 1 * * *'
 Settings.cron_jobs['x509_issuer_crl_check_worker']['job_class'] = 'X509IssuerCrlCheckWorker'
-Settings.cron_jobs['users_create_statistics_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['users_create_statistics_worker'] ||= {}
 Settings.cron_jobs['users_create_statistics_worker']['cron'] ||= '2 15 * * *'
 Settings.cron_jobs['users_create_statistics_worker']['job_class'] = 'Users::CreateStatisticsWorker'
-Settings.cron_jobs['authorized_project_update_periodic_recalculate_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['authorized_project_update_periodic_recalculate_worker'] ||= {}
 Settings.cron_jobs['authorized_project_update_periodic_recalculate_worker']['cron'] ||= '45 1 1,15 * *'
 Settings.cron_jobs['authorized_project_update_periodic_recalculate_worker']['job_class'] = 'AuthorizedProjectUpdate::PeriodicRecalculateWorker'
-Settings.cron_jobs['update_container_registry_info_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['update_container_registry_info_worker'] ||= {}
 Settings.cron_jobs['update_container_registry_info_worker']['cron'] ||= '0 0 * * *'
 Settings.cron_jobs['update_container_registry_info_worker']['job_class'] = 'UpdateContainerRegistryInfoWorker'
-Settings.cron_jobs['postgres_dynamic_partitions_manager'] ||= Settingslogic.new({})
+Settings.cron_jobs['postgres_dynamic_partitions_manager'] ||= {}
 Settings.cron_jobs['postgres_dynamic_partitions_manager']['cron'] ||= '21 */6 * * *'
 Settings.cron_jobs['postgres_dynamic_partitions_manager']['job_class'] ||= 'Database::PartitionManagementWorker'
-Settings.cron_jobs['postgres_dynamic_partitions_dropper'] ||= Settingslogic.new({})
+Settings.cron_jobs['postgres_dynamic_partitions_dropper'] ||= {}
 Settings.cron_jobs['postgres_dynamic_partitions_dropper']['cron'] ||= '45 12 * * *'
 Settings.cron_jobs['postgres_dynamic_partitions_dropper']['job_class'] ||= 'Database::DropDetachedPartitionsWorker'
-Settings.cron_jobs['ci_platform_metrics_update_cron_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['ci_platform_metrics_update_cron_worker'] ||= {}
 Settings.cron_jobs['ci_platform_metrics_update_cron_worker']['cron'] ||= '47 9 * * *'
 Settings.cron_jobs['ci_platform_metrics_update_cron_worker']['job_class'] = 'CiPlatformMetricsUpdateCronWorker'
-Settings.cron_jobs['analytics_usage_trends_count_job_trigger_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['analytics_usage_trends_count_job_trigger_worker'] ||= {}
 Settings.cron_jobs['analytics_usage_trends_count_job_trigger_worker']['cron'] ||= '50 23 */1 * *'
 Settings.cron_jobs['analytics_usage_trends_count_job_trigger_worker']['job_class'] ||= 'Analytics::UsageTrends::CountJobTriggerWorker'
-Settings.cron_jobs['member_invitation_reminder_emails_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['member_invitation_reminder_emails_worker'] ||= {}
 Settings.cron_jobs['member_invitation_reminder_emails_worker']['cron'] ||= '0 0 * * *'
 Settings.cron_jobs['member_invitation_reminder_emails_worker']['job_class'] = 'MemberInvitationReminderEmailsWorker'
-Settings.cron_jobs['schedule_merge_request_cleanup_refs_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['schedule_merge_request_cleanup_refs_worker'] ||= {}
 Settings.cron_jobs['schedule_merge_request_cleanup_refs_worker']['cron'] ||= '* * * * *'
 Settings.cron_jobs['schedule_merge_request_cleanup_refs_worker']['job_class'] = 'ScheduleMergeRequestCleanupRefsWorker'
-Settings.cron_jobs['manage_evidence_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['manage_evidence_worker'] ||= {}
 Settings.cron_jobs['manage_evidence_worker']['cron'] ||= '0 * * * *'
 Settings.cron_jobs['manage_evidence_worker']['job_class'] = 'Releases::ManageEvidenceWorker'
-Settings.cron_jobs['user_status_cleanup_batch_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['user_status_cleanup_batch_worker'] ||= {}
 Settings.cron_jobs['user_status_cleanup_batch_worker']['cron'] ||= '* * * * *'
 Settings.cron_jobs['user_status_cleanup_batch_worker']['job_class'] = 'UserStatusCleanup::BatchWorker'
-Settings.cron_jobs['ssh_keys_expired_notification_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['ssh_keys_expired_notification_worker'] ||= {}
 Settings.cron_jobs['ssh_keys_expired_notification_worker']['cron'] ||= '0 2,14 * * *'
 Settings.cron_jobs['ssh_keys_expired_notification_worker']['job_class'] = 'SshKeys::ExpiredNotificationWorker'
-Settings.cron_jobs['namespaces_in_product_marketing_emails_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['namespaces_in_product_marketing_emails_worker'] ||= {}
 Settings.cron_jobs['namespaces_in_product_marketing_emails_worker']['cron'] ||= '0 16 * * *'
 Settings.cron_jobs['namespaces_in_product_marketing_emails_worker']['job_class'] = 'Namespaces::InProductMarketingEmailsWorker'
-Settings.cron_jobs['ssh_keys_expiring_soon_notification_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['ssh_keys_expiring_soon_notification_worker'] ||= {}
 Settings.cron_jobs['ssh_keys_expiring_soon_notification_worker']['cron'] ||= '0 1 * * *'
 Settings.cron_jobs['ssh_keys_expiring_soon_notification_worker']['job_class'] = 'SshKeys::ExpiringSoonNotificationWorker'
-Settings.cron_jobs['users_deactivate_dormant_users_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['users_deactivate_dormant_users_worker'] ||= {}
 Settings.cron_jobs['users_deactivate_dormant_users_worker']['cron'] ||= '21,42 0-4 * * *'
 Settings.cron_jobs['users_deactivate_dormant_users_worker']['job_class'] = 'Users::DeactivateDormantUsersWorker'
-Settings.cron_jobs['ci_delete_unit_tests_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['ci_delete_unit_tests_worker'] ||= {}
 Settings.cron_jobs['ci_delete_unit_tests_worker']['cron'] ||= '0 0 * * *'
 Settings.cron_jobs['ci_delete_unit_tests_worker']['job_class'] = 'Ci::DeleteUnitTestsWorker'
-Settings.cron_jobs['batched_background_migrations_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['batched_background_migrations_worker'] ||= {}
 Settings.cron_jobs['batched_background_migrations_worker']['cron'] ||= '* * * * *'
 Settings.cron_jobs['batched_background_migrations_worker']['job_class'] = 'Database::BatchedBackgroundMigrationWorker'
-Settings.cron_jobs['batched_background_migration_worker_ci_database'] ||= Settingslogic.new({})
+Settings.cron_jobs['batched_background_migration_worker_ci_database'] ||= {}
 Settings.cron_jobs['batched_background_migration_worker_ci_database']['cron'] ||= '* * * * *'
 Settings.cron_jobs['batched_background_migration_worker_ci_database']['job_class'] = 'Database::BatchedBackgroundMigration::CiDatabaseWorker'
-Settings.cron_jobs['issues_reschedule_stuck_issue_rebalances'] ||= Settingslogic.new({})
+Settings.cron_jobs['issues_reschedule_stuck_issue_rebalances'] ||= {}
 Settings.cron_jobs['issues_reschedule_stuck_issue_rebalances']['cron'] ||= '*/15 * * * *'
 Settings.cron_jobs['issues_reschedule_stuck_issue_rebalances']['job_class'] = 'Issues::RescheduleStuckIssueRebalancesWorker'
-Settings.cron_jobs['clusters_integrations_check_prometheus_health_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['clusters_integrations_check_prometheus_health_worker'] ||= {}
 Settings.cron_jobs['clusters_integrations_check_prometheus_health_worker']['cron'] ||= '0 * * * *'
 Settings.cron_jobs['clusters_integrations_check_prometheus_health_worker']['job_class'] = 'Clusters::Integrations::CheckPrometheusHealthWorker'
-Settings.cron_jobs['projects_schedule_refresh_build_artifacts_size_statistics_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['projects_schedule_refresh_build_artifacts_size_statistics_worker'] ||= {}
 Settings.cron_jobs['projects_schedule_refresh_build_artifacts_size_statistics_worker']['cron'] ||= '2/17 * * * *'
 Settings.cron_jobs['projects_schedule_refresh_build_artifacts_size_statistics_worker']['job_class'] = 'Projects::ScheduleRefreshBuildArtifactsSizeStatisticsWorker'
-Settings.cron_jobs['inactive_projects_deletion_cron_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['inactive_projects_deletion_cron_worker'] ||= {}
 Settings.cron_jobs['inactive_projects_deletion_cron_worker']['cron'] ||= '*/10 * * * *'
 Settings.cron_jobs['inactive_projects_deletion_cron_worker']['job_class'] = 'Projects::InactiveProjectsDeletionCronWorker'
-Settings.cron_jobs['loose_foreign_keys_cleanup_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['loose_foreign_keys_cleanup_worker'] ||= {}
 Settings.cron_jobs['loose_foreign_keys_cleanup_worker']['cron'] ||= '*/1 * * * *'
 Settings.cron_jobs['loose_foreign_keys_cleanup_worker']['job_class'] = 'LooseForeignKeys::CleanupWorker'
-Settings.cron_jobs['ci_runner_versions_reconciliation_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['ci_runner_versions_reconciliation_worker'] ||= {}
 Settings.cron_jobs['ci_runner_versions_reconciliation_worker']['cron'] ||= '@daily'
 Settings.cron_jobs['ci_runner_versions_reconciliation_worker']['job_class'] = 'Ci::Runners::ReconcileExistingRunnerVersionsCronWorker'
-Settings.cron_jobs['users_migrate_records_to_ghost_user_in_batches_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['users_migrate_records_to_ghost_user_in_batches_worker'] ||= {}
 Settings.cron_jobs['users_migrate_records_to_ghost_user_in_batches_worker']['cron'] ||= '*/2 * * * *'
 Settings.cron_jobs['users_migrate_records_to_ghost_user_in_batches_worker']['job_class'] = 'Users::MigrateRecordsToGhostUserInBatchesWorker'
-Settings.cron_jobs['ci_runners_stale_machines_cleanup_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['ci_runners_stale_machines_cleanup_worker'] ||= {}
 Settings.cron_jobs['ci_runners_stale_machines_cleanup_worker']['cron'] ||= '36 4 * * *'
 Settings.cron_jobs['ci_runners_stale_machines_cleanup_worker']['job_class'] = 'Ci::Runners::StaleMachinesCleanupCronWorker'
-Settings.cron_jobs['cleanup_dangling_debian_package_files_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['cleanup_dangling_debian_package_files_worker'] ||= {}
 Settings.cron_jobs['cleanup_dangling_debian_package_files_worker']['cron'] ||= '20 21 * * *'
 Settings.cron_jobs['cleanup_dangling_debian_package_files_worker']['job_class'] = 'Packages::Debian::CleanupDanglingPackageFilesWorker'
-Settings.cron_jobs['global_metrics_update_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['global_metrics_update_worker'] ||= {}
 Settings.cron_jobs['global_metrics_update_worker']['cron'] ||= '*/1 * * * *'
 Settings.cron_jobs['global_metrics_update_worker']['job_class'] ||= 'Metrics::GlobalMetricsUpdateWorker'
 
 Gitlab.ee do
-  Settings.cron_jobs['analytics_devops_adoption_create_all_snapshots_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['analytics_devops_adoption_create_all_snapshots_worker'] ||= {}
   Settings.cron_jobs['analytics_devops_adoption_create_all_snapshots_worker']['cron'] ||= '0 1 * * *'
   Settings.cron_jobs['analytics_devops_adoption_create_all_snapshots_worker']['job_class'] = 'Analytics::DevopsAdoption::CreateAllSnapshotsWorker'
-  Settings.cron_jobs['analytics_cycle_analytics_incremental_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['analytics_cycle_analytics_incremental_worker'] ||= {}
   Settings.cron_jobs['analytics_cycle_analytics_incremental_worker']['cron'] ||= '*/10 * * * *'
   Settings.cron_jobs['analytics_cycle_analytics_incremental_worker']['job_class'] = 'Analytics::CycleAnalytics::IncrementalWorker'
-  Settings.cron_jobs['analytics_cycle_analytics_consistency_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['analytics_cycle_analytics_consistency_worker'] ||= {}
   Settings.cron_jobs['analytics_cycle_analytics_consistency_worker']['cron'] ||= '*/30 * * * *'
   Settings.cron_jobs['analytics_cycle_analytics_consistency_worker']['job_class'] = 'Analytics::CycleAnalytics::ConsistencyWorker'
-  Settings.cron_jobs['analytics_cycle_analytics_reaggregation_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['analytics_cycle_analytics_reaggregation_worker'] ||= {}
   Settings.cron_jobs['analytics_cycle_analytics_reaggregation_worker']['cron'] ||= '44 * * * *'
   Settings.cron_jobs['analytics_cycle_analytics_reaggregation_worker']['job_class'] = 'Analytics::CycleAnalytics::ReaggregationWorker'
-  Settings.cron_jobs['active_user_count_threshold_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['active_user_count_threshold_worker'] ||= {}
   Settings.cron_jobs['active_user_count_threshold_worker']['cron'] ||= '0 12 * * *'
   Settings.cron_jobs['active_user_count_threshold_worker']['job_class'] = 'ActiveUserCountThresholdWorker'
-  Settings.cron_jobs['adjourned_group_deletion_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['adjourned_group_deletion_worker'] ||= {}
   Settings.cron_jobs['adjourned_group_deletion_worker']['cron'] ||= '0 2 * * *'
   Settings.cron_jobs['adjourned_group_deletion_worker']['job_class'] = 'AdjournedGroupDeletionWorker'
-  Settings.cron_jobs['clear_shared_runners_minutes_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['clear_shared_runners_minutes_worker'] ||= {}
   Settings.cron_jobs['clear_shared_runners_minutes_worker']['cron'] ||= '0 0 1 * *'
   Settings.cron_jobs['clear_shared_runners_minutes_worker']['job_class'] = 'ClearSharedRunnersMinutesWorker'
-  Settings.cron_jobs['adjourned_projects_deletion_cron_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['adjourned_projects_deletion_cron_worker'] ||= {}
   Settings.cron_jobs['adjourned_projects_deletion_cron_worker']['cron'] ||= '0 7 * * *'
   Settings.cron_jobs['adjourned_projects_deletion_cron_worker']['job_class'] = 'AdjournedProjectsDeletionCronWorker'
-  Settings.cron_jobs['geo_verification_cron_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['geo_verification_cron_worker'] ||= {}
   Settings.cron_jobs['geo_verification_cron_worker']['cron'] ||= '* * * * *'
   Settings.cron_jobs['geo_verification_cron_worker']['job_class'] ||= 'Geo::VerificationCronWorker'
-  Settings.cron_jobs['geo_sync_timeout_cron_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['geo_sync_timeout_cron_worker'] ||= {}
   Settings.cron_jobs['geo_sync_timeout_cron_worker']['cron'] ||= '*/10 * * * *'
   Settings.cron_jobs['geo_sync_timeout_cron_worker']['job_class'] ||= 'Geo::SyncTimeoutCronWorker'
-  Settings.cron_jobs['geo_secondary_usage_data_cron_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['geo_secondary_usage_data_cron_worker'] ||= {}
   Settings.cron_jobs['geo_secondary_usage_data_cron_worker']['cron'] ||= '0 0 * * 0'
   Settings.cron_jobs['geo_secondary_usage_data_cron_worker']['job_class'] ||= 'Geo::SecondaryUsageDataCronWorker'
-  Settings.cron_jobs['geo_registry_sync_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['geo_registry_sync_worker'] ||= {}
   Settings.cron_jobs['geo_registry_sync_worker']['cron'] ||= '*/1 * * * *'
   Settings.cron_jobs['geo_registry_sync_worker']['job_class'] ||= 'Geo::RegistrySyncWorker'
-  Settings.cron_jobs['geo_repository_registry_sync_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['geo_repository_registry_sync_worker'] ||= {}
   Settings.cron_jobs['geo_repository_registry_sync_worker']['cron'] ||= '*/1 * * * *'
   Settings.cron_jobs['geo_repository_registry_sync_worker']['job_class'] ||= 'Geo::RepositoryRegistrySyncWorker'
-  Settings.cron_jobs['geo_metrics_update_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['geo_metrics_update_worker'] ||= {}
   Settings.cron_jobs['geo_metrics_update_worker']['cron'] ||= '*/1 * * * *'
   Settings.cron_jobs['geo_metrics_update_worker']['job_class'] ||= 'Geo::MetricsUpdateWorker'
-  Settings.cron_jobs['geo_prune_event_log_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['geo_prune_event_log_worker'] ||= {}
   Settings.cron_jobs['geo_prune_event_log_worker']['cron'] ||= '*/5 * * * *'
   Settings.cron_jobs['geo_prune_event_log_worker']['job_class'] ||= 'Geo::PruneEventLogWorker'
-  Settings.cron_jobs['geo_repository_sync_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['geo_repository_sync_worker'] ||= {}
   Settings.cron_jobs['geo_repository_sync_worker']['cron'] ||= '*/1 * * * *'
   Settings.cron_jobs['geo_repository_sync_worker']['job_class'] ||= 'Geo::RepositorySyncWorker'
-  Settings.cron_jobs['geo_secondary_registry_consistency_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['geo_secondary_registry_consistency_worker'] ||= {}
   Settings.cron_jobs['geo_secondary_registry_consistency_worker']['cron'] ||= '* * * * *'
   Settings.cron_jobs['geo_secondary_registry_consistency_worker']['job_class'] ||= 'Geo::Secondary::RegistryConsistencyWorker'
-  Settings.cron_jobs['geo_repository_verification_primary_batch_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['geo_repository_verification_primary_batch_worker'] ||= {}
   Settings.cron_jobs['geo_repository_verification_primary_batch_worker']['cron'] ||= '*/1 * * * *'
   Settings.cron_jobs['geo_repository_verification_primary_batch_worker']['job_class'] ||= 'Geo::RepositoryVerification::Primary::BatchWorker'
-  Settings.cron_jobs['geo_repository_verification_secondary_scheduler_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['geo_repository_verification_secondary_scheduler_worker'] ||= {}
   Settings.cron_jobs['geo_repository_verification_secondary_scheduler_worker']['cron'] ||= '*/1 * * * *'
   Settings.cron_jobs['geo_repository_verification_secondary_scheduler_worker']['job_class'] ||= 'Geo::RepositoryVerification::Secondary::SchedulerWorker'
-  Settings.cron_jobs['historical_data_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['historical_data_worker'] ||= {}
   Settings.cron_jobs['historical_data_worker']['cron'] ||= '0 12 * * *'
   Settings.cron_jobs['historical_data_worker']['job_class'] = 'HistoricalDataWorker'
-  Settings.cron_jobs['incident_sla_exceeded_check_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['incident_sla_exceeded_check_worker'] ||= {}
   Settings.cron_jobs['incident_sla_exceeded_check_worker']['cron'] ||= '*/2 * * * *'
   Settings.cron_jobs['incident_sla_exceeded_check_worker']['job_class'] = 'IncidentManagement::IncidentSlaExceededCheckWorker'
-  Settings.cron_jobs['incident_management_persist_oncall_rotation_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['incident_management_persist_oncall_rotation_worker'] ||= {}
   Settings.cron_jobs['incident_management_persist_oncall_rotation_worker']['cron'] ||= '*/5 * * * *'
   Settings.cron_jobs['incident_management_persist_oncall_rotation_worker']['job_class'] = 'IncidentManagement::OncallRotations::PersistAllRotationsShiftsJob'
-  Settings.cron_jobs['incident_management_schedule_escalation_check_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['incident_management_schedule_escalation_check_worker'] ||= {}
   Settings.cron_jobs['incident_management_schedule_escalation_check_worker']['cron'] ||= '*/1 * * * *'
   Settings.cron_jobs['incident_management_schedule_escalation_check_worker']['job_class'] = 'IncidentManagement::PendingEscalations::ScheduleCheckCronWorker'
-  Settings.cron_jobs['import_software_licenses_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['import_software_licenses_worker'] ||= {}
   Settings.cron_jobs['import_software_licenses_worker']['cron'] ||= '0 3 * * 0'
   Settings.cron_jobs['import_software_licenses_worker']['job_class'] = 'ImportSoftwareLicensesWorker'
-  Settings.cron_jobs['ldap_group_sync_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['ldap_group_sync_worker'] ||= {}
   Settings.cron_jobs['ldap_group_sync_worker']['cron'] ||= '0 * * * *'
   Settings.cron_jobs['ldap_group_sync_worker']['job_class'] = 'LdapAllGroupsSyncWorker'
-  Settings.cron_jobs['ldap_sync_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['ldap_sync_worker'] ||= {}
   Settings.cron_jobs['ldap_sync_worker']['cron'] ||= '30 1 * * *'
   Settings.cron_jobs['ldap_sync_worker']['job_class'] = 'LdapSyncWorker'
-  Settings.cron_jobs['elastic_index_bulk_cron_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['elastic_index_bulk_cron_worker'] ||= {}
   Settings.cron_jobs['elastic_index_bulk_cron_worker']['cron'] ||= '*/1 * * * *'
   Settings.cron_jobs['elastic_index_bulk_cron_worker']['job_class'] ||= 'ElasticIndexBulkCronWorker'
-  Settings.cron_jobs['elastic_index_initial_bulk_cron_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['elastic_index_initial_bulk_cron_worker'] ||= {}
   Settings.cron_jobs['elastic_index_initial_bulk_cron_worker']['cron'] ||= '*/1 * * * *'
   Settings.cron_jobs['elastic_index_initial_bulk_cron_worker']['job_class'] ||= 'ElasticIndexInitialBulkCronWorker'
-  Settings.cron_jobs['elastic_cluster_reindexing_cron_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['elastic_cluster_reindexing_cron_worker'] ||= {}
   Settings.cron_jobs['elastic_cluster_reindexing_cron_worker']['cron'] ||= '*/10 * * * *'
   Settings.cron_jobs['elastic_cluster_reindexing_cron_worker']['job_class'] ||= 'ElasticClusterReindexingCronWorker'
-  Settings.cron_jobs['elastic_remove_expired_namespace_subscriptions_from_index_cron_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['elastic_remove_expired_namespace_subscriptions_from_index_cron_worker'] ||= {}
   Settings.cron_jobs['elastic_remove_expired_namespace_subscriptions_from_index_cron_worker']['cron'] ||= '10 3 * * *'
   Settings.cron_jobs['elastic_remove_expired_namespace_subscriptions_from_index_cron_worker']['job_class'] ||= 'ElasticRemoveExpiredNamespaceSubscriptionsFromIndexCronWorker'
-  Settings.cron_jobs['elastic_migration_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['elastic_migration_worker'] ||= {}
   Settings.cron_jobs['elastic_migration_worker']['cron'] ||= '*/5 * * * *'
   Settings.cron_jobs['elastic_migration_worker']['job_class'] ||= 'Elastic::MigrationWorker'
-  Settings.cron_jobs['search_index_curation_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['search_index_curation_worker'] ||= {}
   Settings.cron_jobs['search_index_curation_worker']['cron'] ||= '*/1 * * * *'
   Settings.cron_jobs['search_index_curation_worker']['job_class'] ||= 'Search::IndexCurationWorker'
-  Settings.cron_jobs['sync_seat_link_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['sync_seat_link_worker'] ||= {}
   Settings.cron_jobs['sync_seat_link_worker']['cron'] ||= "#{rand(60)} #{rand(3..4)} * * * UTC"
   Settings.cron_jobs['sync_seat_link_worker']['job_class'] = 'SyncSeatLinkWorker'
-  Settings.cron_jobs['users_create_statistics_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['users_create_statistics_worker'] ||= {}
   Settings.cron_jobs['users_create_statistics_worker']['cron'] ||= '2 15 * * *'
   Settings.cron_jobs['users_create_statistics_worker']['job_class'] = 'Users::CreateStatisticsWorker'
-  Settings.cron_jobs['iterations_update_status_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['iterations_update_status_worker'] ||= {}
   Settings.cron_jobs['iterations_update_status_worker']['cron'] ||= '5 0 * * *'
   Settings.cron_jobs['iterations_update_status_worker']['job_class'] = 'IterationsUpdateStatusWorker'
-  Settings.cron_jobs['iterations_generator_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['iterations_generator_worker'] ||= {}
   Settings.cron_jobs['iterations_generator_worker']['cron'] ||= '5 0 * * *'
   Settings.cron_jobs['iterations_generator_worker']['job_class'] = 'Iterations::Cadences::ScheduleCreateIterationsWorker'
-  Settings.cron_jobs['vulnerability_statistics_schedule_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['vulnerability_statistics_schedule_worker'] ||= {}
   Settings.cron_jobs['vulnerability_statistics_schedule_worker']['cron'] ||= '15 1,20 * * *'
   Settings.cron_jobs['vulnerability_statistics_schedule_worker']['job_class'] = 'Vulnerabilities::Statistics::ScheduleWorker'
-  Settings.cron_jobs['vulnerability_historical_statistics_deletion_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['vulnerability_historical_statistics_deletion_worker'] ||= {}
   Settings.cron_jobs['vulnerability_historical_statistics_deletion_worker']['cron'] ||= '15 3 * * *'
   Settings.cron_jobs['vulnerability_historical_statistics_deletion_worker']['job_class'] = 'Vulnerabilities::HistoricalStatistics::DeletionWorker'
-  Settings.cron_jobs['security_create_orchestration_policy_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['security_create_orchestration_policy_worker'] ||= {}
   Settings.cron_jobs['security_create_orchestration_policy_worker']['cron'] ||= '*/10 * * * *'
   Settings.cron_jobs['security_create_orchestration_policy_worker']['job_class'] = 'Security::CreateOrchestrationPolicyWorker'
-  Settings.cron_jobs['security_orchestration_policy_rule_schedule_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['security_orchestration_policy_rule_schedule_worker'] ||= {}
   Settings.cron_jobs['security_orchestration_policy_rule_schedule_worker']['cron'] ||= '*/15 * * * *'
   Settings.cron_jobs['security_orchestration_policy_rule_schedule_worker']['job_class'] = 'Security::OrchestrationPolicyRuleScheduleWorker'
-  Settings.cron_jobs['security_scans_purge_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['security_scans_purge_worker'] ||= {}
   Settings.cron_jobs['security_scans_purge_worker']['cron'] ||= '0 */4 * * 6,0'
   Settings.cron_jobs['security_scans_purge_worker']['job_class'] = 'Security::Scans::PurgeWorker'
-  Settings.cron_jobs['app_sec_dast_profile_schedule_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['app_sec_dast_profile_schedule_worker'] ||= {}
   Settings.cron_jobs['app_sec_dast_profile_schedule_worker']['cron'] ||= '7-59/15 * * * *'
   Settings.cron_jobs['app_sec_dast_profile_schedule_worker']['job_class'] = 'AppSec::Dast::ProfileScheduleWorker'
-  Settings.cron_jobs['ci_namespace_mirrors_consistency_check_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['ci_namespace_mirrors_consistency_check_worker'] ||= {}
   Settings.cron_jobs['ci_namespace_mirrors_consistency_check_worker']['cron'] ||= '*/4 * * * *'
   Settings.cron_jobs['ci_namespace_mirrors_consistency_check_worker']['job_class'] = 'Database::CiNamespaceMirrorsConsistencyCheckWorker'
-  Settings.cron_jobs['ci_project_mirrors_consistency_check_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['ci_project_mirrors_consistency_check_worker'] ||= {}
   Settings.cron_jobs['ci_project_mirrors_consistency_check_worker']['cron'] ||= '2-58/4 * * * *'
   Settings.cron_jobs['ci_project_mirrors_consistency_check_worker']['job_class'] = 'Database::CiProjectMirrorsConsistencyCheckWorker'
-  Settings.cron_jobs['arkose_blocked_users_report_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['arkose_blocked_users_report_worker'] ||= {}
   Settings.cron_jobs['arkose_blocked_users_report_worker']['cron'] ||= '0 6 * * *'
   Settings.cron_jobs['arkose_blocked_users_report_worker']['job_class'] = 'Arkose::BlockedUsersReportWorker'
-  Settings.cron_jobs['ci_runners_stale_group_runners_prune_worker_cron'] ||= Settingslogic.new({})
+  Settings.cron_jobs['ci_runners_stale_group_runners_prune_worker_cron'] ||= {}
   Settings.cron_jobs['ci_runners_stale_group_runners_prune_worker_cron']['cron'] ||= '30 * * * *'
   Settings.cron_jobs['ci_runners_stale_group_runners_prune_worker_cron']['job_class'] = 'Ci::Runners::StaleGroupRunnersPruneCronWorker'
-  Settings.cron_jobs['licenses_reset_submit_license_usage_data_banner'] ||= Settingslogic.new({})
+  Settings.cron_jobs['licenses_reset_submit_license_usage_data_banner'] ||= {}
   Settings.cron_jobs['licenses_reset_submit_license_usage_data_banner']['cron'] ||= "0 0 * * *"
   Settings.cron_jobs['licenses_reset_submit_license_usage_data_banner']['job_class'] = 'Licenses::ResetSubmitLicenseUsageDataBannerWorker'
-  Settings.cron_jobs['abandoned_trial_emails'] ||= Settingslogic.new({})
+  Settings.cron_jobs['abandoned_trial_emails'] ||= {}
   Settings.cron_jobs['abandoned_trial_emails']['cron'] ||= "0 1 * * *"
   Settings.cron_jobs['abandoned_trial_emails']['job_class'] = 'Emails::AbandonedTrialEmailsCronWorker'
-  Settings.cron_jobs['package_metadata_sync_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['package_metadata_sync_worker'] ||= {}
   Settings.cron_jobs['package_metadata_sync_worker']['cron'] ||= "*/5 * * * *"
   Settings.cron_jobs['package_metadata_sync_worker']['job_class'] = 'PackageMetadata::SyncWorker'
-  Settings.cron_jobs['compliance_violations_consistency_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['compliance_violations_consistency_worker'] ||= {}
   Settings.cron_jobs['compliance_violations_consistency_worker']['cron'] ||= '0 1 * * *'
   Settings.cron_jobs['compliance_violations_consistency_worker']['job_class'] = 'ComplianceManagement::MergeRequests::ComplianceViolationsConsistencyWorker'
   Gitlab.com do
-    Settings.cron_jobs['free_user_cap_backfill_notification_jobs_worker'] ||= Settingslogic.new({})
+    Settings.cron_jobs['free_user_cap_backfill_notification_jobs_worker'] ||= {}
     Settings.cron_jobs['free_user_cap_backfill_notification_jobs_worker']['cron'] ||= '*/5 * * * *'
     Settings.cron_jobs['free_user_cap_backfill_notification_jobs_worker']['job_class'] = 'Namespaces::FreeUserCap::BackfillNotificationJobsWorker'
-    Settings.cron_jobs['free_user_cap_backfill_clear_notified_flag'] ||= Settingslogic.new({})
+    Settings.cron_jobs['free_user_cap_backfill_clear_notified_flag'] ||= {}
     Settings.cron_jobs['free_user_cap_backfill_clear_notified_flag']['cron'] ||= '*/5 * * * *'
     Settings.cron_jobs['free_user_cap_backfill_clear_notified_flag']['job_class'] ||= 'Namespaces::FreeUserCap::BackfillNotificationClearingJobsWorker'
-    Settings.cron_jobs['disable_legacy_open_source_license_for_inactive_projects'] ||= Settingslogic.new({})
+    Settings.cron_jobs['disable_legacy_open_source_license_for_inactive_projects'] ||= {}
     Settings.cron_jobs['disable_legacy_open_source_license_for_inactive_projects']['cron'] ||= "30 5 * * 0"
     Settings.cron_jobs['disable_legacy_open_source_license_for_inactive_projects']['job_class'] = 'Projects::DisableLegacyOpenSourceLicenseForInactiveProjectsWorker'
-    Settings.cron_jobs['notify_seats_exceeded_batch_worker'] ||= Settingslogic.new({})
+    Settings.cron_jobs['notify_seats_exceeded_batch_worker'] ||= {}
     Settings.cron_jobs['notify_seats_exceeded_batch_worker']['cron'] ||= '0 3 * * *'
     Settings.cron_jobs['notify_seats_exceeded_batch_worker']['job_class'] ||= 'GitlabSubscriptions::NotifySeatsExceededBatchWorker'
-    Settings.cron_jobs['gitlab_subscriptions_schedule_refresh_seats_worker'] ||= Settingslogic.new({})
+    Settings.cron_jobs['gitlab_subscriptions_schedule_refresh_seats_worker'] ||= {}
     Settings.cron_jobs['gitlab_subscriptions_schedule_refresh_seats_worker']['cron'] ||= "0 */6 * * *"
     Settings.cron_jobs['gitlab_subscriptions_schedule_refresh_seats_worker']['job_class'] = 'GitlabSubscriptions::ScheduleRefreshSeatsWorker'
   end
@@ -862,14 +860,14 @@
 #
 # Sidekiq
 #
-Settings['sidekiq'] ||= Settingslogic.new({})
+Settings['sidekiq'] ||= {}
 Settings['sidekiq']['log_format'] ||= 'default'
 Settings['sidekiq']['routing_rules'] = Settings.build_sidekiq_routing_rules(Settings['sidekiq']['routing_rules'])
 
 #
 # GitLab Shell
 #
-Settings['gitlab_shell'] ||= Settingslogic.new({})
+Settings['gitlab_shell'] ||= {}
 Settings.gitlab_shell['path']           = Settings.absolute(Settings.gitlab_shell['path'] || Settings.gitlab['user_home'] + '/gitlab-shell/')
 Settings.gitlab_shell['hooks_path']     = :deprecated_use_gitlab_shell_path_instead
 Settings.gitlab_shell['authorized_keys_file'] ||= File.join(Dir.home, '.ssh', 'authorized_keys')
@@ -889,13 +887,13 @@
 #
 # Workhorse
 #
-Settings['workhorse'] ||= Settingslogic.new({})
+Settings['workhorse'] ||= {}
 Settings.workhorse['secret_file'] ||= Rails.root.join('.gitlab_workhorse_secret')
 
 #
 # GitLab KAS
 #
-Settings['gitlab_kas'] ||= Settingslogic.new({})
+Settings['gitlab_kas'] ||= {}
 Settings.gitlab_kas['enabled'] ||= false
 Settings.gitlab_kas['secret_file'] ||= Rails.root.join('.gitlab_kas_secret')
 Settings.gitlab_kas['external_url'] ||= 'wss://kas.example.com'
@@ -906,14 +904,14 @@
 # Suggested Reviewers
 #
 Gitlab.ee do
-  Settings['suggested_reviewers'] ||= Settingslogic.new({})
+  Settings['suggested_reviewers'] ||= {}
   Settings.suggested_reviewers['secret_file'] ||= Rails.root.join('.gitlab_suggested_reviewers_secret')
 end
 
 #
 # Repositories
 #
-Settings['repositories'] ||= Settingslogic.new({})
+Settings['repositories'] ||= {}
 Settings.repositories['storages'] ||= {}
 
 Settings.repositories.storages.each do |key, storage|
@@ -943,12 +941,12 @@
 #
 # Backup
 #
-Settings['backup'] ||= Settingslogic.new({})
+Settings['backup'] ||= {}
 Settings.backup['keep_time'] ||= 0
 Settings.backup['pg_schema']    = nil
 Settings.backup['path']         = Settings.absolute(Settings.backup['path'] || "tmp/backups/")
 Settings.backup['archive_permissions'] ||= 0600
-Settings.backup['upload'] ||= Settingslogic.new({ 'remote_directory' => nil, 'connection' => nil })
+Settings.backup['upload'] ||= { 'remote_directory' => nil, 'connection' => nil }
 Settings.backup['upload']['multipart_chunk_size'] ||= 104857600
 Settings.backup['upload']['encryption'] ||= nil
 Settings.backup['upload']['encryption_key'] ||= ENV['GITLAB_BACKUP_ENCRYPTION_KEY']
@@ -958,19 +956,19 @@
 #
 # Git
 #
-Settings['git'] ||= Settingslogic.new({})
+Settings['git'] ||= {}
 Settings.git['bin_path'] ||= '/usr/bin/git'
 
 # Important: keep the satellites.path setting until GitLab 9.0 at
 # least. This setting is fed to 'rm -rf' in
 # db/migrate/20151023144219_remove_satellites.rb
-Settings['satellites'] ||= Settingslogic.new({})
+Settings['satellites'] ||= {}
 Settings.satellites['path'] = Settings.absolute(Settings.satellites['path'] || "tmp/repo_satellites/")
 
 #
 # Microsoft Graph Mailer
 #
-Settings['microsoft_graph_mailer'] ||= Settingslogic.new({})
+Settings['microsoft_graph_mailer'] ||= {}
 Settings.microsoft_graph_mailer['enabled'] = false if Settings.microsoft_graph_mailer['enabled'].nil?
 Settings.microsoft_graph_mailer['user_id'] ||= nil
 Settings.microsoft_graph_mailer['tenant'] ||= nil
@@ -983,7 +981,7 @@
 # Kerberos
 #
 Gitlab.ee do
-  Settings['kerberos'] ||= Settingslogic.new({})
+  Settings['kerberos'] ||= {}
   Settings.kerberos['enabled'] = false if Settings.kerberos['enabled'].nil?
   Settings.kerberos['keytab'] = nil if Settings.kerberos['keytab'].blank? # nil means use default keytab
   Settings.kerberos['simple_ldap_linking_allowed_realms'] = [] if Settings.kerberos['simple_ldap_linking_allowed_realms'].blank?
@@ -993,7 +991,7 @@
   Settings.kerberos['port'] ||= Settings.kerberos.https ? 8443 : 8088
 
   if Settings.kerberos['enabled'] && !Settings.omniauth.providers.map(&:name).include?('kerberos')
-    Settings.omniauth.providers << Settingslogic.new({ 'name' => 'kerberos' })
+    Settings.omniauth.providers << GitlabSettings::Options.build({ 'name' => 'kerberos' })
   end
 end
 
@@ -1001,7 +999,7 @@
 # Smartcard
 #
 Gitlab.ee do
-  Settings['smartcard'] ||= Settingslogic.new({})
+  Settings['smartcard'] ||= {}
   Settings.smartcard['enabled'] = false if Settings.smartcard['enabled'].nil?
   Settings.smartcard['client_certificate_required_host'] = Settings.gitlab.host if Settings.smartcard['client_certificate_required_host'].nil?
   Settings.smartcard['client_certificate_required_port'] = 3444 if Settings.smartcard['client_certificate_required_port'].nil?
@@ -1012,26 +1010,26 @@
 #
 # FortiAuthenticator
 #
-Settings['forti_authenticator'] ||= Settingslogic.new({})
+Settings['forti_authenticator'] ||= {}
 Settings.forti_authenticator['enabled'] = false if Settings.forti_authenticator['enabled'].nil?
 Settings.forti_authenticator['port'] = 443 if Settings.forti_authenticator['port'].to_i == 0
 
 #
 # FortiToken Cloud
 #
-Settings['forti_token_cloud'] ||= Settingslogic.new({})
+Settings['forti_token_cloud'] ||= {}
 Settings.forti_token_cloud['enabled'] = false if Settings.forti_token_cloud['enabled'].nil?
 
 #
 # DuoAuth
 #
-Settings['duo_auth'] ||= Settingslogic.new({})
+Settings['duo_auth'] ||= {}
 Settings.duo_auth['enabled'] = false if Settings.duo_auth['enabled'].nil?
 
 #
 # Extra customization
 #
-Settings['extra'] ||= Settingslogic.new({})
+Settings['extra'] ||= {}
 Settings.extra['matomo_site_id'] ||= Settings.extra['piwik_site_id'] if Settings.extra['piwik_site_id'].present?
 Settings.extra['matomo_url'] ||= Settings.extra['piwik_url'] if Settings.extra['piwik_url'].present?
 Settings.extra['matomo_disable_cookies'] = false if Settings.extra['matomo_disable_cookies'].nil?
@@ -1040,8 +1038,8 @@
 #
 # Rack::Attack settings
 #
-Settings['rack_attack'] ||= Settingslogic.new({})
-Settings.rack_attack['git_basic_auth'] ||= Settingslogic.new({})
+Settings['rack_attack'] ||= {}
+Settings.rack_attack['git_basic_auth'] ||= {}
 Settings.rack_attack.git_basic_auth['enabled'] = false if Settings.rack_attack.git_basic_auth['enabled'].nil?
 Settings.rack_attack.git_basic_auth['ip_whitelist'] ||= %w{127.0.0.1}
 Settings.rack_attack.git_basic_auth['maxretry'] ||= 10
@@ -1051,17 +1049,17 @@
 #
 # Gitaly
 #
-Settings['gitaly'] ||= Settingslogic.new({})
+Settings['gitaly'] ||= {}
 
 #
 # Webpack settings
 #
-Settings['webpack'] ||= Settingslogic.new({})
+Settings['webpack'] ||= {}
 Settings.webpack['config_file'] ||= 'config/webpack.config.js'
 Settings.webpack['output_dir']  ||= 'public/assets/webpack'
 Settings.webpack['public_path'] ||= 'assets/webpack'
 Settings.webpack['manifest_filename'] ||= 'manifest.json'
-Settings.webpack['dev_server'] ||= Settingslogic.new({})
+Settings.webpack['dev_server'] ||= {}
 Settings.webpack.dev_server['enabled'] ||= false
 Settings.webpack.dev_server['host']    ||= 'localhost'
 Settings.webpack.dev_server['port']    ||= 3808
@@ -1070,10 +1068,10 @@
 #
 # Monitoring settings
 #
-Settings['monitoring'] ||= Settingslogic.new({})
+Settings['monitoring'] ||= {}
 Settings.monitoring['ip_whitelist'] ||= ['127.0.0.1/8']
 
-Settings.monitoring['sidekiq_exporter'] ||= Settingslogic.new({})
+Settings.monitoring['sidekiq_exporter'] ||= {}
 Settings.monitoring.sidekiq_exporter['enabled'] ||= false
 Settings.monitoring.sidekiq_exporter['log_enabled'] ||= false
 Settings.monitoring.sidekiq_exporter['address'] ||= 'localhost'
@@ -1082,12 +1080,12 @@
 Settings.monitoring.sidekiq_exporter['tls_cert_path'] ||= nil
 Settings.monitoring.sidekiq_exporter['tls_key_path'] ||= nil
 
-Settings.monitoring['sidekiq_health_checks'] ||= Settingslogic.new({})
+Settings.monitoring['sidekiq_health_checks'] ||= {}
 Settings.monitoring.sidekiq_health_checks['enabled'] ||= false
 Settings.monitoring.sidekiq_health_checks['address'] ||= 'localhost'
 Settings.monitoring.sidekiq_health_checks['port'] ||= 8092
 
-Settings.monitoring['web_exporter'] ||= Settingslogic.new({})
+Settings.monitoring['web_exporter'] ||= {}
 Settings.monitoring.web_exporter['enabled'] ||= false
 Settings.monitoring.web_exporter['log_enabled'] ||= true
 Settings.monitoring.web_exporter['address'] ||= 'localhost'
@@ -1099,20 +1097,20 @@
 #
 # Prometheus settings
 #
-Settings['prometheus'] ||= Settingslogic.new({})
+Settings['prometheus'] ||= {}
 Settings.prometheus['enabled'] ||= false
 Settings.prometheus['server_address'] ||= nil
 
 #
 # Bullet settings
 #
-Settings['bullet'] ||= Settingslogic.new({})
+Settings['bullet'] ||= {}
 Settings.bullet['enabled'] ||= Rails.env.development?
 
 #
 # Shutdown settings
 #
-Settings['shutdown'] ||= Settingslogic.new({})
+Settings['shutdown'] ||= {}
 Settings.shutdown['blackout_seconds'] ||= 10
 
 #
diff --git a/config/object_store_settings.rb b/config/object_store_settings.rb
index 58d7ea2223102..615b13d59caaf 100644
--- a/config/object_store_settings.rb
+++ b/config/object_store_settings.rb
@@ -20,7 +20,7 @@ class ObjectStoreSettings
 
   # Legacy parser
   def self.legacy_parse(object_store, object_store_type)
-    object_store ||= Settingslogic.new({})
+    object_store ||= GitlabSettings::Options.build({})
     object_store['enabled'] = false if object_store['enabled'].nil?
     object_store['remote_directory'], object_store['bucket_prefix'] = split_bucket_prefix(
       object_store['remote_directory']
@@ -162,7 +162,7 @@ def parse!
       )
       target_config['consolidated_settings'] = true
       section['object_store'] = target_config
-      # Settingslogic internally stores data as a Hash, but it also
+      # GitlabSettings::Options internally stores data as a Hash, but it also
       # creates a Settings object for every key. To avoid confusion, we should
       # update both so that Settings.artifacts and Settings['artifacts'] return
       # the same result.
@@ -178,23 +178,11 @@ def parse!
   # 1. The common settings are defined
   # 2. The legacy settings are not defined
   def use_consolidated_settings?
-    # to_h is needed because we define `default` as a Gitaly storage name
-    # in stub_storage_settings. This causes Settingslogic to redefine Hash#default,
-    # which causes Hash#dig to fail when the key doesn't exist: https://gitlab.com/gitlab-org/gitlab/-/issues/286873
-    settings_h = settings.to_h
-    return false unless settings_h.dig('object_store', 'enabled')
-    return false unless settings_h.dig('object_store', 'connection').present?
+    return false unless settings.dig('object_store', 'enabled')
+    return false unless settings.dig('object_store', 'connection').present?
 
     WORKHORSE_ACCELERATED_TYPES.each do |store|
-      # to_h is needed because we define `default` as a Gitaly storage name
-      # in stub_storage_settings. This causes Settingslogic to redefine Hash#default,
-      # which causes Hash#dig to fail when the key doesn't exist: https://gitlab.com/gitlab-org/gitlab/-/issues/286873
-      #
-      # (byebug) section.dig
-      # *** ArgumentError Exception: wrong number of arguments (given 0, expected 1+)
-      # (byebug) section.dig('object_store')
-      # *** ArgumentError Exception: wrong number of arguments (given 1, expected 0)
-      section = settings.try(store)&.to_h
+      section = settings.try(store)
 
       next unless section
 
diff --git a/config/settings.rb b/config/settings.rb
index a76889f34aeaf..e03d9877e1cb2 100644
--- a/config/settings.rb
+++ b/config/settings.rb
@@ -1,224 +1,222 @@
 # frozen_string_literal: true
 
-require 'settingslogic'
+require_relative '../lib/gitlab_settings'
 
-class Settings < Settingslogic
-  source ENV.fetch('GITLAB_CONFIG') { Pathname.new(File.expand_path('..', __dir__)).join('config/gitlab.yml') }
-  namespace ENV.fetch('GITLAB_ENV') { Rails.env }
+file = ENV.fetch('GITLAB_CONFIG') { Rails.root.join('config/gitlab.yml') }
+section = ENV.fetch('GITLAB_ENV') { Rails.env }
 
-  class << self
-    def gitlab_on_standard_port?
-      on_standard_port?(gitlab)
-    end
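+# Methods defined in this block are mixed into the returned Settings object as
+# helper methods (GitlabSettings.load extends the object with the block).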
+Settings = GitlabSettings.load(file, section) do
+  def gitlab_on_standard_port?
+    on_standard_port?(gitlab)
+  end
 
-    def build_ci_component_fqdn
-      custom_port = ":#{gitlab.port}" unless on_standard_port?(gitlab)
+  def build_ci_component_fqdn
+    custom_port = ":#{gitlab.port}" unless on_standard_port?(gitlab)
 
-      [
-        gitlab.host,
-        custom_port,
-        gitlab.relative_url_root,
-        '/'
-      ].join('')
-    end
+    [
+      gitlab.host,
+      custom_port,
+      gitlab.relative_url_root,
+      '/'
+    ].join('')
+  end
 
-    def host_without_www(url)
-      host(url).sub('www.', '')
-    end
+  def host_without_www(url)
+    host(url).sub('www.', '')
+  end
 
-    def build_gitlab_ci_url
-      custom_port =
-        if on_standard_port?(gitlab)
-          nil
-        else
-          ":#{gitlab.port}"
-        end
-
-      [
-        gitlab.protocol,
-        "://",
-        gitlab.host,
-        custom_port,
-        gitlab.relative_url_root
-      ].join('')
-    end
+  def build_gitlab_ci_url
+    custom_port =
+      if on_standard_port?(gitlab)
+        nil
+      else
+        ":#{gitlab.port}"
+      end
 
-    def build_pages_url
-      base_url(pages).join('')
-    end
+    [
+      gitlab.protocol,
+      "://",
+      gitlab.host,
+      custom_port,
+      gitlab.relative_url_root
+    ].join('')
+  end
 
-    def build_gitlab_shell_ssh_path_prefix
-      user = "#{gitlab_shell.ssh_user}@" unless gitlab_shell.ssh_user.empty?
-      user_host = "#{user}#{gitlab_shell.ssh_host}"
+  def build_pages_url
+    base_url(pages).join('')
+  end
 
-      if gitlab_shell.ssh_port != 22
-        "ssh://#{user_host}:#{gitlab_shell.ssh_port}/"
-      elsif gitlab_shell.ssh_host.include? ':'
-        "[#{user_host}]:"
-      else
-        "#{user_host}:"
-      end
-    end
+  def build_gitlab_shell_ssh_path_prefix
+    user = "#{gitlab_shell.ssh_user}@" unless gitlab_shell.ssh_user.empty?
+    user_host = "#{user}#{gitlab_shell.ssh_host}"
 
-    def build_base_gitlab_url
-      base_url(gitlab).join('')
+    if gitlab_shell.ssh_port != 22
+      "ssh://#{user_host}:#{gitlab_shell.ssh_port}/"
+    elsif gitlab_shell.ssh_host.include? ':'
+      "[#{user_host}]:"
+    else
+      "#{user_host}:"
     end
+  end
 
-    def build_gitlab_url
-      (base_url(gitlab) + [gitlab.relative_url_root]).join('')
-    end
+  def build_base_gitlab_url
+    base_url(gitlab).join('')
+  end
 
-    def build_gitlab_go_url
-      # "Go package paths are not URLs, and do not include port numbers"
-      # https://github.com/golang/go/issues/38213#issuecomment-607851460
-      "#{gitlab.host}#{gitlab.relative_url_root}"
-    end
+  def build_gitlab_url
+    (base_url(gitlab) + [gitlab.relative_url_root]).join('')
+  end
 
-    def kerberos_protocol
-      kerberos.https ? "https" : "http"
-    end
+  def build_gitlab_go_url
+    # "Go package paths are not URLs, and do not include port numbers"
+    # https://github.com/golang/go/issues/38213#issuecomment-607851460
+    "#{gitlab.host}#{gitlab.relative_url_root}"
+  end
 
-    def kerberos_port
-      kerberos.use_dedicated_port ? kerberos.port : gitlab.port
-    end
+  def kerberos_protocol
+    kerberos.https ? "https" : "http"
+  end
 
-    # Curl expects username/password for authentication. However when using GSS-Negotiate not credentials should be needed.
-    # By inserting in the Kerberos dedicated URL ":@", we give to curl an empty username and password and GSS auth goes ahead
-    # Known bug reported in http://sourceforge.net/p/curl/bugs/440/ and http://curl.haxx.se/docs/knownbugs.html
-    def build_gitlab_kerberos_url
-      [
-        kerberos_protocol,
-        "://:@",
-        gitlab.host,
-        ":#{kerberos_port}",
-        gitlab.relative_url_root
-      ].join('')
-    end
+  def kerberos_port
+    kerberos.use_dedicated_port ? kerberos.port : gitlab.port
+  end
 
-    def alternative_gitlab_kerberos_url?
-      kerberos.enabled && (build_gitlab_kerberos_url != build_gitlab_url)
-    end
+  # Curl expects a username/password for authentication. However, when using GSS-Negotiate no credentials should be needed.
+  # By inserting ":@" into the dedicated Kerberos URL, we give curl an empty username and password so GSS auth goes ahead.
+  # Known bug reported in http://sourceforge.net/p/curl/bugs/440/ and http://curl.haxx.se/docs/knownbugs.html
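+  # The result looks like "https://:@gitlab.example.com:8443" (host shown is
+  # illustrative; the relative URL root, if any, is appended).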
+  def build_gitlab_kerberos_url
+    [
+      kerberos_protocol,
+      "://:@",
+      gitlab.host,
+      ":#{kerberos_port}",
+      gitlab.relative_url_root
+    ].join('')
+  end
 
-    # check that values in `current` (string or integer) is a contant in `modul`.
-    def verify_constant_array(modul, current, default)
-      values = default || []
-      unless current.nil?
-        values = []
-        current.each do |constant|
-          values.push(verify_constant(modul, constant, nil))
-        end
-        values.delete_if { |value| value.nil? }
-      end
+  def alternative_gitlab_kerberos_url?
+    kerberos.enabled && (build_gitlab_kerberos_url != build_gitlab_url)
+  end
 
-      values
+  # Check that the values in `current` (strings or integers) are constants in `modul`.
+  def verify_constant_array(modul, current, default)
+    values = default || []
+    unless current.nil?
+      values = []
+      current.each do |constant|
+        values.push(verify_constant(modul, constant, nil))
+      end
+      values.delete_if { |value| value.nil? }
     end
 
-    # check that `current` (string or integer) is a contant in `modul`.
-    def verify_constant(modul, current, default)
-      constant = modul.constants.find { |name| modul.const_get(name, false) == current }
-      value = constant.nil? ? default : modul.const_get(constant, false)
-      if current.is_a? String
-        value = begin
-          modul.const_get(current.upcase, false)
-        rescue StandardError
-          default
-        end
-      end
+    values
+  end
 
-      value
+  # Check that `current` (string or integer) is a constant in `modul`.
+  def verify_constant(modul, current, default)
+    constant = modul.constants.find { |name| modul.const_get(name, false) == current }
+    value = constant.nil? ? default : modul.const_get(constant, false)
+    if current.is_a? String
+      value = begin
+        modul.const_get(current.upcase, false)
+      rescue StandardError
+        default
+      end
     end
 
-    def absolute(path)
-      File.expand_path(path, Rails.root)
-    end
+    value
+  end
 
-    # Don't use this in new code, use attr_encrypted_db_key_base_32 instead!
-    def attr_encrypted_db_key_base_truncated
-      Gitlab::Application.secrets.db_key_base[0..31]
-    end
+  def absolute(path)
+    File.expand_path(path, Rails.root)
+  end
 
-    # Ruby 2.4+ requires passing in the exact required length for OpenSSL keys
-    # (https://github.com/ruby/ruby/commit/ce635262f53b760284d56bb1027baebaaec175d1).
-    # Previous versions quietly truncated the input.
-    #
-    # Makes sure the key is exactly 32 bytes long, either by
-    # truncating or right-padding it with ASCII 0s. Use this when
-    # using :per_attribute_iv mode for attr_encrypted.
-    def attr_encrypted_db_key_base_32
-      Gitlab::Utils.ensure_utf8_size(attr_encrypted_db_key_base, bytes: 32.bytes)
-    end
+  # Don't use this in new code, use attr_encrypted_db_key_base_32 instead!
+  def attr_encrypted_db_key_base_truncated
+    Gitlab::Application.secrets.db_key_base[0..31]
+  end
 
-    def attr_encrypted_db_key_base_12
-      Gitlab::Utils.ensure_utf8_size(attr_encrypted_db_key_base, bytes: 12.bytes)
-    end
+  # Ruby 2.4+ requires passing in the exact required length for OpenSSL keys
+  # (https://github.com/ruby/ruby/commit/ce635262f53b760284d56bb1027baebaaec175d1).
+  # Previous versions quietly truncated the input.
+  #
+  # Makes sure the key is exactly 32 bytes long, either by
+  # truncating or right-padding it with ASCII 0s. Use this when
+  # using :per_attribute_iv mode for attr_encrypted.
+  def attr_encrypted_db_key_base_32
+    Gitlab::Utils.ensure_utf8_size(attr_encrypted_db_key_base, bytes: 32.bytes)
+  end
 
-    # This should be used for :per_attribute_salt_and_iv mode. There is no
-    # need to truncate the key because the encryptor will use the salt to
-    # generate a hash of the password:
-    # https://github.com/attr-encrypted/encryptor/blob/c3a62c4a9e74686dd95e0548f9dc2a361fdc95d1/lib/encryptor.rb#L77
-    def attr_encrypted_db_key_base
-      Gitlab::Application.secrets.db_key_base
-    end
+  def attr_encrypted_db_key_base_12
+    Gitlab::Utils.ensure_utf8_size(attr_encrypted_db_key_base, bytes: 12.bytes)
+  end
 
-    def encrypted(path)
-      Gitlab::EncryptedConfiguration.new(
-        content_path: path,
-        base_key: Gitlab::Application.secrets.encrypted_settings_key_base,
-        previous_keys: Gitlab::Application.secrets.rotated_encrypted_settings_key_base || []
-      )
-    end
+  # This should be used for :per_attribute_salt_and_iv mode. There is no
+  # need to truncate the key because the encryptor will use the salt to
+  # generate a hash of the password:
+  # https://github.com/attr-encrypted/encryptor/blob/c3a62c4a9e74686dd95e0548f9dc2a361fdc95d1/lib/encryptor.rb#L77
+  def attr_encrypted_db_key_base
+    Gitlab::Application.secrets.db_key_base
+  end
 
-    def load_dynamic_cron_schedules!
-      cron_jobs['gitlab_service_ping_worker']['cron'] ||= cron_for_service_ping
-    end
+  def encrypted(path)
+    Gitlab::EncryptedConfiguration.new(
+      content_path: path,
+      base_key: Gitlab::Application.secrets.encrypted_settings_key_base,
+      previous_keys: Gitlab::Application.secrets.rotated_encrypted_settings_key_base || []
+    )
+  end
 
-    # Route jobs to queue based on worker name.
-    def build_sidekiq_routing_rules(rules)
-      return rules unless rules.nil? || rules&.empty?
+  def load_dynamic_cron_schedules!
+    cron_jobs['gitlab_service_ping_worker']['cron'] ||= cron_for_service_ping
+  end
 
-      [[Gitlab::SidekiqConfig::WorkerMatcher::WILDCARD_MATCH, nil]]
-    end
+  # Route jobs to queue based on worker name.
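+  # When no rules are configured, fall back to a single wildcard rule so every
+  # worker matches.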
+  def build_sidekiq_routing_rules(rules)
+    return rules unless rules.nil? || rules&.empty?
 
-    private
+    [[Gitlab::SidekiqConfig::WorkerMatcher::WILDCARD_MATCH, nil]]
+  end
 
-    def base_url(config)
-      custom_port = on_standard_port?(config) ? nil : ":#{config.port}"
+  private
 
-      [
-        config.protocol,
-        "://",
-        config.host,
-        custom_port
-      ]
-    end
+  def base_url(config)
+    custom_port = on_standard_port?(config) ? nil : ":#{config.port}"
 
-    def on_standard_port?(config)
-      config.port.to_i == (config.https ? 443 : 80)
-    end
+    [
+      config.protocol,
+      "://",
+      config.host,
+      custom_port
+    ]
+  end
 
-    # Extract the host part of the given +url+.
-    def host(url)
-      url = url.downcase
-      url = "http://#{url}" unless url.start_with?('http')
+  def on_standard_port?(config)
+    config.port.to_i == (config.https ? 443 : 80)
+  end
 
-      # Get rid of the path so that we don't even have to encode it
-      url_without_path = url.sub(%r{(https?://[^/]+)/?.*}, '\1')
+  # Extract the host part of the given +url+.
+  def host(url)
+    url = url.downcase
+    url = "http://#{url}" unless url.start_with?('http')
 
-      URI.parse(url_without_path).host
-    end
+    # Get rid of the path so that we don't even have to encode it
+    url_without_path = url.sub(%r{(https?://[^/]+)/?.*}, '\1')
 
-    # Runs at a consistent random time of day on a day of the week based on
-    # the instance UUID. This is to balance the load on the service receiving
-    # these pings. The sidekiq job handles temporary http failures.
-    def cron_for_service_ping
-      # Set a default UUID for the case when the UUID hasn't been initialized.
-      uuid = Gitlab::CurrentSettings.uuid || 'uuid-not-set'
+    URI.parse(url_without_path).host
+  end
 
-      minute = Digest::SHA256.hexdigest(uuid + 'minute').to_i(16) % 60
-      hour = Digest::SHA256.hexdigest(uuid + 'hour').to_i(16) % 24
-      day_of_week = Digest::SHA256.hexdigest(uuid).to_i(16) % 7
+  # Runs at a consistent random time of day on a day of the week based on
+  # the instance UUID. This is to balance the load on the service receiving
+  # these pings. The sidekiq job handles temporary http failures.
+  def cron_for_service_ping
+    # Set a default UUID for the case when the UUID hasn't been initialized.
+    uuid = Gitlab::CurrentSettings.uuid || 'uuid-not-set'
 
-      "#{minute} #{hour} * * #{day_of_week}"
-    end
+    minute = Digest::SHA256.hexdigest(uuid + 'minute').to_i(16) % 60
+    hour = Digest::SHA256.hexdigest(uuid + 'hour').to_i(16) % 24
+    day_of_week = Digest::SHA256.hexdigest(uuid).to_i(16) % 7
+
+    "#{minute} #{hour} * * #{day_of_week}"
   end
 end
diff --git a/config/smime_signature_settings.rb b/config/smime_signature_settings.rb
index 4b50e01dba0a5..e964684ea8077 100644
--- a/config/smime_signature_settings.rb
+++ b/config/smime_signature_settings.rb
@@ -3,7 +3,7 @@
 # Set default values for email_smime settings
 class SmimeSignatureSettings
   def self.parse(email_smime)
-    email_smime ||= Settingslogic.new({})
+    email_smime ||= GitlabSettings::Options.build({})
     email_smime['enabled'] = false unless email_smime['enabled']
     email_smime['key_file'] ||= Rails.root.join('.gitlab_smime_key')
     email_smime['cert_file'] ||= Rails.root.join('.gitlab_smime_cert')
diff --git a/lib/banzai/filter/asset_proxy_filter.rb b/lib/banzai/filter/asset_proxy_filter.rb
index 4c14ee7299bea..13b86277ce18d 100644
--- a/lib/banzai/filter/asset_proxy_filter.rb
+++ b/lib/banzai/filter/asset_proxy_filter.rb
@@ -38,7 +38,7 @@ def self.transform_context(context)
       # whenever the application settings are changed
       def self.initialize_settings
         application_settings           = Gitlab::CurrentSettings.current_application_settings
-        Gitlab.config['asset_proxy'] ||= Settingslogic.new({})
+        Gitlab.config['asset_proxy'] ||= GitlabSettings::Options.build({})
 
         if application_settings.respond_to?(:asset_proxy_enabled)
           Gitlab.config.asset_proxy['enabled']       = application_settings.asset_proxy_enabled
diff --git a/lib/gitlab/consul/internal.rb b/lib/gitlab/consul/internal.rb
index 1994369dee959..c4feac412e48c 100644
--- a/lib/gitlab/consul/internal.rb
+++ b/lib/gitlab/consul/internal.rb
@@ -12,7 +12,7 @@ class Internal
       class << self
         def api_url
           Gitlab.config.consul.api_url.to_s.presence if Gitlab.config.consul
-        rescue Settingslogic::MissingSetting
+        rescue GitlabSettings::MissingSetting
           Gitlab::AppLogger.error('Consul api_url is not present in config/gitlab.yml')
 
           nil
diff --git a/lib/gitlab/omniauth_initializer.rb b/lib/gitlab/omniauth_initializer.rb
index a03533dcd9af4..f933be673ab4d 100644
--- a/lib/gitlab/omniauth_initializer.rb
+++ b/lib/gitlab/omniauth_initializer.rb
@@ -74,7 +74,7 @@ def arguments_for(provider)
         # An Array from the configuration will be expanded
         provider_arguments.concat arguments
         provider_arguments << defaults unless defaults.empty?
-      when Hash
+      when Hash, GitlabSettings::Options
         hash_arguments = arguments.deep_symbolize_keys.deep_merge(defaults)
         normalized = normalize_hash_arguments(hash_arguments)
 
diff --git a/lib/gitlab/prometheus/internal.rb b/lib/gitlab/prometheus/internal.rb
index fe06b97add622..55e1837d101d9 100644
--- a/lib/gitlab/prometheus/internal.rb
+++ b/lib/gitlab/prometheus/internal.rb
@@ -27,7 +27,7 @@ def self.uri
 
       def self.server_address
         Gitlab.config.prometheus.server_address.to_s if Gitlab.config.prometheus
-      rescue Settingslogic::MissingSetting
+      rescue GitlabSettings::MissingSetting
         Gitlab::AppLogger.error('Prometheus server_address is not present in config/gitlab.yml')
 
         nil
@@ -35,7 +35,7 @@ def self.server_address
 
       def self.prometheus_enabled?
         Gitlab.config.prometheus.enabled if Gitlab.config.prometheus
-      rescue Settingslogic::MissingSetting
+      rescue GitlabSettings::MissingSetting
         Gitlab::AppLogger.error('prometheus.enabled is not present in config/gitlab.yml')
 
         false
diff --git a/lib/gitlab/sidekiq_config.rb b/lib/gitlab/sidekiq_config.rb
index 7e2a934b3dda6..33a15d95d225d 100644
--- a/lib/gitlab/sidekiq_config.rb
+++ b/lib/gitlab/sidekiq_config.rb
@@ -57,13 +57,12 @@ def cron_jobs
         @cron_jobs ||= begin
           Gitlab.config.load_dynamic_cron_schedules!
 
-          # Load recurring jobs from gitlab.yml
-          # UGLY Hack to get nested hash from settingslogic
-          jobs = Gitlab::Json.parse(Gitlab.config.cron_jobs.to_json)
+          jobs = Gitlab.config.cron_jobs.to_hash
 
           jobs.delete('poll_interval') # Would be interpreted as a job otherwise
 
-          # UGLY hack: Settingslogic doesn't allow 'class' key
+          # Settingslogic (the gem formerly used for YAML configuration) didn't allow a 'class' key,
+          # so we configure cron jobs with `job_class` as a workaround.
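+          # For example, an entry of { 'cron' => '0 * * * *', 'job_class' => 'SomeWorker' }
+          # ends up scheduled as { 'cron' => '0 * * * *', 'class' => 'SomeWorker' }.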
           required_keys = %w[job_class cron]
           jobs.each do |k, v|
             if jobs[k] && required_keys.all? { |s| jobs[k].key?(s) }
diff --git a/lib/gitlab/tracking.rb b/lib/gitlab/tracking.rb
index ef86c9d6007c5..deef3a9a38bca 100644
--- a/lib/gitlab/tracking.rb
+++ b/lib/gitlab/tracking.rb
@@ -59,7 +59,7 @@ def collector_hostname
 
       def snowplow_micro_enabled?
         Rails.env.development? && Gitlab.config.snowplow_micro.enabled
-      rescue Settingslogic::MissingSetting
+      rescue GitlabSettings::MissingSetting
         false
       end
 
diff --git a/lib/gitlab/tracking/destinations/snowplow_micro.rb b/lib/gitlab/tracking/destinations/snowplow_micro.rb
index 09480f261064d..e15c03b68081b 100644
--- a/lib/gitlab/tracking/destinations/snowplow_micro.rb
+++ b/lib/gitlab/tracking/destinations/snowplow_micro.rb
@@ -53,7 +53,7 @@ def base_uri
           url = Gitlab.config.snowplow_micro.address
           scheme = Gitlab.config.gitlab.https ? 'https' : 'http'
           "#{scheme}://#{url}"
-        rescue Settingslogic::MissingSetting
+        rescue GitlabSettings::MissingSetting
           DEFAULT_URI
         end
       end
diff --git a/lib/gitlab/url_blocker.rb b/lib/gitlab/url_blocker.rb
index 2c02874876a35..e7aab4fae79a6 100644
--- a/lib/gitlab/url_blocker.rb
+++ b/lib/gitlab/url_blocker.rb
@@ -344,8 +344,7 @@ def enabled_object_storage_endpoints
 
           next unless section_setting && section_setting['enabled']
 
-          # Use #to_h to avoid Settingslogic bug: https://gitlab.com/gitlab-org/gitlab/-/issues/286873
-          object_store_setting = section_setting['object_store']&.to_h
+          object_store_setting = section_setting['object_store']
 
           next unless object_store_setting && object_store_setting['enabled']
 
diff --git a/lib/gitlab_settings.rb b/lib/gitlab_settings.rb
new file mode 100644
index 0000000000000..b1f510eaeb127
--- /dev/null
+++ b/lib/gitlab_settings.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require "active_support"
+require "active_support/core_ext/hash"
+
+require_relative 'gitlab_settings/settings'
+require_relative 'gitlab_settings/options'
+
+module GitlabSettings
+  MissingSetting = Class.new(StandardError)
+
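+  # Parses the YAML file at `source`, picks the given `section`, and returns a
+  # Settings object extended with any helper methods defined in the block.
+  #
+  # Illustrative usage (the path, section, and helper are examples only):
+  #   Settings = GitlabSettings.load('config/gitlab.yml', 'production') do
+  #     def some_helper
+  #       gitlab.host
+  #     end
+  #   end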
+  def self.load(source = nil, section = nil, &block)
+    Settings
+    .new(source, section)
+    .extend(Module.new(&block))
+  end
+end
diff --git a/lib/gitlab_settings/options.rb b/lib/gitlab_settings/options.rb
new file mode 100644
index 0000000000000..b5f86a814e20c
--- /dev/null
+++ b/lib/gitlab_settings/options.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+module GitlabSettings
+  class Options
+    # Recursively build GitlabSettings::Options
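+    # from nested Hashes; Arrays are mapped element-wise and scalars are
+    # returned unchanged. Illustrative example:
+    #
+    #   opts = GitlabSettings::Options.build('foo' => { 'bar' => 'baz' })
+    #   opts.foo.bar       # => "baz"
+    #   opts['foo']['bar'] # => "baz"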
+    def self.build(obj)
+      case obj
+      when Hash
+        new(obj.transform_values { |value| build(value) })
+      when Array
+        obj.map { |value| build(value) }
+      else
+        obj
+      end
+    end
+
+    def initialize(value)
+      @options = value.deep_stringify_keys
+    end
+
+    def [](key)
+      @options[key.to_s]
+    end
+
+    def []=(key, value)
+      @options[key.to_s] = Options.build(value)
+    end
+
+    def key?(name)
+      @options.key?(name.to_s) || @options.key?(name.to_sym)
+    end
+    alias_method :has_key?, :key?
+
+    def to_hash
+      @options.deep_transform_values do |option|
+        case option
+        when GitlabSettings::Options
+          option.to_hash
+        else
+          option
+        end
+      end
+    end
+    alias_method :to_h, :to_hash
+
+    def merge(other)
+      Options.build(to_hash.merge(other.deep_stringify_keys))
+    end
+
+    def deep_merge(other)
+      Options.build(to_hash.deep_merge(other.deep_stringify_keys))
+    end
+
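+    # Pretend to be a Hash so existing `is_a?(Hash)` checks on configuration
+    # values keep working after the move away from Settingslogic.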
+    def is_a?(klass)
+      return true if klass == Hash
+
+      super(klass)
+    end
+
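+    # Dynamic accessors: `opts.foo` reads a key, `opts.foo = value` writes an
+    # existing key, unknown names fall through to the underlying Hash when it
+    # responds to them, and anything else raises GitlabSettings::MissingSetting.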
+    def method_missing(name, *args, &block)
+      name_string = +name.to_s
+
+      if name_string.chomp!("=")
+        return self[name_string] = args.first if key?(name_string)
+      elsif key?(name_string)
+        return self[name_string]
+      end
+
+      return @options.public_send(name, *args, &block) if @options.respond_to?(name) # rubocop: disable GitlabSecurity/PublicSend
+
+      raise ::GitlabSettings::MissingSetting, "option '#{name}' not defined"
+    end
+
+    def respond_to_missing?(name, include_all = false)
+      return true if key?(name)
+
+      @options.respond_to?(name, include_all)
+    end
+  end
+end
diff --git a/lib/gitlab_settings/settings.rb b/lib/gitlab_settings/settings.rb
new file mode 100644
index 0000000000000..79d006fb11809
--- /dev/null
+++ b/lib/gitlab_settings/settings.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module GitlabSettings
+  class Settings
+    attr_reader :source
+
+    def initialize(source, section)
+      raise(ArgumentError, 'config source is required') if source.blank?
+      raise(ArgumentError, 'config section is required') if section.blank?
+
+      @source = source
+      @section = section
+
+      reload!
+    end
+
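+    # Re-parses the YAML source and rebuilds the nested Options for the
+    # configured section.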
+    def reload!
+      yaml = ActiveSupport::ConfigurationFile.parse(source)
+      all_configs = yaml.deep_stringify_keys
+      configs = all_configs[section]
+
+      @config = Options.build(configs)
+    end
+
+    def method_missing(name, *args)
+      config.public_send(name, *args) # rubocop: disable GitlabSecurity/PublicSend
+    end
+
+    def respond_to_missing?(name, include_all = false)
+      config.respond_to?(name, include_all)
+    end
+
+    private
+
+    attr_reader :config, :section
+  end
+end
diff --git a/lib/object_storage/direct_upload.rb b/lib/object_storage/direct_upload.rb
index 7d2f825e1192b..fee006f4e0f70 100644
--- a/lib/object_storage/direct_upload.rb
+++ b/lib/object_storage/direct_upload.rb
@@ -260,7 +260,7 @@ def upload_options
     end
 
     def connection
-      @connection ||= ::Fog::Storage.new(credentials)
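+      # Hand Fog the raw credentials Hash rather than a GitlabSettings::Options object.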
+      @connection ||= ::Fog::Storage.new(credentials.to_hash)
     end
   end
 end
diff --git a/metrics_server/metrics_server.rb b/metrics_server/metrics_server.rb
index 269d03f6d0c1a..530f3a000a90a 100644
--- a/metrics_server/metrics_server.rb
+++ b/metrics_server/metrics_server.rb
@@ -162,7 +162,7 @@ def start
       when 'puma'
         Gitlab::Metrics::Exporter::WebExporter.instance(**default_opts)
       when 'sidekiq'
-        settings = Settings.new(Settings.monitoring.sidekiq_exporter)
+        settings = GitlabSettings::Options.build(Settings.monitoring.sidekiq_exporter)
         Gitlab::Metrics::Exporter::SidekiqExporter.instance(settings, **default_opts)
       end
 
diff --git a/spec/commands/metrics_server/metrics_server_spec.rb b/spec/commands/metrics_server/metrics_server_spec.rb
index 310e31da04501..88a28b02903d4 100644
--- a/spec/commands/metrics_server/metrics_server_spec.rb
+++ b/spec/commands/metrics_server/metrics_server_spec.rb
@@ -71,7 +71,7 @@
         if use_golang_server
           stub_env('GITLAB_GOLANG_METRICS_SERVER', '1')
           allow(Settings).to receive(:monitoring).and_return(
-            Settingslogic.new(config.dig('test', 'monitoring')))
+            GitlabSettings::Options.build(config.dig('test', 'monitoring')))
         else
           config_file.write(YAML.dump(config))
           config_file.close
diff --git a/spec/commands/sidekiq_cluster/cli_spec.rb b/spec/commands/sidekiq_cluster/cli_spec.rb
index 3951ef4928882..499432c260522 100644
--- a/spec/commands/sidekiq_cluster/cli_spec.rb
+++ b/spec/commands/sidekiq_cluster/cli_spec.rb
@@ -18,17 +18,12 @@
   let(:sidekiq_exporter_enabled) { false }
   let(:sidekiq_exporter_port) { '3807' }
 
-  let(:config_file) { Tempfile.new('gitlab.yml') }
   let(:config) do
     {
-      'test' => {
-        'monitoring' => {
-          'sidekiq_exporter' => {
-            'address' => 'localhost',
-            'enabled' => sidekiq_exporter_enabled,
-            'port' => sidekiq_exporter_port
-          }
-        }
+      'sidekiq_exporter' => {
+        'address' => 'localhost',
+        'enabled' => sidekiq_exporter_enabled,
+        'port' => sidekiq_exporter_port
       }
     }
   end
@@ -37,14 +32,6 @@
   let(:metrics_cleanup_service) { instance_double(Prometheus::CleanupMultiprocDirService, execute: nil) }
 
   before do
-    stub_env('RAILS_ENV', 'test')
-
-    config_file.write(YAML.dump(config))
-    config_file.close
-
-    allow(::Settings).to receive(:source).and_return(config_file.path)
-    ::Settings.reload!
-
     allow(Gitlab::ProcessManagement).to receive(:write_pid)
     allow(Gitlab::SidekiqCluster::SidekiqProcessSupervisor).to receive(:instance).and_return(supervisor)
     allow(supervisor).to receive(:supervise)
@@ -52,8 +39,13 @@
     allow(Prometheus::CleanupMultiprocDirService).to receive(:new).and_return(metrics_cleanup_service)
   end
 
-  after do
-    config_file.unlink
+  around do |example|
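+    # Swap in the test monitoring config and restore the original afterwards,
+    # so the global Settings object is not left mutated between examples.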
+    original = Settings['monitoring']
+    Settings['monitoring'] = config
+
+    example.run
+
+    Settings['monitoring'] = original
   end
 
   describe '#run' do
@@ -318,13 +310,7 @@
 
           context 'when sidekiq_exporter is not set up' do
             let(:config) do
-              {
-                'test' => {
-                  'monitoring' => {
-                    'sidekiq_exporter' => {}
-                  }
-                }
-              }
+              { 'sidekiq_exporter' => {} }
             end
 
             it 'does not start a sidekiq metrics server' do
@@ -336,13 +322,7 @@
 
           context 'with missing sidekiq_exporter setting' do
             let(:config) do
-              {
-                'test' => {
-                  'monitoring' => {
-                    'sidekiq_exporter' => nil
-                  }
-                }
-              }
+              { 'sidekiq_exporter' => nil }
             end
 
             it 'does not start a sidekiq metrics server' do
diff --git a/spec/config/object_store_settings_spec.rb b/spec/config/object_store_settings_spec.rb
index 025966c894005..b8e46affc2a54 100644
--- a/spec/config/object_store_settings_spec.rb
+++ b/spec/config/object_store_settings_spec.rb
@@ -5,7 +5,7 @@
 
 RSpec.describe ObjectStoreSettings, feature_category: :shared do
   describe '#parse!' do
-    let(:settings) { Settingslogic.new(config) }
+    let(:settings) { GitlabSettings::Options.build(config) }
 
     subject { described_class.new(settings).parse! }
 
@@ -68,7 +68,7 @@
 
         expect(settings.artifacts['enabled']).to be true
         expect(settings.artifacts['object_store']['enabled']).to be true
-        expect(settings.artifacts['object_store']['connection']).to eq(connection)
+        expect(settings.artifacts['object_store']['connection'].to_hash).to eq(connection)
         expect(settings.artifacts['object_store']['direct_upload']).to be true
         expect(settings.artifacts['object_store']['proxy_download']).to be false
         expect(settings.artifacts['object_store']['remote_directory']).to eq('artifacts')
@@ -78,7 +78,7 @@
 
         expect(settings.lfs['enabled']).to be true
         expect(settings.lfs['object_store']['enabled']).to be true
-        expect(settings.lfs['object_store']['connection']).to eq(connection)
+        expect(settings.lfs['object_store']['connection'].to_hash).to eq(connection)
         expect(settings.lfs['object_store']['direct_upload']).to be true
         expect(settings.lfs['object_store']['proxy_download']).to be true
         expect(settings.lfs['object_store']['remote_directory']).to eq('lfs-objects')
@@ -88,7 +88,7 @@
 
         expect(settings.pages['enabled']).to be true
         expect(settings.pages['object_store']['enabled']).to be true
-        expect(settings.pages['object_store']['connection']).to eq(connection)
+        expect(settings.pages['object_store']['connection'].to_hash).to eq(connection)
         expect(settings.pages['object_store']['remote_directory']).to eq('pages')
         expect(settings.pages['object_store']['bucket_prefix']).to eq(nil)
         expect(settings.pages['object_store']['consolidated_settings']).to be true
@@ -128,7 +128,7 @@
         it 'populates artifacts CDN config' do
           subject
 
-          expect(settings.artifacts['object_store']['cdn']).to eq(cdn_config)
+          expect(settings.artifacts['object_store']['cdn'].to_hash).to eq(cdn_config)
         end
       end
 
@@ -163,7 +163,7 @@
         it 'allows pages to define its own connection' do
           expect { subject }.not_to raise_error
 
-          expect(settings.pages['object_store']['connection']).to eq(pages_connection)
+          expect(settings.pages['object_store']['connection'].to_hash).to eq(pages_connection)
           expect(settings.pages['object_store']['consolidated_settings']).to be_falsey
         end
       end
@@ -230,7 +230,7 @@
     end
 
     it 'respects original values' do
-      original_settings = Settingslogic.new({
+      original_settings = GitlabSettings::Options.build({
         'enabled' => true,
         'remote_directory' => 'artifacts'
       })
@@ -244,7 +244,7 @@
     end
 
     it 'supports bucket prefixes' do
-      original_settings = Settingslogic.new({
+      original_settings = GitlabSettings::Options.build({
         'enabled' => true,
         'remote_directory' => 'gitlab/artifacts'
       })
diff --git a/spec/config/settings_spec.rb b/spec/config/settings_spec.rb
index b464a4eee8bf1..d6cddc215f50f 100644
--- a/spec/config/settings_spec.rb
+++ b/spec/config/settings_spec.rb
@@ -31,7 +31,7 @@
     with_them do
       before do
         allow(Gitlab.config).to receive(:gitlab).and_return(
-          Settingslogic.new({
+          GitlabSettings::Options.build({
             'host' => host,
             'https' => true,
             'port' => port,
diff --git a/spec/config/smime_signature_settings_spec.rb b/spec/config/smime_signature_settings_spec.rb
index 73dca66c66687..53e70f1f2cc3a 100644
--- a/spec/config/smime_signature_settings_spec.rb
+++ b/spec/config/smime_signature_settings_spec.rb
@@ -19,7 +19,7 @@
 
     context 'when providing custom values' do
       it 'sets correct default values to disabled' do
-        custom_settings = Settingslogic.new({})
+        custom_settings = GitlabSettings::Options.build({})
 
         parsed_settings = described_class.parse(custom_settings)
 
@@ -30,7 +30,7 @@
       end
 
       it 'enables smime with default key and cert' do
-        custom_settings = Settingslogic.new({
+        custom_settings = GitlabSettings::Options.build({
           'enabled' => true
         })
 
@@ -46,7 +46,7 @@
         custom_key = '/custom/key'
         custom_cert = '/custom/cert'
         custom_ca_certs = '/custom/ca_certs'
-        custom_settings = Settingslogic.new({
+        custom_settings = GitlabSettings::Options.build({
           'enabled' => true,
           'key_file' => custom_key,
           'cert_file' => custom_cert,
diff --git a/spec/initializers/settings_spec.rb b/spec/initializers/settings_spec.rb
index c3200d2fab194..09064a210993b 100644
--- a/spec/initializers/settings_spec.rb
+++ b/spec/initializers/settings_spec.rb
@@ -9,12 +9,7 @@
       expect(Gitlab.config.ldap.servers.main.label).to eq('ldap')
     end
 
-    # Specifically trying to cause this error discovered in EE when removing the
-    # reassignment of each server element with Settingslogic.
-    #
-    #   `undefined method `label' for #<Hash:0x007fbd18b59c08>`
-    #
-    it 'can be accessed in a very specific way that breaks without reassigning each element with Settingslogic' do
+    it 'can be accessed in a very specific way that breaks without reassigning each element' do
       server_settings = Gitlab.config.ldap.servers['main']
       expect(server_settings.label).to eq('ldap')
     end
diff --git a/spec/lib/gitlab/auth/o_auth/provider_spec.rb b/spec/lib/gitlab/auth/o_auth/provider_spec.rb
index 96a31c5098902..226669bab33e6 100644
--- a/spec/lib/gitlab/auth/o_auth/provider_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/provider_spec.rb
@@ -49,7 +49,7 @@
     context 'for an LDAP provider' do
       context 'when the provider exists' do
         it 'returns the config' do
-          expect(described_class.config_for('ldapmain')).to be_a(Hash)
+          expect(described_class.config_for('ldapmain')).to be_a(GitlabSettings::Options)
         end
       end
 
diff --git a/spec/lib/gitlab/ci/components/instance_path_spec.rb b/spec/lib/gitlab/ci/components/instance_path_spec.rb
index e037c37c81713..b80422d03e594 100644
--- a/spec/lib/gitlab/ci/components/instance_path_spec.rb
+++ b/spec/lib/gitlab/ci/components/instance_path_spec.rb
@@ -6,7 +6,7 @@
   let_it_be(:user) { create(:user) }
 
   let(:path) { described_class.new(address: address, content_filename: 'template.yml') }
-  let(:settings) { Settingslogic.new({ 'component_fqdn' => current_host }) }
+  let(:settings) { GitlabSettings::Options.build({ 'component_fqdn' => current_host }) }
   let(:current_host) { 'acme.com/' }
 
   before do
diff --git a/spec/lib/gitlab/consul/internal_spec.rb b/spec/lib/gitlab/consul/internal_spec.rb
index 28dcaac9ff2f9..5bad7be4396ce 100644
--- a/spec/lib/gitlab/consul/internal_spec.rb
+++ b/spec/lib/gitlab/consul/internal_spec.rb
@@ -22,7 +22,7 @@
 
     context 'when consul setting is not present in gitlab.yml' do
       before do
-        allow(Gitlab.config).to receive(:consul).and_raise(Settingslogic::MissingSetting)
+        allow(Gitlab.config).to receive(:consul).and_raise(GitlabSettings::MissingSetting)
       end
 
       it 'does not fail' do
diff --git a/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb b/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
index ad91320c6eb84..5d193245b0215 100644
--- a/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
+++ b/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
@@ -228,7 +228,7 @@
 
       context 'when prometheus setting is not present in gitlab.yml' do
         before do
-          allow(Gitlab.config).to receive(:prometheus).and_raise(Settingslogic::MissingSetting)
+          allow(Gitlab.config).to receive(:prometheus).and_raise(GitlabSettings::MissingSetting)
         end
 
         it 'does not fail' do
diff --git a/spec/lib/gitlab/legacy_github_import/client_spec.rb b/spec/lib/gitlab/legacy_github_import/client_spec.rb
index 08679b7e9f15e..d0f63d11469e7 100644
--- a/spec/lib/gitlab/legacy_github_import/client_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/client_spec.rb
@@ -4,7 +4,7 @@
 
 RSpec.describe Gitlab::LegacyGithubImport::Client do
   let(:token) { '123456' }
-  let(:github_provider) { Settingslogic.new('app_id' => 'asd123', 'app_secret' => 'asd123', 'name' => 'github', 'args' => { 'client_options' => {} }) }
+  let(:github_provider) { GitlabSettings::Options.build('app_id' => 'asd123', 'app_secret' => 'asd123', 'name' => 'github', 'args' => { 'client_options' => {} }) }
   let(:wait_for_rate_limit_reset) { true }
 
   subject(:client) { described_class.new(token, wait_for_rate_limit_reset: wait_for_rate_limit_reset) }
@@ -17,7 +17,7 @@
     expect(client.client.options.keys).to all(be_kind_of(Symbol))
   end
 
-  it 'does not crash (e.g. Settingslogic::MissingSetting) when verify_ssl config is not present' do
+  it 'does not crash (e.g. GitlabSettings::MissingSetting) when verify_ssl config is not present' do
     expect { client.api }.not_to raise_error
   end
 
diff --git a/spec/lib/gitlab/prometheus/internal_spec.rb b/spec/lib/gitlab/prometheus/internal_spec.rb
index b08b88134704c..ff5da301347db 100644
--- a/spec/lib/gitlab/prometheus/internal_spec.rb
+++ b/spec/lib/gitlab/prometheus/internal_spec.rb
@@ -81,7 +81,7 @@
 
     context 'when prometheus setting is not present in gitlab.yml' do
       before do
-        allow(Gitlab.config).to receive(:prometheus).and_raise(Settingslogic::MissingSetting)
+        allow(Gitlab.config).to receive(:prometheus).and_raise(GitlabSettings::MissingSetting)
       end
 
       it 'does not fail' do
@@ -97,7 +97,7 @@
 
     context 'when prometheus setting is not present in gitlab.yml' do
       before do
-        allow(Gitlab.config).to receive(:prometheus).and_raise(Settingslogic::MissingSetting)
+        allow(Gitlab.config).to receive(:prometheus).and_raise(GitlabSettings::MissingSetting)
       end
 
       it 'does not fail' do
diff --git a/spec/lib/gitlab/sidekiq_config_spec.rb b/spec/lib/gitlab/sidekiq_config_spec.rb
index 5f72a3feba785..00b1666106f5c 100644
--- a/spec/lib/gitlab/sidekiq_config_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config_spec.rb
@@ -17,6 +17,27 @@
     end
   end
 
+  describe '.cron_jobs' do
+    it 'renames job_class to class and removes incomplete jobs' do
+      expect(Gitlab)
+        .to receive(:config)
+        .twice
+        .and_return(GitlabSettings::Options.build(
+          load_dynamic_cron_schedules!: true,
+          cron_jobs: {
+            job: { cron: '0 * * * *', job_class: 'SomeWorker' },
+            incomplete_job: { cron: '0 * * * *' }
+          }))
+
+      expect(Gitlab::AppLogger)
+        .to receive(:error)
+        .with("Invalid cron_jobs config key: 'incomplete_job'. Check your gitlab config file.")
+
+      expect(described_class.cron_jobs)
+        .to eq('job' => { 'class' => 'SomeWorker', 'cron' => '0 * * * *' })
+    end
+  end
+
   describe '.worker_queues' do
     it 'includes all queues' do
       queues = described_class.worker_queues
diff --git a/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb b/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb
index 48092a33da31f..ea3c030541f09 100644
--- a/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb
+++ b/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb
@@ -45,7 +45,7 @@
 
     context 'when snowplow_micro config is not set' do
       before do
-        allow(Gitlab.config).to receive(:snowplow_micro).and_raise(Settingslogic::MissingSetting)
+        allow(Gitlab.config).to receive(:snowplow_micro).and_raise(GitlabSettings::MissingSetting)
       end
 
       it 'returns localhost hostname' do
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index 56be80678e92e..a3de64a002e50 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -312,7 +312,7 @@
     end
 
     it 'returns false when snowplow_micro is not configured' do
-      allow(Gitlab.config).to receive(:snowplow_micro).and_raise(Settingslogic::MissingSetting)
+      allow(Gitlab.config).to receive(:snowplow_micro).and_raise(GitlabSettings::MissingSetting)
 
       expect(described_class).not_to be_snowplow_micro_enabled
     end
diff --git a/spec/lib/gitlab/url_blocker_spec.rb b/spec/lib/gitlab/url_blocker_spec.rb
index 7b6c89b5dd347..d790d13f66c1d 100644
--- a/spec/lib/gitlab/url_blocker_spec.rb
+++ b/spec/lib/gitlab/url_blocker_spec.rb
@@ -166,8 +166,8 @@
           let(:lfs_config) do
             {
               'enabled' => lfs_enabled,
-              # This nesting of Settingslogic is necessary to trigger the bug
-              'object_store' => Settingslogic.new({ 'enabled' => true })
+              # This nesting of settings is necessary to trigger the bug
+              'object_store' => GitlabSettings::Options.build({ 'enabled' => true })
             }
           end
 
@@ -175,16 +175,15 @@
             {
               'gitlab' => Gitlab.config.gitlab,
               'repositories' => { 'storages' => { 'default' => 'test' } },
-              'lfs' => Settingslogic.new(lfs_config)
+              'lfs' => GitlabSettings::Options.build(lfs_config)
             }
           end
 
           let(:host) { 'http://127.0.0.1:9000' }
-          let(:settings) { Settingslogic.new(config) }
+          let(:settings) { GitlabSettings::Options.build(config) }
 
           before do
             allow(Gitlab).to receive(:config).and_return(settings)
-            # Triggers Settingslogic bug: https://gitlab.com/gitlab-org/gitlab/-/issues/286873
             settings.repositories.storages.default
           end
 
diff --git a/spec/lib/gitlab_settings/options_spec.rb b/spec/lib/gitlab_settings/options_spec.rb
new file mode 100644
index 0000000000000..4b57e91c2e113
--- /dev/null
+++ b/spec/lib/gitlab_settings/options_spec.rb
@@ -0,0 +1,155 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSettings::Options, :aggregate_failures, feature_category: :shared do
+  let(:config) { { foo: { bar: 'baz' } } }
+
+  subject(:options) { described_class.build(config) }
+
+  describe '.build' do
+    context 'when argument is a hash' do
+      it 'creates a new GitlabSettings::Options instance' do
+        options = described_class.build(config)
+
+        expect(options).to be_a described_class
+        expect(options.foo).to be_a described_class
+        expect(options.foo.bar).to eq 'baz'
+      end
+    end
+  end
+
+  describe '#[]' do
+    it 'accesses the configuration key as string' do
+      expect(options['foo']).to be_a described_class
+      expect(options['foo']['bar']).to eq 'baz'
+
+      expect(options['inexistent']).to be_nil
+    end
+
+    it 'accesses the configuration key as symbol' do
+      expect(options[:foo]).to be_a described_class
+      expect(options[:foo][:bar]).to eq 'baz'
+
+      expect(options[:inexistent]).to be_nil
+    end
+  end
+
+  describe '#[]=' do
+    it 'changes the configuration key as string' do
+      options['foo']['bar'] = 'anothervalue'
+
+      expect(options['foo']['bar']).to eq 'anothervalue'
+    end
+
+    it 'changes the configuration key as symbol' do
+      options[:foo][:bar] = 'anothervalue'
+
+      expect(options[:foo][:bar]).to eq 'anothervalue'
+    end
+
+    context 'when key does not exist' do
+      it 'creates a new configuration by string key' do
+        options['inexistent'] = 'value'
+
+        expect(options['inexistent']).to eq 'value'
+      end
+
+      it 'creates a new configuration by symbol key' do
+        options[:inexistent] = 'value'
+
+        expect(options[:inexistent]).to eq 'value'
+      end
+    end
+  end
+
+  describe '#key?' do
+    it 'checks if a string key exists' do
+      expect(options.key?('foo')).to be true
+      expect(options.key?('inexistent')).to be false
+    end
+
+    it 'checks if a symbol key exists' do
+      expect(options.key?(:foo)).to be true
+      expect(options.key?(:inexistent)).to be false
+    end
+  end
+
+  describe '#to_hash' do
+    it 'returns the hash representation of the config' do
+      expect(options.to_hash).to eq('foo' => { 'bar' => 'baz' })
+    end
+  end
+
+  describe '#merge' do
+    it 'merges a hash into the existing options' do
+      expect(options.merge(more: 'configs').to_hash).to eq(
+        'foo' => { 'bar' => 'baz' },
+        'more' => 'configs'
+      )
+    end
+
+    context 'when the merge hash replaces existing configs' do
+      it 'replaces the existing value' do
+        expect(options.merge(foo: 'configs').to_hash).to eq('foo' => 'configs')
+      end
+    end
+  end
+
+  describe '#deep_merge' do
+    it 'deep merges a hash into the existing options' do
+      expect(options.deep_merge(foo: { more: 'configs' }).to_hash).to eq('foo' => {
+        'bar' => 'baz',
+        'more' => 'configs'
+      })
+    end
+
+    context 'when the merge hash replaces existing configs' do
+      it 'replaces the nested value' do
+        expect(options.deep_merge(foo: { bar: 'configs' }).to_hash).to eq('foo' => {
+          'bar' => 'configs'
+        })
+      end
+    end
+  end
+
+  describe '#is_a?' do
+    it 'returns false for anything other than Hash or GitlabSettings::Options' do
+      expect(options.is_a?(described_class)).to be true
+      expect(options.is_a?(Hash)).to be true
+      expect(options.is_a?(String)).to be false
+    end
+  end
+
+  describe '#method_missing' do
+    context 'when method is an option' do
+      it 'delegates methods to options keys' do
+        expect(options.foo.bar).to eq('baz')
+      end
+
+      it 'uses methods to change options values' do
+        expect { options.foo = 1 }
+          .to change { options.foo }
+          .to(1)
+      end
+    end
+
+    context 'when method is not an option' do
+      it 'delegates the method to the internal options hash' do
+        expect { options.foo.delete('bar') }
+          .to change { options.to_hash }
+          .to({ 'foo' => {} })
+      end
+    end
+
+    context 'when method is not an option and does not exist in hash' do
+      it 'raises GitlabSettings::MissingSetting' do
+        expect { options.anything }
+          .to raise_error(
+            ::GitlabSettings::MissingSetting,
+            "option 'anything' not defined"
+          )
+      end
+    end
+  end
+end
diff --git a/spec/lib/gitlab_settings/settings_spec.rb b/spec/lib/gitlab_settings/settings_spec.rb
new file mode 100644
index 0000000000000..55ceff4ce821c
--- /dev/null
+++ b/spec/lib/gitlab_settings/settings_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSettings::Settings, :aggregate_failures, feature_category: :shared do
+  let(:config) do
+    {
+      section1: {
+        config1: {
+          value1: 1
+        }
+      }
+    }
+  end
+
+  let(:source) { Tempfile.new('config.yaml') }
+
+  before do
+    File.write(source, config.to_yaml)
+  end
+
+  subject(:settings) { described_class.new(source.path, 'section1') }
+
+  it 'requires a source' do
+    expect { described_class.new('', '') }
+      .to raise_error(ArgumentError, 'config source is required')
+  end
+
+  it 'requires a section' do
+    expect { described_class.new(source, '') }
+      .to raise_error(ArgumentError, 'config section is required')
+  end
+
+  it 'loads the given section config' do
+    expect(settings.config1.value1).to eq(1)
+  end
+
+  describe '#reload!' do
+    it 'reloads the config' do
+      expect(settings.config1.value1).to eq(1)
+
+      File.write(source, { section1: { config1: { value1: 2 } } }.to_yaml)
+
+      # config doesn't change when source changes
+      expect(settings.config1.value1).to eq(1)
+
+      settings.reload!
+
+      # config changes after reload! if source changed
+      expect(settings.config1.value1).to eq(2)
+    end
+  end
+end
diff --git a/spec/metrics_server/metrics_server_spec.rb b/spec/metrics_server/metrics_server_spec.rb
index efa716754f171..ad80835549f65 100644
--- a/spec/metrics_server/metrics_server_spec.rb
+++ b/spec/metrics_server/metrics_server_spec.rb
@@ -99,7 +99,7 @@
         context 'for Golang server' do
           let(:log_enabled) { false }
           let(:settings) do
-            Settingslogic.new(
+            GitlabSettings::Options.build(
               {
                 'web_exporter' => {
                   'enabled' => true,
@@ -304,7 +304,7 @@
   end
 
   context 'for sidekiq' do
-    let(:settings) { Settingslogic.new({ "sidekiq_exporter" => { "enabled" => true } }) }
+    let(:settings) { GitlabSettings::Options.build({ "sidekiq_exporter" => { "enabled" => true } }) }
 
     before do
       allow(::Settings).to receive(:monitoring).and_return(settings)
diff --git a/spec/models/instance_configuration_spec.rb b/spec/models/instance_configuration_spec.rb
index f57667cc5d6e4..3a03fec51a659 100644
--- a/spec/models/instance_configuration_spec.rb
+++ b/spec/models/instance_configuration_spec.rb
@@ -73,7 +73,7 @@ def stub_pub_file(path)
         it 'returns Settings.pages' do
           gitlab_pages.delete(:ip_address)
 
-          expect(gitlab_pages).to eq(Settings.pages.symbolize_keys)
+          expect(gitlab_pages).to eq(Settings.pages.to_hash.deep_symbolize_keys)
         end
 
         it 'returns the GitLab\'s pages host ip address' do
diff --git a/spec/support/helpers/stub_configuration.rb b/spec/support/helpers/stub_configuration.rb
index 2a7b36a4c00a0..4c997aceeee1c 100644
--- a/spec/support/helpers/stub_configuration.rb
+++ b/spec/support/helpers/stub_configuration.rb
@@ -102,7 +102,7 @@ def stub_storage_settings(messages)
       messages[storage_name] = Gitlab::GitalyClient::StorageSettings.new(storage_hash.to_h)
     end
 
-    allow(Gitlab.config.repositories).to receive(:storages).and_return(Settingslogic.new(messages))
+    allow(Gitlab.config.repositories).to receive(:storages).and_return(::GitlabSettings::Options.build(messages))
   end
 
   def stub_sentry_settings(enabled: true)
@@ -175,11 +175,11 @@ def add_predicates(messages)
     end
   end
 
-  # Support nested hashes by converting all values into Settingslogic objects
+  # Support nested hashes by converting all values into GitlabSettings::Options objects
   def to_settings(hash)
     hash.transform_values do |value|
       if value.is_a? Hash
-        Settingslogic.new(value.to_h.deep_stringify_keys)
+        ::GitlabSettings::Options.build(value)
       else
         value
       end
diff --git a/spec/support/helpers/stub_object_storage.rb b/spec/support/helpers/stub_object_storage.rb
index 4efe2a98a4503..88fef1aa0b360 100644
--- a/spec/support/helpers/stub_object_storage.rb
+++ b/spec/support/helpers/stub_object_storage.rb
@@ -17,7 +17,7 @@ def stub_object_storage_uploader(
         direct_upload: false,
         cdn: {}
   )
-    old_config = Settingslogic.new(config.to_h.deep_stringify_keys)
+    old_config = ::GitlabSettings::Options.build(config.to_h.deep_stringify_keys)
     new_config = config.to_h.deep_symbolize_keys.merge({
       enabled: enabled,
       proxy_download: proxy_download,
@@ -32,7 +32,7 @@ def stub_object_storage_uploader(
     allow(config).to receive(:proxy_download) { proxy_download }
     allow(config).to receive(:direct_upload) { direct_upload }
 
-    uploader_config = Settingslogic.new(new_config.to_h.deep_stringify_keys)
+    uploader_config = ::GitlabSettings::Options.build(new_config.to_h.deep_stringify_keys)
     allow(uploader).to receive(:object_store_options).and_return(uploader_config)
     allow(uploader.options).to receive(:object_store).and_return(uploader_config)
 
diff --git a/spec/uploaders/object_storage/cdn_spec.rb b/spec/uploaders/object_storage/cdn_spec.rb
index 0c1966b4df24a..28b3313428b52 100644
--- a/spec/uploaders/object_storage/cdn_spec.rb
+++ b/spec/uploaders/object_storage/cdn_spec.rb
@@ -41,7 +41,7 @@ def dynamic_segment
 
     before do
       stub_artifacts_object_storage(enabled: true)
-      options = Settingslogic.new(Gitlab.config.uploads.deep_merge(cdn_options))
+      options = Gitlab.config.uploads.deep_merge(cdn_options)
       allow(uploader_class).to receive(:options).and_return(options)
     end
 
diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb
index 0e293ec973cd4..ef46803fc5684 100644
--- a/spec/uploaders/object_storage_spec.rb
+++ b/spec/uploaders/object_storage_spec.rb
@@ -446,7 +446,7 @@ def when_file_is_in_use
   end
 
   describe '#fog_credentials' do
-    let(:connection) { Settingslogic.new("provider" => "AWS") }
+    let(:connection) { GitlabSettings::Options.build("provider" => "AWS") }
 
     before do
       allow(uploader_class).to receive(:options) do
@@ -479,7 +479,7 @@ def when_file_is_in_use
         }
       end
 
-      let(:options) { Settingslogic.new(raw_options) }
+      let(:options) { GitlabSettings::Options.build(raw_options) }
 
       before do
         allow(uploader_class).to receive(:options) do
-- 
GitLab