Unverified commit 8bb2d8c3 by Maxime Orefice, committed by GitLab

Add web_hook_logs_daily table

This commit introduces a new table to partition web_hook_logs by day.

Changelog: added
Parent 4c07e40d
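In short, the migrations in this commit create a daily range-partitioned copy of web_hook_logs and install a trigger that mirrors every write into it. A minimal sketch of the resulting objects, using only names that appear in the diff below (the authoritative DDL is in the structure.sql hunks further down):

-- Sketch only; object names are taken from the diff below.
CREATE TABLE web_hook_logs_daily (
    LIKE web_hook_logs INCLUDING ALL EXCLUDING INDEXES,
    PRIMARY KEY (id, created_at)
) PARTITION BY RANGE (created_at);

CREATE TRIGGER table_sync_trigger_b99eb6998c
    AFTER INSERT OR DELETE OR UPDATE ON web_hook_logs
    FOR EACH ROW EXECUTE FUNCTION table_sync_function_29bc99d6db();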
@@ -106,4 +106,16 @@
  ]
)

# Enable partition management for the backfill table during web_hook_logs partitioning.
# This way new partitions will be created as the trigger syncs new rows across to this table.
Gitlab::Database::Partitioning.register_tables(
  [
    {
      limit_connection_names: %i[main],
      table_name: 'web_hook_logs_daily',
      partitioned_column: :created_at, strategy: :daily, retain_for: 14.days
    }
  ]
)

Gitlab::Database::Partitioning.sync_partitions_ignore_db_error
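Registering the table with the :daily strategy and retain_for: 14.days means the partition manager keeps creating day-sized partitions ahead of the current date and removes partitions that fall outside the retention window. The following is only a sketch of the kind of DDL this implies; the dates and the detach-then-drop sequence are illustrative assumptions, not captured manager output:

-- Assumed current date: 2025-01-15. Partition names follow the
-- gitlab_partitions_dynamic.<table>_<YYYYMMDD> pattern used by the migration below.
CREATE TABLE IF NOT EXISTS gitlab_partitions_dynamic.web_hook_logs_daily_20250116
    PARTITION OF web_hook_logs_daily
    FOR VALUES FROM ('2025-01-16') TO ('2025-01-17');

-- Partitions past the 14-day retention window are eventually removed (assumed sequence):
ALTER TABLE web_hook_logs_daily
    DETACH PARTITION gitlab_partitions_dynamic.web_hook_logs_daily_20250101;
DROP TABLE IF EXISTS gitlab_partitions_dynamic.web_hook_logs_daily_20250101;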
---
table_name: web_hook_logs_daily
classes:
- WebHookLog
feature_categories:
- integrations
description: Web hook logs data, partitioned by day.
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/175379
milestone: '17.8'
gitlab_schema: gitlab_main_cell
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/463856
table_size: small
# frozen_string_literal: true

class CreateDailyPartitionedWebHookLogCopy < Gitlab::Database::Migration[2.2]
  disable_ddl_transaction!

  milestone '17.8'

  TABLE_NAME = :web_hook_logs_daily
  SOURCE_TABLE_NAME = :web_hook_logs

  def up
    transaction do
      execute(<<~SQL)
        CREATE TABLE #{TABLE_NAME} (
          LIKE #{SOURCE_TABLE_NAME} INCLUDING ALL EXCLUDING INDEXES,
          PRIMARY KEY (id, created_at)
        ) PARTITION BY RANGE (created_at);

        CREATE TABLE IF NOT EXISTS #{partition_name(nil)}
          PARTITION OF #{TABLE_NAME}
          FOR VALUES FROM (MINVALUE) TO ('#{current_date.prev_day}');

        CREATE TABLE IF NOT EXISTS #{partition_name(current_date.prev_day)}
          PARTITION OF #{TABLE_NAME}
          FOR VALUES FROM ('#{current_date.prev_day}') TO ('#{current_date}');

        CREATE TABLE IF NOT EXISTS #{partition_name(current_date)}
          PARTITION OF #{TABLE_NAME}
          FOR VALUES FROM ('#{current_date}') TO ('#{current_date.next_day}');

        CREATE TABLE IF NOT EXISTS #{partition_name(current_date.next_day)}
          PARTITION OF #{TABLE_NAME}
          FOR VALUES FROM ('#{current_date.next_day}') TO ('#{current_date.next_day.next_day}')
      SQL
    end
  end

  def down
    execute(<<~SQL)
      DROP TABLE #{TABLE_NAME}
    SQL
  end

  private

  def current_date
    Date.current
  end

  def partition_name(date)
    suffix = date&.strftime('%Y%m%d') || '00000000'
    "gitlab_partitions_dynamic.#{TABLE_NAME}_#{suffix}"
  end
end
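For concreteness, with an assumed current date of 2025-01-15 the up method above interpolates to DDL along these lines (a sketch of the generated statements, not captured output):

CREATE TABLE web_hook_logs_daily (
    LIKE web_hook_logs INCLUDING ALL EXCLUDING INDEXES,
    PRIMARY KEY (id, created_at)
) PARTITION BY RANGE (created_at);

-- Catch-all partition for anything older than yesterday:
CREATE TABLE IF NOT EXISTS gitlab_partitions_dynamic.web_hook_logs_daily_00000000
    PARTITION OF web_hook_logs_daily
    FOR VALUES FROM (MINVALUE) TO ('2025-01-14');

-- Yesterday, today and tomorrow:
CREATE TABLE IF NOT EXISTS gitlab_partitions_dynamic.web_hook_logs_daily_20250114
    PARTITION OF web_hook_logs_daily
    FOR VALUES FROM ('2025-01-14') TO ('2025-01-15');

CREATE TABLE IF NOT EXISTS gitlab_partitions_dynamic.web_hook_logs_daily_20250115
    PARTITION OF web_hook_logs_daily
    FOR VALUES FROM ('2025-01-15') TO ('2025-01-16');

CREATE TABLE IF NOT EXISTS gitlab_partitions_dynamic.web_hook_logs_daily_20250116
    PARTITION OF web_hook_logs_daily
    FOR VALUES FROM ('2025-01-16') TO ('2025-01-17');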
# frozen_string_literal: true

class CreateTriggerToWebHookLogs < Gitlab::Database::Migration[2.2]
  include Gitlab::Database::SchemaHelpers
  include Gitlab::Database::PartitioningMigrationHelpers::TableManagementHelpers

  milestone '17.8'

  SOURCE_TABLE = :web_hook_logs
  TARGET_TABLE = :web_hook_logs_daily
  UNIQUE_KEY = [:id, :created_at].freeze

  def up
    create_trigger_to_sync_tables(SOURCE_TABLE, TARGET_TABLE, UNIQUE_KEY)
  end

  def down
    drop_sync_trigger(SOURCE_TABLE)
  end
end
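Once this trigger is in place, every INSERT, UPDATE and DELETE on web_hook_logs is mirrored into web_hook_logs_daily by the generated table_sync_function_29bc99d6db shown in the structure.sql hunk below. A quick sanity check one might run after deployment; the query is illustrative, not part of the change, and only holds for rows written after the trigger was created (the historical backfill is handled separately):

-- Illustrative check: recent writes should appear in both tables.
SELECT
    (SELECT count(*) FROM web_hook_logs
      WHERE created_at >= now() - interval '1 hour') AS source_rows,
    (SELECT count(*) FROM web_hook_logs_daily
      WHERE created_at >= now() - interval '1 hour') AS copy_rows;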
db476eb6b416a71e0b45bfa186e437ffdd30e1cb71e843aedf4de7c06ab65c48
\ No newline at end of file
08e0001b798c0ad73bca409cdabf8c1dcb13099b4c08e0d49fa1c10051870393
\ No newline at end of file
@@ -752,6 +752,64 @@ $$;

COMMENT ON FUNCTION table_sync_function_0992e728d3() IS 'Partitioning migration: table sync for merge_request_diff_commits table';

CREATE FUNCTION table_sync_function_29bc99d6db() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF (TG_OP = 'DELETE') THEN
DELETE FROM web_hook_logs_daily where "id" = OLD."id" AND "created_at" = OLD."created_at";
ELSIF (TG_OP = 'UPDATE') THEN
UPDATE web_hook_logs_daily
SET "web_hook_id" = NEW."web_hook_id",
"trigger" = NEW."trigger",
"url" = NEW."url",
"request_headers" = NEW."request_headers",
"request_data" = NEW."request_data",
"response_headers" = NEW."response_headers",
"response_body" = NEW."response_body",
"response_status" = NEW."response_status",
"execution_duration" = NEW."execution_duration",
"internal_error_message" = NEW."internal_error_message",
"updated_at" = NEW."updated_at",
"url_hash" = NEW."url_hash"
WHERE web_hook_logs_daily."id" = NEW."id" AND web_hook_logs_daily."created_at" = NEW."created_at";
ELSIF (TG_OP = 'INSERT') THEN
INSERT INTO web_hook_logs_daily ("id",
"web_hook_id",
"trigger",
"url",
"request_headers",
"request_data",
"response_headers",
"response_body",
"response_status",
"execution_duration",
"internal_error_message",
"updated_at",
"created_at",
"url_hash")
VALUES (NEW."id",
NEW."web_hook_id",
NEW."trigger",
NEW."url",
NEW."request_headers",
NEW."request_data",
NEW."response_headers",
NEW."response_body",
NEW."response_status",
NEW."execution_duration",
NEW."internal_error_message",
NEW."updated_at",
NEW."created_at",
NEW."url_hash");
END IF;
RETURN NULL;
END
$$;
COMMENT ON FUNCTION table_sync_function_29bc99d6db() IS 'Partitioning migration: table sync for web_hook_logs table';
CREATE FUNCTION table_sync_function_3f39f64fc3() RETURNS trigger
LANGUAGE plpgsql
AS $$
@@ -3991,6 +4049,34 @@ CREATE TABLE web_hook_logs (
)
PARTITION BY RANGE (created_at);

CREATE SEQUENCE web_hook_logs_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE web_hook_logs_id_seq OWNED BY web_hook_logs.id;
CREATE TABLE web_hook_logs_daily (
id bigint DEFAULT nextval('web_hook_logs_id_seq'::regclass) NOT NULL,
web_hook_id bigint NOT NULL,
trigger character varying,
url character varying,
request_headers text,
request_data text,
response_headers text,
response_body text,
response_status character varying,
execution_duration double precision,
internal_error_message character varying,
updated_at timestamp without time zone NOT NULL,
created_at timestamp without time zone NOT NULL,
url_hash text,
CONSTRAINT check_df72cb58f5 CHECK ((char_length(url_hash) <= 44))
)
PARTITION BY RANGE (created_at);
CREATE TABLE zoekt_tasks (
id bigint NOT NULL,
partition_id bigint DEFAULT 1 NOT NULL,
@@ -22217,15 +22303,6 @@ CREATE SEQUENCE vulnerability_user_mentions_id_seq

ALTER SEQUENCE vulnerability_user_mentions_id_seq OWNED BY vulnerability_user_mentions.id;

CREATE SEQUENCE web_hook_logs_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE web_hook_logs_id_seq OWNED BY web_hook_logs.id;
CREATE TABLE web_hooks (
id bigint NOT NULL,
project_id bigint,
@@ -27436,6 +27513,9 @@ ALTER TABLE ONLY vulnerability_statistics

ALTER TABLE ONLY vulnerability_user_mentions
ADD CONSTRAINT vulnerability_user_mentions_pkey PRIMARY KEY (id);

ALTER TABLE ONLY web_hook_logs_daily
ADD CONSTRAINT web_hook_logs_daily_pkey PRIMARY KEY (id, created_at);
ALTER TABLE ONLY web_hook_logs
ADD CONSTRAINT web_hook_logs_pkey PRIMARY KEY (id, created_at);

@@ -35743,6 +35823,8 @@ CREATE TRIGGER table_sync_trigger_57c8465cd7 AFTER INSERT OR DELETE OR UPDATE ON

CREATE TRIGGER table_sync_trigger_61879721b5 AFTER INSERT OR DELETE OR UPDATE ON ci_runners FOR EACH ROW EXECUTE FUNCTION table_sync_function_686d6c7993();

CREATE TRIGGER table_sync_trigger_b99eb6998c AFTER INSERT OR DELETE OR UPDATE ON web_hook_logs FOR EACH ROW EXECUTE FUNCTION table_sync_function_29bc99d6db();
CREATE TRIGGER table_sync_trigger_bc3e7b56bd AFTER INSERT OR DELETE OR UPDATE ON ci_runner_machines FOR EACH ROW EXECUTE FUNCTION table_sync_function_e438f29263();

CREATE TRIGGER table_sync_trigger_cd362c20e2 AFTER INSERT OR DELETE OR UPDATE ON merge_request_diff_files FOR EACH ROW EXECUTE FUNCTION table_sync_function_3f39f64fc3();
@@ -233,6 +233,7 @@
# See: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87584
# Fixes performance issues with the deletion of web-hooks with many log entries
web_hook_logs: %w[web_hook_id],
web_hook_logs_daily: %w[web_hook_id],
webauthn_registrations: %w[u2f_registration_id], # this column will be dropped
ml_candidates: %w[internal_id],
value_stream_dashboard_counts: %w[namespace_id],
@@ -14,7 +14,8 @@
'merge_request_diff_files_99208b8fac', # has a desired sharding key instead
'ml_model_metadata', # has a desired sharding key instead.
'p_ci_pipeline_variables', # has a desired sharding key instead
'sbom_occurrences_vulnerabilities', # has desired sharding key instead
'web_hook_logs_daily' # temporary copy of web_hook_logs
]
end