2 changes: 2 additions & 0 deletions .github/workflows/ci.yaml
@@ -101,6 +101,7 @@ jobs:
TEST_DB_HOST: ${{ env.TEST_DB_HOST }}
# ... (include all relevant DB_ and TEST_DB_ vars)
TEST_DB_URL: ${{ env.TEST_DB_URL }} # Ensure your script uses this
ENABLE_AUDIT_LOG: false # Disable audit log for tests

- name: Run Unittests
# CWD is ./src
@@ -118,3 +119,4 @@ jobs:
TEST_DB_PASSWORD: ${{ env.TEST_DB_PASSWORD }}
TEST_DB_NAME: ${{ env.TEST_DB_NAME }}
TEST_DB_URL: ${{ env.TEST_DB_URL }}
ENABLE_AUDIT_LOG: false # Disable audit log for tests
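
Note that GitHub Actions delivers env values as strings, so the YAML `false` above reaches the process as the string "false". A minimal sketch of the parsing this relies on, mirroring the `os.getenv` read in `scripts/reset.py` below:

```python
# Sketch: environment variables are always strings, so YAML `false`
# arrives as "false". Normalizing with lower() keeps "False"/"FALSE"
# from counting as enabled.
import os

ENABLE_AUDIT_LOG = os.getenv("ENABLE_AUDIT_LOG", "false").lower() == "true"
print(ENABLE_AUDIT_LOG)  # False when CI sets ENABLE_AUDIT_LOG: false
```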
28 changes: 20 additions & 8 deletions src/backend/scripts/reset.py
@@ -1,26 +1,29 @@
from .run_all_ddls import *

def reset_dev():
def reset_dev(no_trigger=False):
main(
sql_scripts_dir,
ddl_scripts_dir,
create_engine(db_config.DB_URL),
refresh=True,
refresh_data=False,
no_ddl=False
no_ddl=False,
no_trigger=no_trigger
)

def reset_test():
def reset_test(no_trigger=False):
main(
sql_scripts_dir,
ddl_scripts_dir,
create_engine(test_db_config.DB_URL),
refresh=True,
refresh_data=False,
no_ddl=False
no_ddl=False,
no_trigger=no_trigger
)


if __name__ == "__main__":
import argparse
import os

parser = argparse.ArgumentParser(description="Reset the database.")
parser.add_argument(
@@ -29,11 +32,20 @@ def reset_test():
default="test",
help="Specify the mode to run the DDL scripts. Default is 'test'."
)
parser.add_argument(
"--no-trigger",
action="store_true",
help="Skip running trigger scripts."
)
args = parser.parse_args()

ENABLE_AUDIT_LOG = os.getenv("ENABLE_AUDIT_LOG", "false").lower() == "true"

no_trigger = args.no_trigger or not ENABLE_AUDIT_LOG

print(f"Resetting {args.target} database: drop all tables and rerun all DDLs.")

if args.target == "dev":
reset_dev()
reset_dev(no_trigger=no_trigger)
elif args.target == "test":
reset_test()
reset_test(no_trigger=no_trigger)
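
The flag and the environment variable compose: triggers are skipped when `--no-trigger` is passed or when `ENABLE_AUDIT_LOG` is unset or false. A hypothetical direct call from a test fixture (the `scripts.reset` import path is assumed from the repo layout):

```python
# Hypothetical usage from a test fixture: rebuild the test schema
# without audit triggers, regardless of ENABLE_AUDIT_LOG.
from scripts.reset import reset_test

reset_test(no_trigger=True)  # drop all tables, rerun DDLs, skip trigger scripts
```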
67 changes: 38 additions & 29 deletions src/backend/scripts/run_all_ddls.py
@@ -9,37 +9,44 @@
root_dir = Path(__file__).parent.parent
drop_all_script = root_dir / "sql" / "utils" / "drop_all.sql"
drop_all_data_script = root_dir / "sql" / "utils" / "drop_all_data.sql"
sql_scripts_dir = root_dir / "sql" / "ddl"
ddl_scripts_dir = root_dir / "sql" / "ddl"
trigger_scripts_dir = root_dir / "sql" / "triggers"


def execute_sql_script(engine, script_text):
def execute_sql_script(engine, script_text, split_statements=True):
"""
Execute a SQL script using the provided SQLAlchemy engine.
"""
with engine.begin() as connection:

# Execute the SQL script stmt by stmt
sql_statements = script_text.split(';')
if split_statements:
# Split the script into individual statements
sql_statements = script_text.split(";")
else:
sql_statements = [script_text]
for statement in sql_statements:
# Strip leading/trailing whitespace
statement = statement.strip()
# Execute the statement
if not statement:
continue
try:
connection.execute(text(statement))
stmt = text(statement)
connection.execute(stmt)
except Exception as e:
logger.error(f"Error executing statement: {statement}")
logger.error(f"{e}")
continue
logger.info(f"stmt: {stmt}")
raise e

def execute_sql_scripts_in_order(engine, directory):
def execute_sql_scripts_in_order(engine, directory, split_statements=True):
"""
Execute all SQL scripts in the specified directory in order.
The scripts should be named in a way that allows them to be sorted correctly (e.g., 001_create_table.sql).
"""
# Get a list of all SQL files in the directory
logger.info(f"Running all SQL scripts in {sql_scripts_dir}...")
logger.info(f"Running all SQL scripts in {ddl_scripts_dir}...")
# Get a list of all SQL files in the directory
sql_files = sorted(directory.glob("*.sql"))
logger.info(f"Found {len(sql_files)} SQL files.")
@@ -48,20 +55,15 @@ def execute_sql_scripts_in_order(engine, directory):
for sql_file in sql_files:
logger.info(f"Running script: {sql_file.name}")
# Read the SQL file
with open(sql_file, 'r') as file:
with open(sql_file, "r") as file:
sql_script = file.read()

# Execute the SQL script
execute_sql_script(engine, sql_script)

execute_sql_script(engine, sql_script, split_statements=split_statements)


def main(
directory: Path,
engine=None,
refresh=False,
refresh_data=False,
no_ddl=False
directory: Path, engine=None, refresh=False, refresh_data=False, no_ddl=False, no_trigger=False
):
"""
Run all DDL scripts in the specified directory in order.
@@ -70,22 +72,25 @@
"""
if refresh:
logger.info(f"Running drop_all.sql script...")
with open(drop_all_script, 'r') as file:
with open(drop_all_script, "r") as file:
drop_script = file.read()
execute_sql_script(engine, drop_script)

if refresh_data:
logger.info(f"Running drop_all_data.sql script...")
with open(drop_all_data_script, 'r') as file:
with open(drop_all_data_script, "r") as file:
drop_data_script = file.read()
execute_sql_script(engine, drop_data_script)

if no_ddl:
logger.info("Skipping DDL scripts execution as per --no-ddl flag.")
return

execute_sql_scripts_in_order(engine, directory)
else:
execute_sql_scripts_in_order(engine, directory)

if no_trigger:
logger.info("Skipping trigger scripts execution as per --no-trigger flag.")
else:
execute_sql_scripts_in_order(engine, trigger_scripts_dir, split_statements=False)


if __name__ == "__main__":
@@ -94,26 +99,23 @@ def main(
"--mode",
choices=["dev", "test"],
default="test",
help="Specify the mode to run the DDL scripts. Default is 'test'."
help="Specify the mode to run the DDL scripts. Default is 'test'.",
)
parser.add_argument(
"--refresh",
"-r",
action="store_true",
help="Drop all tables and before running the DDL scripts."
help="Drop all tables and before running the DDL scripts.",
)
parser.add_argument(
"--refresh-data",
"-rd",
action="store_true",
help="Drop all data in the tables (after refresh, before running the DDL scripts)."
help="Drop all data in the tables (after refresh, before running the DDL scripts).",
)
parser.add_argument("--no-ddl", action="store_true", help="Skip running DDL scripts.")
parser.add_argument("--no-trigger", action="store_true", help="Skip running trigger scripts.")

parser.add_argument(
"--no-ddl",
action="store_true",
help="Skip running DDL scripts."
)
args = parser.parse_args()

logger.info(f"Running in {args.mode} mode.")
@@ -124,4 +126,11 @@ def main(
elif args.mode == "test":
_engine = create_engine(test_db_config.DB_URL)

main(sql_scripts_dir, _engine, refresh=args.refresh, refresh_data=args.refresh_data, no_ddl=args.no_ddl)
main(
ddl_scripts_dir,
_engine,
refresh=args.refresh,
refresh_data=args.refresh_data,
no_ddl=args.no_ddl,
no_trigger=args.no_trigger,
)
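
The new `split_statements=False` path matters for trigger files: a `CREATE TRIGGER` body contains semicolons inside `BEGIN ... END`, so the naive `split(';')` used for DDL scripts would cut the statement apart. A standalone illustration (not from the diff):

```python
# Why trigger scripts are executed whole: splitting on ';' fragments
# a CREATE TRIGGER whose BEGIN...END body has internal semicolons.
trigger_sql = (
    "CREATE TRIGGER trg_demo AFTER INSERT ON t FOR EACH ROW\n"
    "BEGIN\n"
    "    INSERT INTO audit_demo VALUES (NEW.id);\n"
    "END"
)
fragments = [s.strip() for s in trigger_sql.split(";") if s.strip()]
print(len(fragments))  # 2 -- neither fragment is valid SQL on its own
```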
18 changes: 18 additions & 0 deletions src/backend/sql/ddl/014_create_audit_log_table.sql
@@ -0,0 +1,18 @@
-- -----------------------------------------------------
-- Table `AuditLog` (audit log)
-- -----------------------------------------------------
CREATE TABLE IF NOT EXISTS `AuditLog`
(
`AuditLogID` BIGINT NOT NULL AUTO_INCREMENT COMMENT 'Audit log ID, primary key',
`TableName` VARCHAR(128) NOT NULL COMMENT 'Name of the affected table',
`RowPKValue` VARCHAR(255) NULL COMMENT 'Primary key value of the affected row (for composite keys, consider JSON or concatenation)',
`OperationType` ENUM ('INSERT', 'UPDATE', 'DELETE') NOT NULL COMMENT 'Operation type',
`ChangedByUserID` INT NULL COMMENT 'ID of the user who performed the operation (read from the session variable @actor_id)',
`ChangeTimestamp` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) COMMENT 'Timestamp of the operation',
`OldValues_JSON` JSON NULL COMMENT 'Full row data before the operation (JSON)',
`NewValues_JSON` JSON NULL COMMENT 'Full row data after the operation (JSON)',
PRIMARY KEY (`AuditLogID`),
INDEX `idx_AuditLog_TableName_RowPK` (`TableName` ASC, `RowPKValue` ASC),
INDEX `idx_AuditLog_ChangedByUserID` (`ChangedByUserID` ASC),
INDEX `idx_AuditLog_ChangeTimestamp` (`ChangeTimestamp` ASC)
) ENGINE = InnoDB COMMENT = 'Generic audit log table recording data change history';
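
For `ChangedByUserID` to be populated, the application has to set the `@actor_id` session variable on the same connection before issuing DML. A sketch of what that might look like with SQLAlchemy (connection URL and user ID are placeholders):

```python
# Sketch: tag DML with the acting user so the audit triggers can read
# @actor_id. SET @var is session-scoped, so it must run on the same
# connection as the statements it should label.
from sqlalchemy import create_engine, text

engine = create_engine("mysql+pymysql://user:pass@localhost/app")  # hypothetical URL

with engine.begin() as conn:
    conn.execute(text("SET @actor_id = :uid"), {"uid": 42})
    conn.execute(
        text("UPDATE `User` SET Email = :email WHERE UserID = :uid"),
        {"email": "new@example.com", "uid": 42},
    )
    # The AFTER UPDATE trigger on `User` fires here and records @actor_id.
```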
141 changes: 141 additions & 0 deletions src/backend/sql/triggers/001_user_audit.sql
@@ -0,0 +1,141 @@
-- -----------------------------------------------------
-- Trigger for AFTER INSERT on User table
-- -----------------------------------------------------
SET GLOBAL log_bin_trust_function_creators = 1;

DELIMITER //

CREATE TRIGGER `trg_User_Audit_Insert`
AFTER INSERT
ON `User`
FOR EACH ROW
BEGIN
INSERT INTO `AuditLog` (`TableName`,
`RowPKValue`,
`OperationType`,
`ChangedByUserID`,
`OldValues_JSON`,
`NewValues_JSON`)
VALUES ('User',
CAST(NEW.UserID AS CHAR), -- primary key value
'INSERT',
@actor_id, -- actor ID read from the session variable
NULL, -- an INSERT has no old values
JSON_OBJECT(
'UserID', NEW.UserID,
'Username', NEW.Username,
'PasswordHash', '[SENSITIVE_DATA_LOGGED]', -- or NEW.PasswordHash, but note the security risk
'Email', NEW.Email,
'PhoneNumber', NEW.PhoneNumber,
'UserRole', NEW.UserRole,
'RegistrationDate', NEW.RegistrationDate,
'LastLoginDate', NEW.LastLoginDate,
'DefaultAddressID', NEW.DefaultAddressID,
'AccountStatus', NEW.AccountStatus
));
END//

DELIMITER ;

-- -----------------------------------------------------
-- Trigger for AFTER UPDATE on User table
-- -----------------------------------------------------
DELIMITER //

CREATE TRIGGER `trg_User_Audit_Update`
AFTER UPDATE
ON `User`
FOR EACH ROW
BEGIN
-- Only log when a monitored business field has actually changed.
-- Note: PasswordHash usually should not be compared directly, but if
-- updates are allowed, the change needs to be recorded.
-- LastLoginDate and LastUpdatedDate (if the User table has one) are
-- usually maintained automatically; decide for yourself whether changes
-- to those timestamps should produce an audit entry.
-- Here we assume that if any monitored column changes, the full old and
-- new row values are logged.
IF NOT (OLD.Username <=> NEW.Username) OR
NOT (OLD.Email <=> NEW.Email) OR
NOT (OLD.PhoneNumber <=> NEW.PhoneNumber) OR
NOT (OLD.UserRole <=> NEW.UserRole) OR
NOT (OLD.LastLoginDate <=> NEW.LastLoginDate) OR -- if the application updates it
NOT (OLD.DefaultAddressID <=> NEW.DefaultAddressID) OR
NOT (OLD.AccountStatus <=> NEW.AccountStatus) OR
NOT (OLD.PasswordHash <=> NEW.PasswordHash) -- watch for password hash changes
THEN
INSERT INTO `AuditLog` (`TableName`,
`RowPKValue`,
`OperationType`,
`ChangedByUserID`,
`OldValues_JSON`,
`NewValues_JSON`)
VALUES ('User',
CAST(NEW.UserID AS CHAR), -- the primary key normally does not change; NEW or OLD both work
'UPDATE',
@actor_id,
JSON_OBJECT(
'UserID', OLD.UserID,
'Username', OLD.Username,
'PasswordHash', '[SENSITIVE_DATA_LOGGED]', -- or OLD.PasswordHash
'Email', OLD.Email,
'PhoneNumber', OLD.PhoneNumber,
'UserRole', OLD.UserRole,
'RegistrationDate', OLD.RegistrationDate,
'LastLoginDate', OLD.LastLoginDate,
'DefaultAddressID', OLD.DefaultAddressID,
'AccountStatus', OLD.AccountStatus
),
JSON_OBJECT(
'UserID', NEW.UserID,
'Username', NEW.Username,
'PasswordHash', '[SENSITIVE_DATA_LOGGED]', -- or NEW.PasswordHash
'Email', NEW.Email,
'PhoneNumber', NEW.PhoneNumber,
'UserRole', NEW.UserRole,
'RegistrationDate', NEW.RegistrationDate, -- the registration date normally does not change
'LastLoginDate', NEW.LastLoginDate,
'DefaultAddressID', NEW.DefaultAddressID,
'AccountStatus', NEW.AccountStatus
));
END IF;
END//

DELIMITER ;

-- -----------------------------------------------------
-- Trigger for AFTER DELETE on User table
-- -----------------------------------------------------
DELIMITER //

CREATE TRIGGER `trg_User_Audit_Delete`
AFTER DELETE
ON `User`
FOR EACH ROW
BEGIN
INSERT INTO `AuditLog` (`TableName`,
`RowPKValue`,
`OperationType`,
`ChangedByUserID`,
`OldValues_JSON`,
`NewValues_JSON`)
VALUES ('User',
CAST(OLD.UserID AS CHAR),
'DELETE',
@actor_id,
JSON_OBJECT(
'UserID', OLD.UserID,
'Username', OLD.Username,
'PasswordHash', '[SENSITIVE_DATA_LOGGED]', -- or OLD.PasswordHash
'Email', OLD.Email,
'PhoneNumber', OLD.PhoneNumber,
'UserRole', OLD.UserRole,
'RegistrationDate', OLD.RegistrationDate,
'LastLoginDate', OLD.LastLoginDate,
'DefaultAddressID', OLD.DefaultAddressID,
'AccountStatus', OLD.AccountStatus
),
NULL -- a DELETE has no new values
);
END//

DELIMITER ;

SET GLOBAL log_bin_trust_function_creators = 0;
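
A note on the change detection in the UPDATE trigger: `<=>` is MySQL's NULL-safe equality, so `NOT (OLD.col <=> NEW.col)` is true exactly when a value changed, including NULL-to-value transitions that a plain `!=` would miss (it yields NULL, which an `IF` treats as false). A quick check, runnable against any MySQL connection (the URL is a placeholder):

```python
# Sketch: NULL-safe vs plain inequality in MySQL. The trigger relies on
# <=> so that NULL -> 'x' transitions count as changes.
from sqlalchemy import create_engine, text

engine = create_engine("mysql+pymysql://user:pass@localhost/app")  # hypothetical URL
with engine.connect() as conn:
    row = conn.execute(
        text("SELECT NOT (NULL <=> 'x') AS null_safe, (NULL != 'x') AS plain")
    ).one()
    print(row.null_safe, row.plain)  # 1 None -- plain != misses the change
```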