重构: 完成代码审查和架构优化
主要改进: 1. 模块化架构重构 - 创建Confluence模块目录结构 - 统一飞书模块架构 - 重构数据库模块 2. 代码质量提升 - 创建统一配置管理 - 实现统一日志配置 - 完善类型提示和异常处理 3. 功能优化 - 移除parse-test功能 - 删除DEBUG_MODE配置 - 更新命令行选项 4. 文档完善 - 更新README.md项目结构 - 添加开发指南和故障排除 - 完善配置说明 5. 系统验证 - 所有核心功能测试通过 - 模块导入验证通过 - 架构完整性验证通过
This commit is contained in:
15
src/database/__init__.py
Normal file
15
src/database/__init__.py
Normal file
@@ -0,0 +1,15 @@
|
||||
#!/usr/bin/env python3
"""
Database package.

Provides a unified database interface: the shared base class plus the
concrete daily-log and schedule databases.
"""
from src.database.base import DatabaseBase, DatabaseConnectionError
from src.database.daily_logs import DailyLogsDatabase
from src.database.schedules import ScheduleDatabase

__all__ = [
    'DatabaseBase',
    'DatabaseConnectionError',
    'DailyLogsDatabase',
    'ScheduleDatabase'
]
|
||||
257
src/database/base.py
Normal file
257
src/database/base.py
Normal file
@@ -0,0 +1,257 @@
|
||||
#!/usr/bin/env python3
"""
Database base-class module.

Provides unified SQLite connection management and a context manager
for safe open/close handling.
"""
import os
import sqlite3
from contextlib import contextmanager
from typing import Generator, Optional, Any
from pathlib import Path

from src.config import config
from src.logging_config import get_logger

logger = get_logger(__name__)
|
||||
|
||||
|
||||
class DatabaseConnectionError(Exception):
    """Raised when a database connection cannot be established."""
|
||||
|
||||
|
||||
class DatabaseBase:
    """Database base class providing unified SQLite connection management.

    Subclasses get per-operation connections via :meth:`get_connection`
    plus convenience helpers for queries, updates and maintenance.
    """

    def __init__(self, db_path: Optional[str] = None):
        """
        Initialize the base class.

        Args:
            db_path: database file path; when None the configured default
                (``config.DATABASE_PATH``) is used.
        """
        self.db_path = db_path or config.DATABASE_PATH
        # NOTE(review): never assigned after init — connections are opened
        # per-operation in get_connection(), so this attribute looks unused.
        self._connection: Optional[sqlite3.Connection] = None
        self._ensure_directory()

    def _ensure_directory(self):
        """Create the directory containing the database file if missing."""
        data_dir = os.path.dirname(self.db_path)
        if data_dir and not os.path.exists(data_dir):
            os.makedirs(data_dir)
            logger.info(f"创建数据库目录: {data_dir}")

    def _connect(self) -> sqlite3.Connection:
        """
        Open a new database connection.

        Returns:
            sqlite3.Connection with ``row_factory = sqlite3.Row`` so rows
            can be accessed by column name.

        Raises:
            DatabaseConnectionError: when the connection cannot be opened.
        """
        try:
            conn = sqlite3.connect(self.db_path)
            conn.row_factory = sqlite3.Row
            logger.debug(f"数据库连接已建立: {self.db_path}")
            return conn
        except sqlite3.Error as e:
            error_msg = f"数据库连接失败: {self.db_path}, 错误: {e}"
            logger.error(error_msg)
            raise DatabaseConnectionError(error_msg) from e

    @contextmanager
    def get_connection(self) -> Generator[sqlite3.Connection, None, None]:
        """
        Context manager yielding a database connection.

        Example:
            with self.get_connection() as conn:
                cursor = conn.cursor()
                cursor.execute(...)

        Yields:
            an open connection; it is always closed on exit, and sqlite3
            errors raised inside the block are logged and re-raised.
        """
        conn = None
        try:
            conn = self._connect()
            yield conn
        except sqlite3.Error as e:
            logger.error(f"数据库操作失败: {e}")
            raise
        finally:
            if conn:
                conn.close()
                logger.debug("数据库连接已关闭")

    def execute_query(self, query: str, params: tuple = ()) -> list:
        """
        Run a SELECT and return all rows.

        Args:
            query: SQL query text.
            params: bound query parameters.

        Returns:
            list of rows converted to plain dicts.
        """
        with self.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(query, params)
            return [dict(row) for row in cursor.fetchall()]

    def execute_update(self, query: str, params: tuple = ()) -> int:
        """
        Run a single INSERT/UPDATE/DELETE and commit.

        Args:
            query: SQL statement text.
            params: bound parameters.

        Returns:
            number of affected rows.
        """
        with self.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(query, params)
            conn.commit()
            return cursor.rowcount

    def execute_many(self, query: str, params_list: list) -> int:
        """
        Run one statement for every parameter tuple, then commit once.

        Args:
            query: SQL statement text.
            params_list: list of parameter tuples.

        Returns:
            total number of affected rows.
        """
        with self.get_connection() as conn:
            cursor = conn.cursor()
            cursor.executemany(query, params_list)
            conn.commit()
            return cursor.rowcount

    def table_exists(self, table_name: str) -> bool:
        """
        Check whether a table exists.

        Args:
            table_name: table name.

        Returns:
            True when the table is present in sqlite_master.
        """
        query = """
            SELECT name FROM sqlite_master
            WHERE type='table' AND name=?
        """
        result = self.execute_query(query, (table_name,))
        return len(result) > 0

    def get_table_info(self, table_name: str) -> list:
        """
        Fetch table schema information.

        Args:
            table_name: table name.

        Returns:
            PRAGMA table_info rows as a list of dicts.
        """
        with self.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(f"PRAGMA table_info({table_name})")
            return [dict(row) for row in cursor.fetchall()]

    def vacuum(self):
        """Run VACUUM to compact the database file."""
        with self.get_connection() as conn:
            conn.execute("VACUUM")
            logger.info("数据库整理完成")

    def backup(self, backup_path: Optional[str] = None):
        """
        Back up the database using sqlite3's online backup API.

        Args:
            backup_path: destination file; when None, a timestamped file
                under ./backups is used. Note the timestamp is derived
                from the source file's mtime, not the current time.

        Raises:
            sqlite3.Error: when the backup fails (logged, then re-raised).
        """
        if backup_path is None:
            backup_dir = "backups"
            os.makedirs(backup_dir, exist_ok=True)
            timestamp = os.path.getmtime(self.db_path)
            from datetime import datetime
            dt = datetime.fromtimestamp(timestamp)
            backup_path = os.path.join(
                backup_dir,
                f"backup_{dt.strftime('%Y%m%d_%H%M%S')}.db"
            )

        try:
            with self.get_connection() as src_conn:
                dest_conn = sqlite3.connect(backup_path)
                src_conn.backup(dest_conn)
                dest_conn.close()
                logger.info(f"数据库备份完成: {backup_path}")
        except sqlite3.Error as e:
            logger.error(f"数据库备份失败: {e}")
            raise
|
||||
|
||||
|
||||
# Global connection pool (optional, for high-throughput scenarios)
class ConnectionPool:
    """Simple SQLite connection pool.

    NOTE(review): no locking around the internal idle/in-use containers,
    so this is not safe for concurrent use from multiple threads —
    confirm single-threaded usage before sharing a pool.
    """

    def __init__(self, db_path: str, max_connections: int = 5):
        """
        Args:
            db_path: SQLite database file path.
            max_connections: maximum number of connections that may be
                checked out simultaneously.
        """
        self.db_path = db_path
        self.max_connections = max_connections
        self._connections: list[sqlite3.Connection] = []  # idle connections
        self._in_use: set[sqlite3.Connection] = set()     # checked-out connections

    @contextmanager
    def get_connection(self) -> Generator[sqlite3.Connection, None, None]:
        """Check a connection out of the pool for the duration of the block.

        Reuses an idle connection when available, otherwise opens a new one
        up to ``max_connections``.

        Yields:
            an open sqlite3.Connection (row access by column name).

        Raises:
            DatabaseConnectionError: when the pool is exhausted.
        """
        if self._connections:
            conn = self._connections.pop()
        elif len(self._in_use) < self.max_connections:
            conn = sqlite3.connect(self.db_path)
            conn.row_factory = sqlite3.Row
        else:
            raise DatabaseConnectionError("连接池已满")

        self._in_use.add(conn)
        try:
            yield conn
        except Exception:
            # Bug fix: the original returned the connection to the idle pool
            # even when the block raised, recycling a connection that may
            # hold a broken or half-open transaction. Discard it instead.
            self._in_use.discard(conn)
            try:
                conn.close()
            except sqlite3.Error:
                pass  # already broken; nothing more to release
            raise
        else:
            # Clean exit: hand the connection back for reuse.
            self._in_use.discard(conn)
            self._connections.append(conn)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Smoke-test the database base class.
    db = DatabaseBase()

    # Connection test: report the SQLite library version.
    with db.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute("SELECT sqlite_version()")
        version = cursor.fetchone()[0]
        print(f"SQLite版本: {version}")

    # Query helper test.
    if db.table_exists("sqlite_master"):
        print("sqlite_master表存在")

    # Backup test (writes test_backup.db into the working directory).
    try:
        db.backup("test_backup.db")
        print("备份测试完成")
    except Exception as e:
        print(f"备份测试失败: {e}")
|
||||
336
src/database/daily_logs.py
Normal file
336
src/database/daily_logs.py
Normal file
@@ -0,0 +1,336 @@
|
||||
#!/usr/bin/env python3
"""
Daily shift-handover log database module.

Refactored on top of the shared database base class.
"""
from typing import List, Dict, Optional, Any
from datetime import datetime

from src.database.base import DatabaseBase
from src.logging_config import get_logger

logger = get_logger(__name__)
|
||||
|
||||
|
||||
class DailyLogsDatabase(DatabaseBase):
    """Daily shift-handover log database."""

    def __init__(self, db_path: Optional[str] = None):
        """
        Initialize the database and ensure the schema exists.

        Args:
            db_path: database file path; when None the configured
                default is used.
        """
        super().__init__(db_path)
        self._init_schema()

    def _init_schema(self):
        """Create tables/indexes and migrate the legacy schema if needed."""
        with self.get_connection() as conn:
            cursor = conn.cursor()

            # Daily handover log table.
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS daily_handover_logs (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    date TEXT NOT NULL,
                    shift TEXT NOT NULL,
                    ship_name TEXT NOT NULL,
                    teu INTEGER,
                    efficiency REAL,
                    vehicles INTEGER,
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    UNIQUE(date, shift, ship_name) ON CONFLICT REPLACE
                )
            ''')

            # Detect a legacy table created without the UNIQUE constraint
            # and migrate it (rename -> recreate -> copy -> drop; the order
            # of these statements must not change).
            cursor.execute("SELECT sql FROM sqlite_master WHERE type='table' AND name='daily_handover_logs'")
            table_sql = cursor.fetchone()[0]
            if 'UNIQUE' not in table_sql:
                logger.warning("检测到旧表结构,正在迁移...")

                # Move the old table out of the way.
                cursor.execute('ALTER TABLE daily_handover_logs RENAME TO daily_handover_logs_old')

                # Recreate with the UNIQUE constraint.
                cursor.execute('''
                    CREATE TABLE daily_handover_logs (
                        id INTEGER PRIMARY KEY AUTOINCREMENT,
                        date TEXT NOT NULL,
                        shift TEXT NOT NULL,
                        ship_name TEXT NOT NULL,
                        teu INTEGER,
                        efficiency REAL,
                        vehicles INTEGER,
                        created_at TEXT DEFAULT CURRENT_TIMESTAMP,
                        UNIQUE(date, shift, ship_name) ON CONFLICT REPLACE
                    )
                ''')

                # Copy rows across, skipping duplicates.
                cursor.execute('''
                    INSERT OR IGNORE INTO daily_handover_logs
                    (date, shift, ship_name, teu, efficiency, vehicles, created_at)
                    SELECT date, shift, ship_name, teu, efficiency, vehicles, created_at
                    FROM daily_handover_logs_old
                ''')

                # Drop the legacy table.
                cursor.execute('DROP TABLE daily_handover_logs_old')
                logger.info("迁移完成!")

            # Lookup indexes.
            cursor.execute('CREATE INDEX IF NOT EXISTS idx_date ON daily_handover_logs(date)')
            cursor.execute('CREATE INDEX IF NOT EXISTS idx_ship ON daily_handover_logs(ship_name)')

            # Monthly unaccounted-TEU table.
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS monthly_unaccounted (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    year_month TEXT NOT NULL UNIQUE,
                    teu INTEGER NOT NULL,
                    note TEXT,
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP
                )
            ''')

            conn.commit()
            logger.debug("数据库表结构初始化完成")

    def insert(self, log: Dict[str, Any]) -> bool:
        """
        Insert a record (replacing an existing one with the same key).

        Args:
            log: log record dict; requires 'date', 'shift', 'ship_name';
                'teu', 'efficiency', 'vehicles' are optional.

        Returns:
            True on success, False when the insert failed (error logged).
        """
        try:
            query = '''
                INSERT OR REPLACE INTO daily_handover_logs
                (date, shift, ship_name, teu, efficiency, vehicles, created_at)
                VALUES (?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
            '''
            params = (
                log['date'], log['shift'], log['ship_name'],
                log.get('teu'), log.get('efficiency'), log.get('vehicles')
            )

            self.execute_update(query, params)
            logger.debug(f"插入记录: {log['date']} {log['shift']} {log['ship_name']}")
            return True

        except Exception as e:
            logger.error(f"插入记录失败: {e}, 记录: {log}")
            return False

    def insert_many(self, logs: List[Dict[str, Any]]) -> int:
        """
        Insert records one by one (so a bad record doesn't abort the batch).

        Args:
            logs: list of log record dicts.

        Returns:
            number of records inserted successfully.
        """
        count = 0
        for log in logs:
            if self.insert(log):
                count += 1

        logger.info(f"批量插入完成,成功 {count}/{len(logs)} 条记录")
        return count

    def query_by_date(self, date: str) -> List[Dict[str, Any]]:
        """
        Query records for a single date.

        Args:
            date: date string.

        Returns:
            matching log records, ordered by shift then ship name.
        """
        query = '''
            SELECT * FROM daily_handover_logs
            WHERE date = ? ORDER BY shift, ship_name
        '''
        return self.execute_query(query, (date,))

    def query_by_ship(self, ship_name: str) -> List[Dict[str, Any]]:
        """
        Query records by (partial) ship name.

        Args:
            ship_name: ship name; matched with LIKE '%name%'.

        Returns:
            matching log records, newest date first.
        """
        query = '''
            SELECT * FROM daily_handover_logs
            WHERE ship_name LIKE ? ORDER BY date DESC
        '''
        return self.execute_query(query, (f'%{ship_name}%',))

    def query_all(self, limit: int = 1000) -> List[Dict[str, Any]]:
        """
        Query all records.

        Args:
            limit: maximum number of records to return.

        Returns:
            log records, newest date first.
        """
        query = '''
            SELECT * FROM daily_handover_logs
            ORDER BY date DESC, shift LIMIT ?
        '''
        return self.execute_query(query, (limit,))

    def get_stats(self) -> Dict[str, Any]:
        """
        Aggregate statistics over the log table.

        Returns:
            dict with 'total' row count, distinct 'ships', and the
            'date_range' ({'start', 'end'}; both None when table empty).
        """
        with self.get_connection() as conn:
            cursor = conn.cursor()

            cursor.execute('SELECT COUNT(*) FROM daily_handover_logs')
            total = cursor.fetchone()[0]

            cursor.execute('SELECT DISTINCT ship_name FROM daily_handover_logs')
            ships = [row[0] for row in cursor.fetchall()]

            cursor.execute('SELECT MIN(date), MAX(date) FROM daily_handover_logs')
            date_range = cursor.fetchone()

            return {
                'total': total,
                'ships': ships,
                'date_range': {'start': date_range[0], 'end': date_range[1]}
            }

    def get_ships_with_monthly_teu(self, year_month: Optional[str] = None) -> List[Dict[str, Any]]:
        """
        Get every ship with its summed TEU for a month (or overall).

        Args:
            year_month: month string like "2025-12"; when None, sum over
                all records.

        Returns:
            rows with 'ship_name' and 'monthly_teu', largest first.
        """
        if year_month:
            query = '''
                SELECT ship_name, SUM(teu) as monthly_teu
                FROM daily_handover_logs
                WHERE date LIKE ?
                GROUP BY ship_name
                ORDER BY monthly_teu DESC
            '''
            return self.execute_query(query, (f'{year_month}%',))
        else:
            query = '''
                SELECT ship_name, SUM(teu) as monthly_teu
                FROM daily_handover_logs
                GROUP BY ship_name
                ORDER BY monthly_teu DESC
            '''
            return self.execute_query(query)

    def insert_unaccounted(self, year_month: str, teu: int, note: str = '') -> bool:
        """
        Insert/replace the unaccounted-TEU figure for a month.

        Args:
            year_month: month string like "2025-12".
            teu: unaccounted TEU amount.
            note: optional free-text note.

        Returns:
            True on success, False on failure (error logged).
        """
        try:
            query = '''
                INSERT OR REPLACE INTO monthly_unaccounted
                (year_month, teu, note, created_at)
                VALUES (?, ?, ?, CURRENT_TIMESTAMP)
            '''
            self.execute_update(query, (year_month, teu, note))
            logger.info(f"插入未统计数据: {year_month} {teu}TEU")
            return True

        except Exception as e:
            logger.error(f"插入未统计数据失败: {e}")
            return False

    def get_unaccounted(self, year_month: str) -> int:
        """
        Get the unaccounted TEU for a month.

        Args:
            year_month: month string like "2025-12".

        Returns:
            the stored TEU amount, or 0 when no row exists.
        """
        query = 'SELECT teu FROM monthly_unaccounted WHERE year_month = ?'
        result = self.execute_query(query, (year_month,))
        return result[0]['teu'] if result else 0

    def delete_by_date(self, date: str) -> int:
        """
        Delete all records for a date.

        Args:
            date: date string.

        Returns:
            number of deleted records.
        """
        query = 'DELETE FROM daily_handover_logs WHERE date = ?'
        return self.execute_update(query, (date,))
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Smoke tests for the daily-log database.
    db = DailyLogsDatabase()

    # Insert test.
    test_log = {
        'date': '2025-12-30',
        'shift': '白班',
        'ship_name': '测试船',
        'teu': 100,
        'efficiency': 3.5,
        'vehicles': 5
    }

    success = db.insert(test_log)
    print(f"插入测试: {'成功' if success else '失败'}")

    # Query test.
    logs = db.query_by_date('2025-12-30')
    print(f"查询结果: {len(logs)} 条记录")

    # Statistics test.
    stats = db.get_stats()
    print(f"统计信息: {stats}")

    # Unaccounted-data test.
    db.insert_unaccounted('2025-12', 118, '测试备注')
    unaccounted = db.get_unaccounted('2025-12')
    print(f"未统计数据: {unaccounted}TEU")

    # Clean up the test record.
    db.delete_by_date('2025-12-30')
    print("测试数据已清理")
|
||||
342
src/database/schedules.py
Normal file
342
src/database/schedules.py
Normal file
@@ -0,0 +1,342 @@
|
||||
#!/usr/bin/env python3
"""
Shift-schedule personnel database module.

Refactored on top of the shared database base class.
"""
import json
import hashlib
from typing import List, Dict, Optional, Any

from src.database.base import DatabaseBase
from src.logging_config import get_logger

logger = get_logger(__name__)
|
||||
|
||||
|
||||
class ScheduleDatabase(DatabaseBase):
    """Shift-schedule personnel database."""

    def __init__(self, db_path: Optional[str] = None):
        """
        Initialize the database and ensure the schema exists.

        Args:
            db_path: database file path; when None the configured
                default is used.
        """
        super().__init__(db_path)
        self._init_schema()

    def _init_schema(self):
        """Create the schedule and sheet-version tables plus indexes."""
        with self.get_connection() as conn:
            cursor = conn.cursor()

            # Per-date schedule table; one row per date.
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS schedule_personnel (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    date TEXT NOT NULL,
                    day_shift TEXT,
                    night_shift TEXT,
                    day_shift_list TEXT, -- JSON数组
                    night_shift_list TEXT, -- JSON数组
                    sheet_id TEXT,
                    sheet_title TEXT,
                    data_hash TEXT, -- 数据哈希,用于检测更新
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    updated_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    UNIQUE(date)
                )
            ''')

            # Sheet-version table (detects whether a source sheet changed).
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS sheet_versions (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    sheet_id TEXT NOT NULL,
                    sheet_title TEXT NOT NULL,
                    revision INTEGER NOT NULL,
                    data_hash TEXT,
                    last_checked_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    UNIQUE(sheet_id)
                )
            ''')

            # Lookup indexes.
            cursor.execute('CREATE INDEX IF NOT EXISTS idx_schedule_date ON schedule_personnel(date)')
            cursor.execute('CREATE INDEX IF NOT EXISTS idx_schedule_sheet ON schedule_personnel(sheet_id)')
            cursor.execute('CREATE INDEX IF NOT EXISTS idx_sheet_versions ON sheet_versions(sheet_id)')

            conn.commit()
            logger.debug("排班数据库表结构初始化完成")

    def _calculate_hash(self, data: Dict[str, Any]) -> str:
        """
        Compute a content hash for change detection.

        MD5 is used as a fast fingerprint here, not for security.

        Args:
            data: data dict (must be JSON-serializable).

        Returns:
            hex MD5 digest of the canonical (sorted-key) JSON encoding.
        """
        data_str = json.dumps(data, sort_keys=True, ensure_ascii=False)
        return hashlib.md5(data_str.encode('utf-8')).hexdigest()

    def check_sheet_update(self, sheet_id: str, sheet_title: str, revision: int, data: Dict[str, Any]) -> bool:
        """
        Check whether a source sheet changed since the last check.

        Records the current revision/hash as a side effect so the next
        call compares against this one.

        Args:
            sheet_id: sheet ID.
            sheet_title: sheet title.
            revision: sheet revision number.
            data: sheet data used for content hashing.

        Returns:
            True: changed (or first seen) — caller should re-fetch.
            False: unchanged — cached data may be used.
        """
        with self.get_connection() as conn:
            cursor = conn.cursor()

            # Look up the last recorded version.
            cursor.execute(
                'SELECT revision, data_hash FROM sheet_versions WHERE sheet_id = ?',
                (sheet_id,)
            )
            result = cursor.fetchone()

            if not result:
                # First sighting: record it and report "changed".
                data_hash = self._calculate_hash(data)
                cursor.execute('''
                    INSERT INTO sheet_versions (sheet_id, sheet_title, revision, data_hash, last_checked_at)
                    VALUES (?, ?, ?, ?, CURRENT_TIMESTAMP)
                ''', (sheet_id, sheet_title, revision, data_hash))
                conn.commit()
                logger.debug(f"首次记录表格版本: {sheet_title} (ID: {sheet_id})")
                return True

            # Compare both the revision number and the content hash.
            old_revision = result['revision']
            old_hash = result['data_hash']
            new_hash = self._calculate_hash(data)

            if old_revision != revision or old_hash != new_hash:
                # Changed: update the stored version info.
                cursor.execute('''
                    UPDATE sheet_versions
                    SET revision = ?, data_hash = ?, last_checked_at = CURRENT_TIMESTAMP
                    WHERE sheet_id = ?
                ''', (revision, new_hash, sheet_id))
                conn.commit()
                logger.info(f"表格有更新: {sheet_title} (ID: {sheet_id})")
                return True

            # Unchanged: only refresh the check timestamp.
            cursor.execute('''
                UPDATE sheet_versions
                SET last_checked_at = CURRENT_TIMESTAMP
                WHERE sheet_id = ?
            ''', (sheet_id,))
            conn.commit()
            logger.debug(f"表格无更新: {sheet_title} (ID: {sheet_id})")
            return False

    def save_schedule(self, date: str, schedule_data: Dict[str, Any],
                      sheet_id: Optional[str] = None, sheet_title: Optional[str] = None) -> bool:
        """
        Save (insert or replace) the schedule for one date.

        Args:
            date: date string (YYYY-MM-DD).
            schedule_data: schedule dict; 'day_shift'/'night_shift' strings
                and 'day_shift_list'/'night_shift_list' name lists.
            sheet_id: source sheet ID.
            sheet_title: source sheet title.

        Returns:
            True on success, False on failure (error logged).
        """
        try:
            # Serialize list fields to JSON and fingerprint the payload.
            day_shift = schedule_data.get('day_shift', '')
            night_shift = schedule_data.get('night_shift', '')
            day_shift_list = json.dumps(schedule_data.get('day_shift_list', []), ensure_ascii=False)
            night_shift_list = json.dumps(schedule_data.get('night_shift_list', []), ensure_ascii=False)
            data_hash = self._calculate_hash(schedule_data)

            # INSERT OR REPLACE so an existing row for the date is updated.
            query = '''
                INSERT OR REPLACE INTO schedule_personnel
                (date, day_shift, night_shift, day_shift_list, night_shift_list,
                 sheet_id, sheet_title, data_hash, updated_at)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
            '''
            params = (
                date, day_shift, night_shift, day_shift_list, night_shift_list,
                sheet_id, sheet_title, data_hash
            )

            self.execute_update(query, params)
            logger.debug(f"保存排班信息: {date}")
            return True

        except Exception as e:
            logger.error(f"保存排班信息失败: {e}, 日期: {date}")
            return False

    def get_schedule(self, date: str) -> Optional[Dict[str, Any]]:
        """
        Get the schedule for one date.

        Args:
            date: date string (YYYY-MM-DD).

        Returns:
            schedule dict (JSON list fields decoded), or None when absent.
        """
        query = 'SELECT * FROM schedule_personnel WHERE date = ?'
        result = self.execute_query(query, (date,))

        if not result:
            return None

        row = result[0]

        # Decode the JSON-encoded name lists.
        day_shift_list = json.loads(row['day_shift_list']) if row['day_shift_list'] else []
        night_shift_list = json.loads(row['night_shift_list']) if row['night_shift_list'] else []

        return {
            'date': row['date'],
            'day_shift': row['day_shift'],
            'night_shift': row['night_shift'],
            'day_shift_list': day_shift_list,
            'night_shift_list': night_shift_list,
            'sheet_id': row['sheet_id'],
            'sheet_title': row['sheet_title'],
            'updated_at': row['updated_at']
        }

    def get_schedule_by_range(self, start_date: str, end_date: str) -> List[Dict[str, Any]]:
        """
        Get schedules within an inclusive date range.

        Args:
            start_date: start date (YYYY-MM-DD).
            end_date: end date (YYYY-MM-DD).

        Returns:
            list of schedule dicts ordered by date.
        """
        query = '''
            SELECT * FROM schedule_personnel
            WHERE date >= ? AND date <= ?
            ORDER BY date
        '''
        results = self.execute_query(query, (start_date, end_date))

        processed_results = []
        for row in results:
            day_shift_list = json.loads(row['day_shift_list']) if row['day_shift_list'] else []
            night_shift_list = json.loads(row['night_shift_list']) if row['night_shift_list'] else []

            processed_results.append({
                'date': row['date'],
                'day_shift': row['day_shift'],
                'night_shift': row['night_shift'],
                'day_shift_list': day_shift_list,
                'night_shift_list': night_shift_list,
                'sheet_id': row['sheet_id'],
                'sheet_title': row['sheet_title'],
                'updated_at': row['updated_at']
            })

        return processed_results

    def delete_old_schedules(self, before_date: str) -> int:
        """
        Delete schedule rows strictly before a date.

        Args:
            before_date: cutoff date (YYYY-MM-DD), exclusive.

        Returns:
            number of deleted records.
        """
        query = 'DELETE FROM schedule_personnel WHERE date < ?'
        return self.execute_update(query, (before_date,))

    def get_stats(self) -> Dict[str, Any]:
        """Return row count, date range and distinct source-sheet count."""
        with self.get_connection() as conn:
            cursor = conn.cursor()

            cursor.execute('SELECT COUNT(*) FROM schedule_personnel')
            total = cursor.fetchone()[0]

            cursor.execute('SELECT MIN(date), MAX(date) FROM schedule_personnel')
            date_range = cursor.fetchone()

            cursor.execute('SELECT COUNT(DISTINCT sheet_id) FROM schedule_personnel')
            sheet_count = cursor.fetchone()[0]

            return {
                'total': total,
                'date_range': {'start': date_range[0], 'end': date_range[1]},
                'sheet_count': sheet_count
            }

    def clear_all(self) -> int:
        """
        Delete all schedule and sheet-version rows.

        Returns:
            total number of deleted records across both tables.
        """
        query1 = 'DELETE FROM schedule_personnel'
        query2 = 'DELETE FROM sheet_versions'

        count1 = self.execute_update(query1)
        count2 = self.execute_update(query2)

        logger.info(f"清空排班数据,删除 {count1} 条排班记录和 {count2} 条版本记录")
        return count1 + count2
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Smoke tests for the schedule database.
    db = ScheduleDatabase()

    # Save test.
    test_schedule = {
        'day_shift': '张勤、杨俊豪',
        'night_shift': '刘炜彬、梁启迟',
        'day_shift_list': ['张勤', '杨俊豪'],
        'night_shift_list': ['刘炜彬', '梁启迟']
    }

    success = db.save_schedule('2025-12-31', test_schedule, 'zcYLIk', '12月')
    print(f"保存测试: {'成功' if success else '失败'}")

    # Fetch test.
    schedule = db.get_schedule('2025-12-31')
    print(f"获取结果: {schedule}")

    # Range-query test.
    schedules = db.get_schedule_by_range('2025-12-01', '2025-12-31')
    print(f"范围查询: {len(schedules)} 条记录")

    # Statistics test.
    stats = db.get_stats()
    print(f"统计信息: {stats}")

    # Sheet-version check test.
    test_data = {'values': [['姓名', '12月31日'], ['张三', '白']]}
    needs_update = db.check_sheet_update('test_sheet', '测试表格', 1, test_data)
    print(f"表格更新检查: {'需要更新' if needs_update else '无需更新'}")

    # Clean up test data.
    db.delete_old_schedules('2026-01-01')
    print("测试数据已清理")
|
||||
Reference in New Issue
Block a user