feat: 添加三个重要补丁

补丁 1: 分块读取与流式传输
- 8KB 分块读取大文件,避免内存飙升
- 流式计算文件哈希,无需加载完整内容
- 差异对比限制,大文件只显示头尾各 500 行
- 新增 chunked 参数支持流式传输

补丁 2: .lobsterignore 机制
- 创建 IgnorePattern 类实现模式匹配
- 支持 .lobsterignore 文件配置
- 添加默认忽略规则(.DS_Store, node_modules 等)
- 支持通配符匹配(*, ?, 目录匹配)
- 新增 API: GET /api/ignore/patterns/, POST /api/ignore/reload/

补丁 3: 操作溯源(Audit Log)
- 新增 SyncHistory 模型记录同步历史
- 创建 AuditLogger 类用于记录操作
- 所有同步操作自动记录日志
- 记录操作者、版本变化、哈希变化、执行时间等
- 新增 API: GET /api/history/

更新内容:
- models.py: 新增 SyncHistory 模型
- services.py: 新增 IgnorePattern, AuditLogger, 分块读取方法
- views.py: 所有同步操作添加日志记录, 新增历史和忽略规则接口
- serializers.py: 新增 SyncHistorySerializer
- urls.py: 新增历史和忽略规则路由
- .lobsterignore.example: 示例忽略文件
- CHANGELOG.md: 详细更新日志
This commit is contained in:
道童
2026-04-05 12:20:57 +00:00
parent d9420b6cc6
commit 077656a6cf
7 changed files with 1007 additions and 43 deletions

View File

@@ -3,8 +3,9 @@ from rest_framework.response import Response
from rest_framework import status
from .models import LobsterMemory
from .serializers import LobsterMemorySerializer, FileDiffSerializer
from .services import FileScanner, DiffChecker
from .services import FileScanner, DiffChecker, AuditLogger
import json
import time
@api_view(['GET'])
@@ -69,10 +70,11 @@ def check_sync_status(request):
@api_view(['GET'])
def get_file_diff(request):
"""
获取文件差异
获取文件差异(支持大文件优化)
"""
file_path = request.query_params.get('file_path')
lobster_id = request.query_params.get('lobster_id', 'daotong')
chunked = request.query_params.get('chunked', 'false').lower() == 'true'
if not file_path:
return Response({
@@ -82,9 +84,9 @@ def get_file_diff(request):
scanner = FileScanner()
# 获取本地内容
# 获取本地内容(支持分块读取)
try:
local_content, local_hash = scanner.get_file_content(file_path)
local_content, local_hash = scanner.get_file_content(file_path, chunked=chunked)
except FileNotFoundError:
local_content = None
local_hash = None
@@ -108,7 +110,7 @@ def get_file_diff(request):
'error': str(e)
}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
# 获取差异
# 获取差异(支持大文件限制)
checker = DiffChecker()
if local_content and db_content:
diff = checker.get_file_diff(local_content, db_content)
@@ -116,18 +118,19 @@ def get_file_diff(request):
diff = {
'local_lines': local_content.split('\n') if local_content else [],
'db_lines': db_content.split('\n') if db_content else [],
'has_diff': local_content != db_content
'has_diff': local_content != db_content,
'is_truncated': False
}
# 确定状态
if local_hash == db_hash:
status = 'consistent'
sync_status = 'consistent'
elif local_hash and not db_hash:
status = 'local_newer'
sync_status = 'local_newer'
elif not local_hash and db_hash:
status = 'db_newer'
sync_status = 'db_newer'
else:
status = 'conflict'
sync_status = 'conflict'
return Response({
'success': True,
@@ -138,7 +141,7 @@ def get_file_diff(request):
'db_content': db_content,
'local_hash': local_hash,
'db_hash': db_hash,
'status': status,
'status': sync_status,
'diff': diff
}
})
@@ -147,10 +150,11 @@ def get_file_diff(request):
@api_view(['POST'])
def sync_to_db(request):
"""
同步到数据库
同步到数据库(带操作日志)
"""
lobster_id = request.data.get('lobster_id', 'daotong')
file_path = request.data.get('file_path')
operator = request.data.get('operator', 'system')
if not file_path:
return Response({
@@ -159,6 +163,9 @@ def sync_to_db(request):
}, status=status.HTTP_400_BAD_REQUEST)
scanner = FileScanner()
audit_logger = AuditLogger()
start_time = time.time()
try:
# 读取本地文件
@@ -170,6 +177,9 @@ def sync_to_db(request):
file_path=file_path
).order_by('-version').first()
old_version = existing.version if existing else None
old_hash = existing.hash if existing else None
if existing:
# 创建新版本
new_version = existing.version + 1
@@ -186,6 +196,23 @@ def sync_to_db(request):
version=new_version,
)
execution_time = time.time() - start_time
# 记录操作日志
audit_logger.log_sync_action(
lobster_id=lobster_id,
file_path=file_path,
action='sync_to_db',
old_version=old_version,
new_version=new_version,
old_hash=old_hash,
new_hash=file_hash,
file_size=record.size,
operator=operator,
status='success',
execution_time=execution_time
)
return Response({
'success': True,
'message': '已同步到数据库',
@@ -193,6 +220,19 @@ def sync_to_db(request):
})
except Exception as e:
execution_time = time.time() - start_time
# 记录失败日志
audit_logger.log_sync_action(
lobster_id=lobster_id,
file_path=file_path,
action='sync_to_db',
operator=operator,
status='failed',
error_message=str(e),
execution_time=execution_time
)
return Response({
'success': False,
'error': str(e)
@@ -202,10 +242,11 @@ def sync_to_db(request):
@api_view(['POST'])
def sync_to_local(request):
"""
同步到本地
同步到本地(带操作日志)
"""
lobster_id = request.data.get('lobster_id', 'daotong')
file_path = request.data.get('file_path')
operator = request.data.get('operator', 'system')
if not file_path:
return Response({
@@ -214,6 +255,9 @@ def sync_to_local(request):
}, status=status.HTTP_400_BAD_REQUEST)
scanner = FileScanner()
audit_logger = AuditLogger()
start_time = time.time()
try:
# 从数据库获取最新版本
@@ -228,9 +272,32 @@ def sync_to_local(request):
'error': 'File not found in database'
}, status=status.HTTP_404_NOT_FOUND)
# 获取本地哈希(如果存在)
try:
local_content, local_hash = scanner.get_file_content(file_path)
except FileNotFoundError:
local_hash = None
# 写入本地文件
scanner.write_file(file_path, db_record.content)
execution_time = time.time() - start_time
# 记录操作日志
audit_logger.log_sync_action(
lobster_id=lobster_id,
file_path=file_path,
action='sync_to_local',
old_version=None,
new_version=db_record.version,
old_hash=local_hash,
new_hash=db_record.hash,
file_size=db_record.size,
operator=operator,
status='success',
execution_time=execution_time
)
return Response({
'success': True,
'message': '已同步到本地',
@@ -238,6 +305,19 @@ def sync_to_local(request):
})
except Exception as e:
execution_time = time.time() - start_time
# 记录失败日志
audit_logger.log_sync_action(
lobster_id=lobster_id,
file_path=file_path,
action='sync_to_local',
operator=operator,
status='failed',
error_message=str(e),
execution_time=execution_time
)
return Response({
'success': False,
'error': str(e)
@@ -300,4 +380,69 @@ def get_stats(request):
'total_size': total_size,
'total_size_mb': round(total_size / 1024 / 1024, 2)
}
})
@api_view(['GET'])
def get_history(request):
    """
    Return the operation (audit) history recorded by AuditLogger.

    Query params:
        lobster_id: owner id to filter by (default 'daotong').
        file_path: optional file-path filter.
        action: optional action-type filter (e.g. 'sync_to_db').
        limit: maximum number of records to return (default 100).

    Returns a JSON payload with the matching history entries and their count.
    """
    lobster_id = request.query_params.get('lobster_id', 'daotong')
    file_path = request.query_params.get('file_path')
    action = request.query_params.get('action')
    # Parse ?limit= defensively: int() on raw user input would raise
    # ValueError and surface to the client as an unhandled 500.
    try:
        limit = int(request.query_params.get('limit', 100))
    except (TypeError, ValueError):
        limit = 100

    audit_logger = AuditLogger()
    history = audit_logger.get_history(
        lobster_id=lobster_id,
        file_path=file_path,
        action=action,
        limit=limit
    )

    return Response({
        'success': True,
        'data': history,
        'total': len(history)
    })
@api_view(['GET'])
def get_ignore_patterns(request):
    """
    List the currently loaded .lobsterignore patterns.

    Returns a JSON payload with the pattern list and its length.

    NOTE(review): the ``lobster_id`` query param was previously read but
    never used — patterns come from the scanner's ignore handler, which
    appears to be global rather than per-lobster. TODO confirm whether
    per-lobster ignore rules are planned before re-adding the parameter.
    """
    scanner = FileScanner()
    patterns = scanner.ignore.patterns

    return Response({
        'success': True,
        'data': {
            'patterns': patterns,
            'total': len(patterns)
        }
    })
@api_view(['POST'])
def reload_ignore_patterns(request):
    """
    Re-read the .lobsterignore configuration and return the fresh patterns.

    Returns a JSON payload confirming the reload, with the reloaded
    pattern list and its length.

    NOTE(review): the ``lobster_id`` body field was previously read but
    never used — the reload acts on the scanner's ignore handler, which
    appears to be global rather than per-lobster. TODO confirm intent.
    """
    scanner = FileScanner()
    # Force a re-read of the ignore rules from disk.
    scanner.ignore.load_patterns()
    patterns = scanner.ignore.patterns

    return Response({
        'success': True,
        'message': '已重新加载忽略规则',
        'data': {
            'patterns': patterns,
            'total': len(patterns)
        }
    })