Files
道童 077656a6cf feat: 添加三个重要补丁
补丁 1: 分块读取与流式传输
- 8KB 分块读取大文件,避免内存飙升
- 流式计算文件哈希,无需加载完整内容
- 差异对比限制,大文件只显示头尾各 500 行
- 新增 chunked 参数支持流式传输

补丁 2: .lobsterignore 机制
- 创建 IgnorePattern 类实现模式匹配
- 支持 .lobsterignore 文件配置
- 添加默认忽略规则(.DS_Store, node_modules 等)
- 支持通配符匹配(*, ?, 目录匹配)
- 新增 API: GET /api/ignore/patterns/, POST /api/ignore/reload/

补丁 3: 操作溯源(Audit Log)
- 新增 SyncHistory 模型记录同步历史
- 创建 AuditLogger 类用于记录操作
- 所有同步操作自动记录日志
- 记录操作者、版本变化、哈希变化、执行时间等
- 新增 API: GET /api/history/

更新内容:
- models.py: 新增 SyncHistory 模型
- services.py: 新增 IgnorePattern, AuditLogger, 分块读取方法
- views.py: 所有同步操作添加日志记录, 新增历史和忽略规则接口
- serializers.py: 新增 SyncHistorySerializer
- urls.py: 新增历史和忽略规则路由
- .lobsterignore.example: 示例忽略文件
- CHANGELOG.md: 详细更新日志
2026-04-05 12:20:57 +00:00

448 lines
12 KiB
Python

from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework import status
from .models import LobsterMemory
from .serializers import LobsterMemorySerializer, FileDiffSerializer
from .services import FileScanner, DiffChecker, AuditLogger
import json
import time
@api_view(['GET'])
def scan_files(request):
    """List the files found by scanning a lobster's local directory.

    Query params: lobster_id (default 'daotong').
    """
    lobster_id = request.query_params.get('lobster_id', 'daotong')
    found = FileScanner().scan_directory(lobster_id)
    payload = {
        'success': True,
        'data': found,
        'total': len(found),
    }
    return Response(payload)
@api_view(['GET'])
def get_file_tree(request):
    """Return the hierarchical file tree for a lobster's directory.

    Query params: lobster_id (default 'daotong').
    """
    lobster_id = request.query_params.get('lobster_id', 'daotong')
    tree = FileScanner().get_file_tree(lobster_id)
    return Response({'success': True, 'data': tree})
@api_view(['GET'])
def check_sync_status(request):
    """Compare local files against stored DB records and report sync state.

    Query params: lobster_id (default 'daotong').
    """
    lobster_id = request.query_params.get('lobster_id', 'daotong')
    # Local side: fresh scan of the directory.
    local_files = FileScanner().scan_directory(lobster_id)
    # DB side: only the fields DiffChecker needs.
    db_files = list(
        LobsterMemory.objects
        .filter(lobster_id=lobster_id)
        .values('file_path', 'hash', 'version', 'updated_at')
    )
    result = DiffChecker().check_sync_status(local_files, db_files)
    return Response({'success': True, 'data': result})
@api_view(['GET'])
def get_file_diff(request):
    """Return local vs. database content, hashes, sync status and a diff
    for a single file (with large-file support).

    Query params:
        file_path: required — path of the file to compare.
        lobster_id: owner id (default 'daotong').
        chunked: 'true' to stream-read the local file in chunks.

    Returns 400 when file_path is missing, 500 on a database error.
    """
    file_path = request.query_params.get('file_path')
    lobster_id = request.query_params.get('lobster_id', 'daotong')
    chunked = request.query_params.get('chunked', 'false').lower() == 'true'
    if not file_path:
        return Response({
            'success': False,
            'error': 'file_path is required'
        }, status=status.HTTP_400_BAD_REQUEST)
    scanner = FileScanner()
    # Local side: a missing file is a normal state, not an error.
    try:
        local_content, local_hash = scanner.get_file_content(file_path, chunked=chunked)
    except FileNotFoundError:
        local_content = None
        local_hash = None
    # Database side: the highest version number is the current record.
    try:
        db_record = LobsterMemory.objects.filter(
            lobster_id=lobster_id,
            file_path=file_path
        ).order_by('-version').first()
        db_content = db_record.content if db_record else None
        db_hash = db_record.hash if db_record else None
    except Exception as e:
        return Response({
            'success': False,
            'error': str(e)
        }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    checker = DiffChecker()
    # BUG FIX: compare against None explicitly instead of truthiness — an
    # empty file ("") previously fell into the fallback branch as if the
    # file were missing, skipping the real diff computation.
    if local_content is not None and db_content is not None:
        diff = checker.get_file_diff(local_content, db_content)
    else:
        diff = {
            'local_lines': local_content.split('\n') if local_content else [],
            'db_lines': db_content.split('\n') if db_content else [],
            'has_diff': local_content != db_content,
            'is_truncated': False
        }
    # Classify sync state from the two hashes.
    # NOTE(review): when neither side exists both hashes are None, so this
    # reports 'consistent' — confirm that is the intended semantics.
    if local_hash == db_hash:
        sync_status = 'consistent'
    elif local_hash and not db_hash:
        sync_status = 'local_newer'
    elif not local_hash and db_hash:
        sync_status = 'db_newer'
    else:
        sync_status = 'conflict'
    return Response({
        'success': True,
        'data': {
            'file_path': file_path,
            'lobster_id': lobster_id,
            'local_content': local_content,
            'db_content': db_content,
            'local_hash': local_hash,
            'db_hash': db_hash,
            'status': sync_status,
            'diff': diff
        }
    })
@api_view(['POST'])
def sync_to_db(request):
    """Persist the local copy of a file as a new versioned DB record,
    writing an audit-log entry for both success and failure.

    Body params: file_path (required), lobster_id (default 'daotong'),
    operator (default 'system').
    """
    lobster_id = request.data.get('lobster_id', 'daotong')
    file_path = request.data.get('file_path')
    operator = request.data.get('operator', 'system')
    if not file_path:
        return Response(
            {'success': False, 'error': 'file_path is required'},
            status=status.HTTP_400_BAD_REQUEST,
        )
    scanner = FileScanner()
    logger = AuditLogger()
    started = time.time()
    try:
        # Read the local file (content + hash).
        content, digest = scanner.get_file_content(file_path)
        # Latest existing version, if any, drives the next version number.
        latest = (
            LobsterMemory.objects
            .filter(lobster_id=lobster_id, file_path=file_path)
            .order_by('-version')
            .first()
        )
        prev_version = latest.version if latest else None
        prev_hash = latest.hash if latest else None
        next_version = latest.version + 1 if latest else 1
        # Each sync appends a fresh row rather than updating in place.
        record = LobsterMemory.objects.create(
            lobster_id=lobster_id,
            file_path=file_path,
            content=content,
            hash=digest,
            status='consistent',
            version=next_version,
        )
        elapsed = time.time() - started
        # Audit trail: who did what, version/hash transition, timing.
        logger.log_sync_action(
            lobster_id=lobster_id,
            file_path=file_path,
            action='sync_to_db',
            old_version=prev_version,
            new_version=next_version,
            old_hash=prev_hash,
            new_hash=digest,
            file_size=record.size,
            operator=operator,
            status='success',
            execution_time=elapsed,
        )
        return Response({
            'success': True,
            'message': '已同步到数据库',
            'data': LobsterMemorySerializer(record).data,
        })
    except Exception as e:
        elapsed = time.time() - started
        # Failures are logged too, without version/hash details.
        logger.log_sync_action(
            lobster_id=lobster_id,
            file_path=file_path,
            action='sync_to_db',
            operator=operator,
            status='failed',
            error_message=str(e),
            execution_time=elapsed,
        )
        return Response(
            {'success': False, 'error': str(e)},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )
@api_view(['POST'])
def sync_to_local(request):
    """Write the latest DB version of a file back to disk, with audit
    logging for both success and failure.

    Body params: file_path (required), lobster_id (default 'daotong'),
    operator (default 'system').
    """
    lobster_id = request.data.get('lobster_id', 'daotong')
    file_path = request.data.get('file_path')
    operator = request.data.get('operator', 'system')
    if not file_path:
        return Response(
            {'success': False, 'error': 'file_path is required'},
            status=status.HTTP_400_BAD_REQUEST,
        )
    scanner = FileScanner()
    logger = AuditLogger()
    started = time.time()
    try:
        # Highest version number is the record to restore from.
        db_record = (
            LobsterMemory.objects
            .filter(lobster_id=lobster_id, file_path=file_path)
            .order_by('-version')
            .first()
        )
        if not db_record:
            return Response(
                {'success': False, 'error': 'File not found in database'},
                status=status.HTTP_404_NOT_FOUND,
            )
        # Capture the pre-write local hash for the audit trail, if the
        # file exists on disk at all.
        try:
            _, local_hash = scanner.get_file_content(file_path)
        except FileNotFoundError:
            local_hash = None
        # Overwrite the local file with the DB content.
        scanner.write_file(file_path, db_record.content)
        elapsed = time.time() - started
        logger.log_sync_action(
            lobster_id=lobster_id,
            file_path=file_path,
            action='sync_to_local',
            old_version=None,
            new_version=db_record.version,
            old_hash=local_hash,
            new_hash=db_record.hash,
            file_size=db_record.size,
            operator=operator,
            status='success',
            execution_time=elapsed,
        )
        return Response({
            'success': True,
            'message': '已同步到本地',
            'data': LobsterMemorySerializer(db_record).data,
        })
    except Exception as e:
        elapsed = time.time() - started
        # Failures are logged too, without version/hash details.
        logger.log_sync_action(
            lobster_id=lobster_id,
            file_path=file_path,
            action='sync_to_local',
            operator=operator,
            status='failed',
            error_message=str(e),
            execution_time=elapsed,
        )
        return Response(
            {'success': False, 'error': str(e)},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )
@api_view(['GET'])
def get_versions(request):
    """List every stored version of a file, newest first.

    Query params: file_path (required), lobster_id (default 'daotong').
    Returns 400 when file_path is missing.
    """
    file_path = request.query_params.get('file_path')
    lobster_id = request.query_params.get('lobster_id', 'daotong')
    if not file_path:
        return Response(
            {'success': False, 'error': 'file_path is required'},
            status=status.HTTP_400_BAD_REQUEST,
        )
    history = (
        LobsterMemory.objects
        .filter(lobster_id=lobster_id, file_path=file_path)
        .order_by('-version')
    )
    serialized = LobsterMemorySerializer(history, many=True).data
    return Response({'success': True, 'data': serialized})
@api_view(['GET'])
def get_stats(request):
    """Return aggregate statistics for a lobster's stored files:
    total count, per-status counts, and total size in bytes and MiB.

    Query params: lobster_id (default 'daotong').
    """
    from django.db.models import Count, Sum
    lobster_id = request.query_params.get('lobster_id', 'daotong')
    qs = LobsterMemory.objects.filter(lobster_id=lobster_id)
    total_files = qs.count()
    # PERF FIX: the original issued one COUNT query per STATUS_CHOICES
    # entry (N+1). A single GROUP BY query yields the same counts; keys
    # are pre-seeded with 0 so every declared status is always present.
    status_counts = {choice: 0 for choice, _ in LobsterMemory.STATUS_CHOICES}
    for row in qs.values('status').annotate(n=Count('id')):
        if row['status'] in status_counts:
            status_counts[row['status']] = row['n']
    # Total size across all versions; None when the queryset is empty.
    total_size = qs.aggregate(total=Sum('size'))['total'] or 0
    return Response({
        'success': True,
        'data': {
            'total_files': total_files,
            'status_counts': status_counts,
            'total_size': total_size,
            'total_size_mb': round(total_size / 1024 / 1024, 2)
        }
    })
@api_view(['GET'])
def get_history(request):
    """Return the audit trail of sync operations.

    Query params:
        lobster_id: owner id (default 'daotong').
        file_path: optional filter to one file.
        action: optional filter by action name.
        limit: max number of entries (default 100).

    Returns 400 when limit is not an integer.
    """
    lobster_id = request.query_params.get('lobster_id', 'daotong')
    file_path = request.query_params.get('file_path')
    action = request.query_params.get('action')
    # BUG FIX: a non-numeric ?limit= previously raised ValueError and
    # surfaced as an unhandled 500; validate it and answer 400 instead.
    try:
        limit = int(request.query_params.get('limit', 100))
    except (TypeError, ValueError):
        return Response({
            'success': False,
            'error': 'limit must be an integer'
        }, status=status.HTTP_400_BAD_REQUEST)
    audit_logger = AuditLogger()
    history = audit_logger.get_history(
        lobster_id=lobster_id,
        file_path=file_path,
        action=action,
        limit=limit
    )
    return Response({
        'success': True,
        'data': history,
        'total': len(history)
    })
@api_view(['GET'])
def get_ignore_patterns(request):
    """Return the currently loaded .lobsterignore patterns.

    NOTE(review): the lobster_id query param was read but never used, so
    the dead local has been removed; patterns come from the FileScanner's
    ignore handler regardless of lobster. Confirm whether per-lobster
    ignore files are planned before wiring the param back in.
    """
    scanner = FileScanner()
    patterns = scanner.ignore.patterns
    return Response({
        'success': True,
        'data': {
            'patterns': patterns,
            'total': len(patterns)
        }
    })
@api_view(['POST'])
def reload_ignore_patterns(request):
    """Re-read the .lobsterignore patterns and return the refreshed list.

    NOTE(review): this reloads patterns on a FileScanner constructed for
    this request. If pattern state is per-instance rather than shared,
    the reload has no effect on other requests — verify against the
    IgnorePattern implementation in services.py.

    The lobster_id body param was read but never used; the dead local
    has been removed without changing behavior.
    """
    scanner = FileScanner()
    # Re-read the ignore file into the scanner's pattern list.
    scanner.ignore.load_patterns()
    return Response({
        'success': True,
        'message': '已重新加载忽略规则',
        'data': {
            'patterns': scanner.ignore.patterns,
            'total': len(scanner.ignore.patterns)
        }
    })