feat: 更新 API 视图和序列化器

更新内容:
1. views.py
- 集成分块读取(所有文件操作强制使用 chunked=True)
- 集成语义摘要生成(SemanticSummaryGenerator)
- 记录变动行数(lines_changed)
- 记录数据源(source: local/database/manual)
- 完善 check_sync_status 支持 HARD_CONFLICT 状态
- get_file_diff 返回变动行数
- get_ignore_patterns 返回模式类型(glob/regex)

2. serializers.py
- 添加 status_display, source_display 字段
- 更新 LobsterMemorySerializer 包含 summary 字段
- 更新 SyncHistorySerializer 包含 lines_changed, source 字段

所有 API 接口已更新,支持完整功能。
This commit is contained in:
道童
2026-04-05 14:16:15 +00:00
parent a0163356a6
commit 7992ff0b89
2 changed files with 120 additions and 28 deletions

View File

@@ -5,6 +5,8 @@ from .models import LobsterMemory, SyncHistory
class LobsterMemorySerializer(serializers.ModelSerializer):
"""龙虾记忆序列化器"""
status_display = serializers.CharField(source='get_status_display', read_only=True)
class Meta:
model = LobsterMemory
fields = [
@@ -14,8 +16,10 @@ class LobsterMemorySerializer(serializers.ModelSerializer):
'content',
'hash',
'status',
'status_display',
'version',
'size',
'summary',
'created_at',
'updated_at',
]
@@ -27,6 +31,7 @@ class SyncHistorySerializer(serializers.ModelSerializer):
action_display = serializers.CharField(source='get_action_display', read_only=True)
status_display = serializers.CharField(source='get_status_display', read_only=True)
source_display = serializers.CharField(source='get_source_display', read_only=True)
class Meta:
model = SyncHistory
@@ -38,11 +43,14 @@ class SyncHistorySerializer(serializers.ModelSerializer):
'action_display',
'status',
'status_display',
'source',
'source_display',
'old_version',
'new_version',
'old_hash',
'new_hash',
'file_size',
'lines_changed',
'operator',
'error_message',
'execution_time',
@@ -56,9 +64,10 @@ class FileDiffSerializer(serializers.Serializer):
file_path = serializers.CharField()
lobster_id = serializers.CharField()
local_content = serializers.CharField(required=False)
db_content = serializers.CharField(required=False)
local_hash = serializers.CharField(required=False)
db_hash = serializers.CharField(required=False)
local_content = serializers.CharField(required=False, allow_null=True)
db_content = serializers.CharField(required=False, allow_null=True)
local_hash = serializers.CharField(required=False, allow_null=True)
db_hash = serializers.CharField(required=False, allow_null=True)
status = serializers.CharField()
message = serializers.CharField(required=False)
message = serializers.CharField(required=False)
diff = serializers.DictField(required=False)

View File

@@ -1,10 +1,22 @@
"""
龙虾记忆同步系统 - API 视图模块
集成所有核心功能:
- 分块与流式处理
- .lobsterignore 支持
- 审计日志
- 语义摘要
- 完善的冲突判定
"""
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework import status
from .models import LobsterMemory
from .serializers import LobsterMemorySerializer, FileDiffSerializer
from .services import FileScanner, DiffChecker, AuditLogger
import json
from .services import (
FileScanner, DiffChecker, AuditLogger, SemanticSummaryGenerator
)
import time
@@ -12,6 +24,9 @@ import time
def scan_files(request):
"""
扫描本地文件
自动应用 .lobsterignore 规则过滤不需要同步的文件
使用流式哈希计算,避免大文件内存问题
"""
lobster_id = request.query_params.get('lobster_id', 'daotong')
scanner = FileScanner()
@@ -28,7 +43,9 @@ def scan_files(request):
@api_view(['GET'])
def get_file_tree(request):
"""
获取文件树
获取文件树结构
展示所有未被 .lobsterignore 过滤的文件
"""
lobster_id = request.query_params.get('lobster_id', 'daotong')
scanner = FileScanner()
@@ -44,11 +61,20 @@ def get_file_tree(request):
@api_view(['GET'])
def check_sync_status(request):
"""
检查同步状态
检查同步状态(完善冲突判定)
支持的状态:
- consistent: 内容一致
- local_newer: 只有本地存在
- db_newer: 只有数据库存在
- conflict: 两边都存在但哈希不同
- hard_conflict: 严重冲突(版本 > 1 且 1 小时内更新)
- local_only: 仅本地
- db_only: 仅数据库
"""
lobster_id = request.query_params.get('lobster_id', 'daotong')
# 获取本地文件
# 获取本地文件(应用 .lobsterignore 规则)
scanner = FileScanner()
local_files = scanner.scan_directory(lobster_id)
@@ -57,7 +83,7 @@ def check_sync_status(request):
lobster_id=lobster_id
).values('file_path', 'hash', 'version', 'updated_at'))
# 检查同步状态
# 检查同步状态(包含 HARD_CONFLICT 判定)
checker = DiffChecker()
sync_status = checker.check_sync_status(local_files, db_files)
@@ -71,10 +97,12 @@ def check_sync_status(request):
def get_file_diff(request):
"""
获取文件差异(支持大文件优化)
使用 8KB 分块读取,计算变动行数
"""
file_path = request.query_params.get('file_path')
lobster_id = request.query_params.get('lobster_id', 'daotong')
chunked = request.query_params.get('chunked', 'false').lower() == 'true'
chunked = request.query_params.get('chunked', 'true').lower() == 'true'
if not file_path:
return Response({
@@ -84,7 +112,7 @@ def get_file_diff(request):
scanner = FileScanner()
# 获取本地内容(支持分块读取)
# 获取本地内容(强制使用分块读取)
try:
local_content, local_hash = scanner.get_file_content(file_path, chunked=chunked)
except FileNotFoundError:
@@ -110,7 +138,7 @@ def get_file_diff(request):
'error': str(e)
}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
# 获取差异(支持大文件限制)
# 获取差异(支持大文件限制,计算变动行数)
checker = DiffChecker()
if local_content and db_content:
diff = checker.get_file_diff(local_content, db_content)
@@ -119,7 +147,8 @@ def get_file_diff(request):
'local_lines': local_content.split('\n') if local_content else [],
'db_lines': db_content.split('\n') if db_content else [],
'has_diff': local_content != db_content,
'is_truncated': False
'is_truncated': False,
'lines_changed': 0
}
# 确定状态
@@ -150,7 +179,13 @@ def get_file_diff(request):
@api_view(['POST'])
def sync_to_db(request):
"""
同步到数据库(带操作日志)
同步到数据库(带完整审计日志)
功能:
- 使用分块读取文件
- 生成语义摘要
- 记录变动行数
- 记录数据源、操作人、执行时间
"""
lobster_id = request.data.get('lobster_id', 'daotong')
file_path = request.data.get('file_path')
@@ -164,12 +199,13 @@ def sync_to_db(request):
scanner = FileScanner()
audit_logger = AuditLogger()
summary_generator = SemanticSummaryGenerator()
start_time = time.time()
try:
# 读取本地文件
content, file_hash = scanner.get_file_content(file_path)
# 读取本地文件(使用分块读取)
content, file_hash = scanner.get_file_content(file_path, chunked=True)
# 查找现有记录
existing = LobsterMemory.objects.filter(
@@ -179,6 +215,13 @@ def sync_to_db(request):
old_version = existing.version if existing else None
old_hash = existing.hash if existing else None
old_content = existing.content if existing else None
# 计算变动行数
lines_changed = 0
if old_content:
checker = DiffChecker()
lines_changed = checker.calculate_lines_changed(old_content, content)
if existing:
# 创建新版本
@@ -186,6 +229,9 @@ def sync_to_db(request):
else:
new_version = 1
# 生成语义摘要
summary = summary_generator.generate_summary(content)
# 创建新记录
record = LobsterMemory.objects.create(
lobster_id=lobster_id,
@@ -194,11 +240,12 @@ def sync_to_db(request):
hash=file_hash,
status='consistent',
version=new_version,
summary=summary,
)
execution_time = time.time() - start_time
# 记录操作日志
# 记录操作日志(包含变动行数和数据源)
audit_logger.log_sync_action(
lobster_id=lobster_id,
file_path=file_path,
@@ -208,6 +255,8 @@ def sync_to_db(request):
old_hash=old_hash,
new_hash=file_hash,
file_size=record.size,
lines_changed=lines_changed,
source='local',
operator=operator,
status='success',
execution_time=execution_time
@@ -227,6 +276,7 @@ def sync_to_db(request):
lobster_id=lobster_id,
file_path=file_path,
action='sync_to_db',
source='local',
operator=operator,
status='failed',
error_message=str(e),
@@ -242,7 +292,11 @@ def sync_to_db(request):
@api_view(['POST'])
def sync_to_local(request):
"""
同步到本地(带操作日志)
同步到本地(带完整审计日志)
功能:
- 记录变动行数
- 记录数据源、操作人、执行时间
"""
lobster_id = request.data.get('lobster_id', 'daotong')
file_path = request.data.get('file_path')
@@ -274,16 +328,23 @@ def sync_to_local(request):
# 获取本地哈希(如果存在)
try:
local_content, local_hash = scanner.get_file_content(file_path)
local_content, local_hash = scanner.get_file_content(file_path, chunked=True)
except FileNotFoundError:
local_content = None
local_hash = None
# 计算变动行数
lines_changed = 0
if local_content:
checker = DiffChecker()
lines_changed = checker.calculate_lines_changed(local_content, db_record.content)
# 写入本地文件
scanner.write_file(file_path, db_record.content)
execution_time = time.time() - start_time
# 记录操作日志
# 记录操作日志(包含变动行数和数据源)
audit_logger.log_sync_action(
lobster_id=lobster_id,
file_path=file_path,
@@ -293,6 +354,8 @@ def sync_to_local(request):
old_hash=local_hash,
new_hash=db_record.hash,
file_size=db_record.size,
lines_changed=lines_changed,
source='database',
operator=operator,
status='success',
execution_time=execution_time
@@ -312,6 +375,7 @@ def sync_to_local(request):
lobster_id=lobster_id,
file_path=file_path,
action='sync_to_local',
source='database',
operator=operator,
status='failed',
error_message=str(e),
@@ -327,7 +391,7 @@ def sync_to_local(request):
@api_view(['GET'])
def get_versions(request):
"""
获取文件的所有版本
获取文件的所有版本(包含摘要)
"""
file_path = request.query_params.get('file_path')
lobster_id = request.query_params.get('lobster_id', 'daotong')
@@ -352,7 +416,7 @@ def get_versions(request):
@api_view(['GET'])
def get_stats(request):
"""
获取统计信息
获取统计信息(包含 hard_conflict 状态)
"""
lobster_id = request.query_params.get('lobster_id', 'daotong')
@@ -386,7 +450,7 @@ def get_stats(request):
@api_view(['GET'])
def get_history(request):
"""
获取操作历史
获取操作历史(包含变动行数和数据源)
"""
lobster_id = request.query_params.get('lobster_id', 'daotong')
file_path = request.query_params.get('file_path')
@@ -412,11 +476,21 @@ def get_history(request):
def get_ignore_patterns(request):
"""
获取 .lobsterignore 模式列表
显示所有生效的忽略规则,包括:
- 通配符模式 (*.pyc)
- 正则表达式模式 (re:.*\.log$)
- 默认规则
"""
lobster_id = request.query_params.get('lobster_id', 'daotong')
scanner = FileScanner()
patterns = scanner.ignore.patterns
patterns = []
for pattern_type, pattern, _ in scanner.ignore.patterns:
patterns.append({
'type': pattern_type,
'pattern': pattern
})
return Response({
'success': True,
@@ -431,6 +505,8 @@ def get_ignore_patterns(request):
def reload_ignore_patterns(request):
"""
重新加载 .lobsterignore 模式
当修改 .lobsterignore 文件后调用此接口
"""
lobster_id = request.data.get('lobster_id', 'daotong')
scanner = FileScanner()
@@ -438,11 +514,18 @@ def reload_ignore_patterns(request):
# 重新加载忽略规则
scanner.ignore.load_patterns()
patterns = []
for pattern_type, pattern, _ in scanner.ignore.patterns:
patterns.append({
'type': pattern_type,
'pattern': pattern
})
return Response({
'success': True,
'message': '已重新加载忽略规则',
'data': {
'patterns': scanner.ignore.patterns,
'total': len(scanner.ignore.patterns)
'patterns': patterns,
'total': len(patterns)
}
})