Files
道童 077656a6cf feat: 添加三个重要补丁
补丁 1: 分块读取与流式传输
- 8KB 分块读取大文件,避免内存飙升
- 流式计算文件哈希,无需加载完整内容
- 差异对比限制,大文件只显示头尾各 500 行
- 新增 chunked 参数支持流式传输

补丁 2: .lobsterignore 机制
- 创建 IgnorePattern 类实现模式匹配
- 支持 .lobsterignore 文件配置
- 添加默认忽略规则(.DS_Store, node_modules 等)
- 支持通配符匹配(*, ?, 目录匹配)
- 新增 API: GET /api/ignore/patterns/, POST /api/ignore/reload/

补丁 3: 操作溯源(Audit Log)
- 新增 SyncHistory 模型记录同步历史
- 创建 AuditLogger 类用于记录操作
- 所有同步操作自动记录日志
- 记录操作者、版本变化、哈希变化、执行时间等
- 新增 API: GET /api/history/

更新内容:
- models.py: 新增 SyncHistory 模型
- services.py: 新增 IgnorePattern, AuditLogger, 分块读取方法
- views.py: 所有同步操作添加日志记录, 新增历史和忽略规则接口
- serializers.py: 新增 SyncHistorySerializer
- urls.py: 新增历史和忽略规则路由
- .lobsterignore.example: 示例忽略文件
- CHANGELOG.md: 详细更新日志
2026-04-05 12:20:57 +00:00

125 lines
3.9 KiB
Python

from django.db import models
from django.core.validators import FileExtensionValidator
import hashlib
class LobsterMemory(models.Model):
    """Lobster memory file model — one row per (lobster_id, file_path, version).

    Stores the full text content of a synced memory file together with its
    SHA-256 hash, byte size, and sync status. ``hash`` and ``size`` are
    recomputed automatically on every ``save()``.
    """

    # Sync-state machine values; labels are user-facing (kept verbatim).
    STATUS_CHOICES = [
        ('consistent', '一致'),
        ('local_newer', '本地更新'),
        ('db_newer', '数据库更新'),
        ('conflict', '冲突'),
    ]

    lobster_id = models.CharField(max_length=50, help_text='龙虾ID')
    file_path = models.CharField(max_length=500, help_text='文件相对路径')
    content = models.TextField(help_text='文件内容')
    # 64 hex chars = SHA-256 digest; maintained by save(), not by callers.
    hash = models.CharField(max_length=64, help_text='SHA256哈希')
    status = models.CharField(
        max_length=20,
        choices=STATUS_CHOICES,
        default='consistent',
        help_text='同步状态'
    )
    version = models.IntegerField(default=1, help_text='版本号')
    # Size in bytes of the UTF-8 encoding of ``content``; maintained by save().
    size = models.IntegerField(default=0, help_text='文件大小(字节)')
    created_at = models.DateTimeField(auto_now_add=True, help_text='创建时间')
    updated_at = models.DateTimeField(auto_now=True, help_text='更新时间')

    class Meta:
        db_table = 'lobster_memory'
        # One row per version of a file; history is kept as separate rows.
        unique_together = ('lobster_id', 'file_path', 'version')
        ordering = ['-updated_at']
        indexes = [
            models.Index(fields=['lobster_id', 'file_path']),
            models.Index(fields=['status']),
            models.Index(fields=['updated_at']),
        ]

    def __str__(self):
        return f"{self.lobster_id}/{self.file_path} (v{self.version})"

    @staticmethod
    def compute_hash(content):
        """Return the SHA-256 hex digest of *content* (a str), UTF-8 encoded.

        Static because it reads no instance state; remains callable on
        instances (``self.compute_hash(...)``) exactly as before.
        """
        return hashlib.sha256(content.encode('utf-8')).hexdigest()

    def save(self, *args, **kwargs):
        """Recompute ``hash`` and ``size`` from ``content`` before persisting.

        Bug fix: the previous truthiness check (``if self.content:``) skipped
        empty strings, so a file whose content was cleared kept a stale hash
        and size. Only genuinely missing (None) content is skipped now; an
        empty file correctly gets the SHA-256 of "" and size 0.
        """
        if self.content is not None:
            self.hash = self.compute_hash(self.content)
            self.size = len(self.content.encode('utf-8'))
        super().save(*args, **kwargs)
class SyncHistory(models.Model):
    """Audit-log record of a single sync operation.

    One row is appended per sync action (see the views-layer AuditLogger),
    capturing the operator, the before/after version and content hash, the
    outcome, and the wall-clock execution time. Rows are never updated —
    this table is append-only by convention.
    """

    # Kinds of sync operation that get logged; labels are user-facing.
    ACTION_CHOICES = [
        ('sync_to_db', '同步到数据库'),
        ('sync_to_local', '同步到本地'),
        ('auto_sync', '自动同步'),
        ('manual_merge', '手动合并'),
    ]
    # Outcome of the operation; 'partial' covers multi-file batches where
    # only some files succeeded.
    STATUS_CHOICES = [
        ('success', '成功'),
        ('failed', '失败'),
        ('partial', '部分成功'),
    ]

    lobster_id = models.CharField(max_length=50, help_text='龙虾ID')
    file_path = models.CharField(max_length=500, help_text='文件相对路径')
    action = models.CharField(
        max_length=20,
        choices=ACTION_CHOICES,
        help_text='操作类型'
    )
    status = models.CharField(
        max_length=20,
        choices=STATUS_CHOICES,
        help_text='操作状态'
    )
    # Version/hash before and after the operation; nullable because a failed
    # or first-ever sync may have no "before" (or no "after") state.
    old_version = models.IntegerField(null=True, blank=True, help_text='操作前版本')
    new_version = models.IntegerField(null=True, blank=True, help_text='操作后版本')
    old_hash = models.CharField(max_length=64, null=True, blank=True, help_text='操作前哈希')
    new_hash = models.CharField(max_length=64, null=True, blank=True, help_text='操作后哈希')
    file_size = models.IntegerField(default=0, help_text='文件大小(字节)')
    # Defaults to 'system' for automated syncs with no authenticated user.
    operator = models.CharField(max_length=50, default='system', help_text='操作者')
    # Populated only when status is 'failed' (or 'partial'); NULL on success.
    error_message = models.TextField(null=True, blank=True, help_text='错误信息')
    execution_time = models.FloatField(default=0, help_text='执行时间(秒)')
    created_at = models.DateTimeField(auto_now_add=True, help_text='操作时间')

    class Meta:
        db_table = 'sync_history'
        # Newest operations first — matches the GET /api/history/ listing.
        ordering = ['-created_at']
        indexes = [
            models.Index(fields=['lobster_id', 'file_path']),
            models.Index(fields=['action']),
            models.Index(fields=['status']),
            models.Index(fields=['created_at']),
        ]

    def __str__(self):
        return f"{self.action} - {self.lobster_id}/{self.file_path} ({self.status})"