v0.1.0: CRM/ERP 系统内测版本 - 安全加固完成
- Docker bridge 网络隔离(8000 端口封死) - Gunicorn 4 Worker 多进程 - Alembic 数据库迁移基线 - 日志轮转 20m×3 - JWT 密钥 + DB 密码 + CORS 收紧 - 3-2-1 备份链路(NAS + R740-B 冷备) - 连接池 pool_pre_ping + pool_recycle=3600
This commit is contained in:
@@ -0,0 +1,354 @@
|
||||
"""
|
||||
AI 复盘报告路由 —— /api/reports
|
||||
- POST /generate: SSE 流式生成复盘报告
|
||||
- POST /confirm: 确认存档报告
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import uuid
|
||||
from datetime import date, datetime
|
||||
|
||||
from fastapi import APIRouter, Body, Depends, Header, Query
|
||||
from fastapi.responses import StreamingResponse
|
||||
from sqlalchemy import and_, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.api.deps import get_current_user
|
||||
from app.db.database import get_db
|
||||
from app.schemas.auth import CurrentUserPayload
|
||||
from app.schemas.response import ok
|
||||
|
||||
router = APIRouter(prefix="/reports", tags=["AI 复盘报告"])
|
||||
|
||||
|
||||
@router.post("/generate", summary="SSE 流式生成复盘报告")
async def generate_report(
    start_date: date = Body(..., embed=True),
    end_date: date = Body(..., embed=True),
    db: AsyncSession = Depends(get_db),
    current_user: CurrentUserPayload = Depends(get_current_user),
    authorization: str | None = Header(None),
):
    """Stream an AI-generated review report to the client over SSE.

    Aggregates the current user's sales logs within ``[start_date, end_date]``,
    runs the Dify workflow in streaming mode, and relays the output as a
    ``text/event-stream`` response.
    """
    event_stream = _report_sse_generator(
        db,
        current_user,
        start_date,
        end_date,
        authorization or "",
    )
    return StreamingResponse(event_stream, media_type="text/event-stream")
|
||||
|
||||
|
||||
async def _report_sse_generator(
|
||||
db: AsyncSession,
|
||||
user: CurrentUserPayload,
|
||||
start_date: date,
|
||||
end_date: date,
|
||||
authorization: str = "",
|
||||
):
|
||||
import httpx
|
||||
from app.core.config import settings
|
||||
from app.models.ai import SalesLog
|
||||
|
||||
# 1. 聚合日志
|
||||
stmt = (
|
||||
select(SalesLog)
|
||||
.where(
|
||||
SalesLog.salesperson_id == user.user_id,
|
||||
SalesLog.log_date >= start_date,
|
||||
SalesLog.log_date <= end_date,
|
||||
SalesLog.is_deleted.is_(False),
|
||||
)
|
||||
.order_by(SalesLog.log_date)
|
||||
)
|
||||
logs = (await db.execute(stmt)).scalars().all()
|
||||
|
||||
if not logs:
|
||||
yield f"data: {json.dumps({'type': 'text', 'content': '⚠️ 该时间段内暂无销售日志数据,无法生成复盘报告。'}, ensure_ascii=False)}\n\n"
|
||||
yield f"data: {json.dumps({'type': 'done'}, ensure_ascii=False)}\n\n"
|
||||
return
|
||||
|
||||
# 拼接日志摘要
|
||||
log_summary = "\n".join([
|
||||
f"[{log.log_date}] {log.content}" for log in logs
|
||||
])
|
||||
|
||||
yield f"data: {json.dumps({'type': 'text', 'content': f'📊 找到 {len(logs)} 条日志,正在调用 AI 生成复盘报告...\\n\\n'}, ensure_ascii=False)}\n\n"
|
||||
|
||||
# 2. 调用 Dify Workflow
|
||||
if not settings.DIFY_WORKFLOW_REPORT_KEY or not settings.DIFY_API_BASE_URL:
|
||||
yield f"data: {json.dumps({'type': 'text', 'content': '⚠️ 周报 Workflow 未配置,请联系管理员设置 DIFY_WORKFLOW_REPORT_KEY。'}, ensure_ascii=False)}\n\n"
|
||||
yield f"data: {json.dumps({'type': 'done'}, ensure_ascii=False)}\n\n"
|
||||
return
|
||||
|
||||
url = f"{settings.DIFY_API_BASE_URL}/v1/workflows/run"
|
||||
headers = {
|
||||
"Authorization": f"Bearer {settings.DIFY_WORKFLOW_REPORT_KEY}",
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
payload = {
|
||||
"inputs": {
|
||||
"user_id": str(user.user_id),
|
||||
"user_name": user.real_name or user.username,
|
||||
"period_start": start_date.isoformat(),
|
||||
"period_end": end_date.isoformat(),
|
||||
"report_type": "monthly",
|
||||
"sales_logs": log_summary,
|
||||
"request": f"请基于以上 {len(logs)} 条销售日志,生成 {start_date} 至 {end_date} 的复盘报告。",
|
||||
"authorization": authorization,
|
||||
},
|
||||
"response_mode": "streaming",
|
||||
"user": str(user.user_id),
|
||||
}
|
||||
|
||||
print(f"[Report SSE] 开始调用 Dify: {url}")
|
||||
print(f"[Report SSE] payload inputs keys: {list(payload['inputs'].keys())}")
|
||||
|
||||
try:
|
||||
async with httpx.AsyncClient(timeout=httpx.Timeout(600.0, connect=30.0)) as client:
|
||||
async with client.stream("POST", url, json=payload, headers=headers) as resp:
|
||||
print(f"[Report SSE] Dify 响应状态: {resp.status_code}")
|
||||
if resp.status_code != 200:
|
||||
error_text = ""
|
||||
async for chunk in resp.aiter_text():
|
||||
error_text += chunk
|
||||
print(f"[Report SSE] Dify 错误: {error_text[:500]}")
|
||||
if resp.status_code in (401, 403):
|
||||
yield f"data: {json.dumps({'type': 'text', 'content': '⚠️ Dify API Key 无效或已过期 (HTTP {}), 请在系统设置中检查 DIFY_WORKFLOW_REPORT_KEY 配置。'.format(resp.status_code)}, ensure_ascii=False)}\n\n"
|
||||
else:
|
||||
yield f"data: {json.dumps({'type': 'text', 'content': f'⚠️ Dify 返回错误 ({resp.status_code}): {error_text[:200]}'}, ensure_ascii=False)}\n\n"
|
||||
yield f"data: {json.dumps({'type': 'done'}, ensure_ascii=False)}\n\n"
|
||||
return
|
||||
|
||||
buf = ""
|
||||
chunk_count = 0
|
||||
async for chunk in resp.aiter_text():
|
||||
chunk_count += 1
|
||||
buf += chunk
|
||||
while "\n\n" in buf:
|
||||
event_block, buf = buf.split("\n\n", 1)
|
||||
data_line = ""
|
||||
for line in event_block.split("\n"):
|
||||
if line.startswith("data: "):
|
||||
data_line = line[6:]
|
||||
if not data_line or data_line.strip() == "[DONE]":
|
||||
continue
|
||||
try:
|
||||
event = json.loads(data_line)
|
||||
except json.JSONDecodeError:
|
||||
print(f"[Report SSE] JSON 解析失败: {data_line[:100]}")
|
||||
continue
|
||||
|
||||
event_type = event.get("event", "")
|
||||
# 打印所有事件的概要信息
|
||||
event_data = event.get("data", {})
|
||||
node_id = event_data.get("node_id", "")
|
||||
node_type = event_data.get("node_type", "")
|
||||
status = event_data.get("status", "")
|
||||
error_msg = event_data.get("error", "")
|
||||
print(f"[Report SSE] event={event_type} node_type={node_type} status={status} error={str(error_msg)[:200]}")
|
||||
|
||||
if event_type == "text_chunk":
|
||||
text = event.get("data", {}).get("text", "")
|
||||
if text:
|
||||
yield f"data: {json.dumps({'type': 'text', 'content': text}, ensure_ascii=False)}\n\n"
|
||||
|
||||
elif event_type == "node_finished":
|
||||
node_data = event.get("data", {})
|
||||
node_type = node_data.get("node_type", "")
|
||||
print(f"[Report SSE] node_finished: node_type={node_type}")
|
||||
# 捕获 LLM 节点的输出
|
||||
if node_type == "llm":
|
||||
llm_outputs = node_data.get("outputs", {})
|
||||
llm_text = llm_outputs.get("text", "")
|
||||
if llm_text:
|
||||
print(f"[Report SSE] LLM 节点输出: {len(llm_text)} 字符")
|
||||
yield f"data: {json.dumps({'type': 'text', 'content': llm_text}, ensure_ascii=False)}\n\n"
|
||||
|
||||
elif event_type == "workflow_finished":
|
||||
outputs = event.get("data", {}).get("outputs", {})
|
||||
print(f"[Report SSE] workflow_finished data keys: {list(event.get('data', {}).keys())}")
|
||||
print(f"[Report SSE] workflow_finished outputs keys: {list(outputs.keys())}")
|
||||
print(f"[Report SSE] workflow_finished outputs preview: {str(outputs)[:500]}")
|
||||
output_text = outputs.get("text", "") or outputs.get("output", "") or outputs.get("result", "")
|
||||
if not output_text:
|
||||
# 尝试取第一个非空值
|
||||
for v in outputs.values():
|
||||
if isinstance(v, str) and len(v) > 20:
|
||||
output_text = v
|
||||
break
|
||||
if output_text:
|
||||
yield f"data: {json.dumps({'type': 'text', 'content': output_text}, ensure_ascii=False)}\n\n"
|
||||
print(f"[Report SSE] Workflow 完成, output_text长度: {len(output_text)}")
|
||||
|
||||
print(f"[Report SSE] 流结束,共收到 {chunk_count} 个 chunk")
|
||||
|
||||
except httpx.TimeoutException:
|
||||
print(f"[Report SSE] 超时!")
|
||||
yield f"data: {json.dumps({'type': 'text', 'content': '\\n⚠️ Dify 响应超时(120秒),请稍后重试'}, ensure_ascii=False)}\n\n"
|
||||
except Exception as e:
|
||||
print(f"[Report SSE] 异常: {e!s}")
|
||||
yield f"data: {json.dumps({'type': 'text', 'content': f'\\n⚠️ 报告生成失败: {e!s}'}, ensure_ascii=False)}\n\n"
|
||||
|
||||
yield f"data: {json.dumps({'type': 'done'}, ensure_ascii=False)}\n\n"
|
||||
print(f"[Report SSE] SSE 流完全结束")
|
||||
|
||||
|
||||
@router.post("/confirm", summary="确认并存档复盘报告")
async def confirm_report(
    start_date: date = Body(..., embed=True),
    end_date: date = Body(..., embed=True),
    content_md: str = Body(..., embed=True),
    report_type: str = Body("monthly", embed=True),
    db: AsyncSession = Depends(get_db),
    current_user: CurrentUserPayload = Depends(get_current_user),
) -> dict:
    """Persist a user-confirmed review report for the current user.

    The record is created directly in ``confirmed`` status; the generated
    id and status are echoed back to the caller.
    """
    from app.models.ai import AiReportDraft

    draft = AiReportDraft(
        author_id=current_user.user_id,
        report_type=report_type,
        period_start=start_date,
        period_end=end_date,
        content_md=content_md,
        status="confirmed",
    )
    db.add(draft)
    await db.commit()
    # Refresh so server-generated fields (id) are populated on the instance.
    await db.refresh(draft)

    return ok(
        data={"id": str(draft.id), "status": draft.status},
        message="复盘报告已确认存档",
    )
|
||||
|
||||
|
||||
@router.get("/history", summary="查询复盘报告历史列表")
async def list_reports(
    page: int = Query(1, ge=1),
    size: int = Query(20, ge=1, le=100),
    db: AsyncSession = Depends(get_db),
    current_user: CurrentUserPayload = Depends(get_current_user),
) -> dict:
    """Return the caller's non-deleted reports, newest first, paginated."""
    from sqlalchemy import desc
    from sqlalchemy import func as sa_func

    from app.models.ai import AiReportDraft

    # Both the count and the page query share the same ownership filters.
    filters = [
        AiReportDraft.author_id == current_user.user_id,
        AiReportDraft.is_deleted.is_(False),
    ]

    count_stmt = select(sa_func.count()).select_from(AiReportDraft).where(*filters)
    total = (await db.execute(count_stmt)).scalar() or 0

    page_stmt = (
        select(AiReportDraft)
        .where(*filters)
        .order_by(desc(AiReportDraft.created_at))
        .offset((page - 1) * size)
        .limit(size)
    )
    rows = (await db.execute(page_stmt)).scalars().all()

    def _serialize(r) -> dict:
        # Dates/timestamps are emitted as ISO-8601; audit timestamps may be
        # unset, so they are None-guarded.
        return {
            "id": str(r.id),
            "report_type": r.report_type,
            "period_start": r.period_start.isoformat(),
            "period_end": r.period_end.isoformat(),
            "status": r.status,
            "content_md": r.content_md,
            "created_at": r.created_at.isoformat() if r.created_at else None,
            "updated_at": r.updated_at.isoformat() if r.updated_at else None,
        }

    items = [_serialize(r) for r in rows]
    return ok(data={"total": total, "items": items, "page": page, "size": size})
|
||||
|
||||
|
||||
@router.put("/{report_id}", summary="修改复盘报告内容")
async def update_report(
    report_id: uuid.UUID,
    content_md: str = Body(..., embed=True),
    status: str = Body("confirmed", embed=True),
    db: AsyncSession = Depends(get_db),
    current_user: CurrentUserPayload = Depends(get_current_user),
) -> dict:
    """Overwrite the markdown body (and status) of one of the caller's reports.

    Raises NotFoundException when the report does not exist, is soft-deleted,
    or belongs to another user (ownership is enforced in the query itself).
    """
    from app.models.ai import AiReportDraft

    stmt = select(AiReportDraft).where(
        AiReportDraft.id == report_id,
        AiReportDraft.author_id == current_user.user_id,
        AiReportDraft.is_deleted.is_(False),
    )
    draft = (await db.execute(stmt)).scalar_one_or_none()
    if draft is None:
        from app.core.exceptions import NotFoundException

        raise NotFoundException("报告不存在")

    draft.content_md = content_md
    draft.status = status
    await db.commit()
    return ok(message="报告已更新")
|
||||
|
||||
|
||||
@router.delete("/{report_id}", summary="删除复盘报告")
async def delete_report(
    report_id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
    current_user: CurrentUserPayload = Depends(get_current_user),
) -> dict:
    """Soft-delete one of the caller's reports (sets is_deleted, keeps the row).

    Raises NotFoundException when the report is missing, already deleted,
    or owned by another user.
    """
    from app.models.ai import AiReportDraft

    stmt = select(AiReportDraft).where(
        AiReportDraft.id == report_id,
        AiReportDraft.author_id == current_user.user_id,
        AiReportDraft.is_deleted.is_(False),
    )
    draft = (await db.execute(stmt)).scalar_one_or_none()
    if draft is None:
        from app.core.exceptions import NotFoundException

        raise NotFoundException("报告不存在")

    draft.is_deleted = True
    await db.commit()
    return ok(message="报告已删除")
|
||||
|
||||
|
||||
@router.post("/drafts", summary="Dify Workflow 回调 — 接收 LLM 生成的复盘报告")
async def receive_draft(
    author_id: uuid.UUID = Body(..., embed=True),
    report_type: str = Body("monthly", embed=True),
    period_start: date = Body(..., embed=True),
    period_end: date = Body(..., embed=True),
    content_md: str = Body(..., embed=True),
    db: AsyncSession = Depends(get_db),
) -> dict:
    """Callback target for Dify Workflow HTTP request #2; no CRM user auth.

    NOTE(review): this route is deliberately unauthenticated so the external
    workflow can call back, which means any client that can reach it may
    store reports under an arbitrary author_id. Consider a shared-secret
    header — TODO confirm network-level access restriction in deployment.
    """
    from app.models.ai import AiReportDraft

    draft = AiReportDraft(
        author_id=author_id,
        report_type=report_type,
        period_start=period_start,
        period_end=period_end,
        content_md=content_md,
        status="confirmed",
    )
    db.add(draft)
    await db.commit()
    # Refresh to obtain the server-generated id for the response/logging.
    await db.refresh(draft)

    print(f"[Report Drafts] Dify 回调存储成功: {draft.id}, 内容长度: {len(content_md)}")
    return ok(
        data={"id": str(draft.id), "status": draft.status},
        message="复盘报告已由 Dify Workflow 存档",
    )
|
||||
|
||||
Reference in New Issue
Block a user