423baff73b
- Docker bridge 网络隔离(8000 端口封死) - Gunicorn 4 Worker 多进程 - Alembic 数据库迁移基线 - 日志轮转 20m×3 - JWT 密钥 + DB 密码 + CORS 收紧 - 3-2-1 备份链路(NAS + R740-B 冷备) - 连接池 pool_pre_ping + pool_recycle=3600
165 lines
5.2 KiB
Python
165 lines
5.2 KiB
Python
"""
|
||
销售日志服务 — CRUD + Dify 工作流异步触发
|
||
"""
|
||
from __future__ import annotations
|
||
|
||
import uuid
|
||
from datetime import date
|
||
from typing import Any
|
||
|
||
import httpx
|
||
from sqlalchemy import select, func, desc, and_
|
||
from sqlalchemy.ext.asyncio import AsyncSession
|
||
|
||
from app.models.ai import SalesLog
|
||
from app.schemas.auth import CurrentUserPayload
|
||
|
||
|
||
async def create_log(
    db: AsyncSession,
    user: CurrentUserPayload,
    content: str,
    customer_id: str | None = None,
    contact_ids: list[str] | None = None,
    log_date: date | None = None,
) -> dict:
    """Persist a new sales log for the current user and return it as a dict.

    Args:
        db: Active async database session.
        user: Authenticated user payload; ``user_id`` becomes the salesperson.
        content: Free-text body of the log entry.
        customer_id: Optional customer UUID string; stored as NULL when absent.
        contact_ids: Optional list of contact id strings; stored as [] when absent.
        log_date: Business date of the log; defaults to today.

    Returns:
        The freshly committed row serialized via ``_to_dict``.
    """
    # Empty-string customer_id is treated the same as None (falsy check).
    customer_uuid = uuid.UUID(customer_id) if customer_id else None
    record = SalesLog(
        salesperson_id=user.user_id,
        customer_id=customer_uuid,
        contact_ids=contact_ids or [],
        content=content,
        log_date=log_date or date.today(),
    )
    db.add(record)
    await db.commit()
    # Refresh to pull DB-generated fields (id, created_at) back into the object.
    await db.refresh(record)
    return _to_dict(record)
|
||
|
||
|
||
async def list_logs(
    db: AsyncSession,
    user: CurrentUserPayload,
    page: int = 1,
    size: int = 20,
    customer_id: str | None = None,
    user_id: str | None = None,
    start_date: str | None = None,
    end_date: str | None = None,
) -> dict:
    """Return a paginated list of sales logs visible to *user*.

    Args:
        db: Active async database session.
        user: Authenticated user payload; ``data_scope == "self"`` restricts
            results to the user's own logs and makes ``user_id`` ignored.
        page: 1-based page number; values < 1 are clamped to 1.
        size: Page size; values < 1 are clamped to 1.
        customer_id: Optional customer UUID string filter.
        user_id: Optional salesperson UUID string filter (non-"self" scope only).
        start_date / end_date: Optional inclusive ISO date-string bounds on
            ``log_date``.

    Returns:
        Dict with ``total``, ``page``, ``size`` and serialized ``items``.
    """
    # Guard against negative OFFSET / non-positive LIMIT, which the database
    # would reject (page=0 previously produced offset=-size).
    page = max(page, 1)
    size = max(size, 1)

    conditions = [SalesLog.is_deleted.is_(False)]

    # Data-scope permission: "self" users only ever see their own logs;
    # otherwise an explicit user_id filter may be applied.
    if user.data_scope == "self":
        conditions.append(SalesLog.salesperson_id == user.user_id)
    elif user_id:
        conditions.append(SalesLog.salesperson_id == uuid.UUID(user_id))

    if start_date:
        conditions.append(SalesLog.log_date >= start_date)
    if end_date:
        conditions.append(SalesLog.log_date <= end_date)
    if customer_id:
        conditions.append(SalesLog.customer_id == uuid.UUID(customer_id))

    where = and_(*conditions)

    # Total row count for the pagination header.
    count_stmt = select(func.count()).select_from(SalesLog).where(where)
    total = (await db.execute(count_stmt)).scalar() or 0

    # Page of rows, newest first.
    stmt = (
        select(SalesLog)
        .where(where)
        .order_by(desc(SalesLog.created_at))
        .offset((page - 1) * size)
        .limit(size)
    )
    rows = (await db.execute(stmt)).scalars().all()

    return {
        "total": total,
        "page": page,
        "size": size,
        "items": [_to_dict(r) for r in rows],
    }
|
||
|
||
|
||
async def trigger_persona_workflow(
    log_id: uuid.UUID,
    customer_id: uuid.UUID,
    content: str,
    salesperson_name: str = "",
    contact_ids: list[str] | None = None,
) -> None:
    """Fire-and-forget trigger of the Dify persona-extraction workflow.

    Posts the log content to the configured Dify workflow endpoint, retrying
    up to 3 times with linear backoff (10s, 20s).  On a 200 response the
    log row's ``ai_processed`` flag is set via a fresh DB session.  Every
    failure path only prints — callers never see an exception.

    Args:
        log_id: Sales log to flag as processed on success.
        customer_id: Customer the log belongs to; also used as the Dify
            end-user identifier.
        content: Log text sent to the workflow.
        salesperson_name: Optional display name forwarded to the workflow.
        contact_ids: Optional contact ids, joined as a comma-separated string.
    """
    # Hoisted out of the retry loop — previously re-imported every iteration.
    import asyncio

    from app.core.config import settings

    # Skip silently when the workflow integration is not configured.
    if not settings.DIFY_WORKFLOW_PERSONA_KEY or not settings.DIFY_API_BASE_URL:
        print("[Workflow] 画像提取 Workflow 未配置,跳过")
        return

    url = f"{settings.DIFY_API_BASE_URL}/v1/workflows/run"
    headers = {
        "Authorization": f"Bearer {settings.DIFY_WORKFLOW_PERSONA_KEY}",
        "Content-Type": "application/json",
    }
    payload = {
        "inputs": {
            "customer_id": str(customer_id),
            "content": content,
            "salesperson_name": salesperson_name,
            "contact_ids": ",".join(contact_ids) if contact_ids else "",
        },
        "response_mode": "blocking",
        # Dify requires an end-user identifier; the customer id is reused here.
        "user": str(customer_id),
    }

    max_retries = 3
    for attempt in range(max_retries):
        try:
            # Long timeout: "blocking" mode waits for the whole workflow run.
            async with httpx.AsyncClient(timeout=300) as client:
                resp = await client.post(url, json=payload, headers=headers)
            print(f"[Workflow] 画像提取触发 status={resp.status_code}, body={resp.text[:200]}")

            if resp.status_code == 200:
                await _mark_ai_processed(log_id)
                return  # success — stop retrying
            print(f"[Workflow] HTTP {resp.status_code},第 {attempt+1}/{max_retries} 次")
        except Exception as e:
            print(f"[Workflow] 画像提取触发失败 (第 {attempt+1}/{max_retries} 次): {e}")

        # Linear backoff before the next attempt (skipped after the last one).
        if attempt < max_retries - 1:
            await asyncio.sleep(10 * (attempt + 1))


async def _mark_ai_processed(log_id: uuid.UUID) -> None:
    """Best-effort: flag the sales log as AI-processed in its own session.

    A bookkeeping failure here must never fail the workflow trigger, so any
    exception is caught and printed.
    """
    try:
        from app.db.database import async_session_factory
        from sqlalchemy import update as sa_update

        async with async_session_factory() as session:
            await session.execute(
                sa_update(SalesLog)
                .where(SalesLog.id == log_id)
                .values(ai_processed=True)
            )
            await session.commit()
        print(f"[Workflow] ai_processed 已更新 log_id={log_id}")
    except Exception as db_err:
        print(f"[Workflow] ai_processed 回写失败: {db_err}")
|
||
|
||
|
||
def _to_dict(log: SalesLog) -> dict:
    """Serialize a SalesLog row into a JSON-ready dict.

    UUIDs are stringified, dates/datetimes become ISO-8601 strings, and
    nullable fields map to ``None`` (``contact_ids`` falls back to ``[]``).
    """
    customer = log.customer_id
    when = log.log_date
    created = log.created_at
    return {
        "id": str(log.id),
        "salesperson_id": str(log.salesperson_id),
        "customer_id": str(customer) if customer else None,
        "contact_ids": log.contact_ids or [],
        "content": log.content,
        "log_date": when.isoformat() if when else None,
        "ai_processed": log.ai_processed,
        "created_at": created.isoformat() if created else None,
    }
|