v0.1.0: CRM/ERP 系统内测版本 - 安全加固完成

- Docker bridge 网络隔离(8000 端口封死)
- Gunicorn 4 Worker 多进程
- Alembic 数据库迁移基线
- 日志轮转 20m×3
- JWT 密钥 + DB 密码 + CORS 收紧
- 3-2-1 备份链路(NAS + R740-B 冷备)
- 连接池 pool_pre_ping + pool_recycle=3600
This commit is contained in:
hankin
2026-03-16 07:31:37 +00:00
commit 423baff73b
2578 changed files with 824643 additions and 0 deletions
+1
View File
@@ -0,0 +1 @@
# SHBL-CRM Backend Application Package
View File
+40
View File
@@ -0,0 +1,40 @@
# -*- coding: utf-8 -*-
"""
API 公共依赖
提供 JWT 令牌验证依赖,用于需要认证的路由。
"""
from fastapi import Depends, HTTPException, status
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from app.core.security import decode_access_token
# Extracts the bearer token from the Authorization header
bearer_scheme = HTTPBearer(auto_error=True)


async def get_current_user(
    credentials: HTTPAuthorizationCredentials = Depends(bearer_scheme),
) -> dict:
    """
    Decode the JWT carried in `Authorization: Bearer <token>` and return its payload.

    Usage: current_user: dict = Depends(get_current_user)
    """
    token_payload = decode_access_token(credentials.credentials)
    if token_payload is not None:
        return token_payload
    # decode failed: token is missing claims, tampered with, or expired
    raise HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="令牌无效或已过期",
        headers={"WWW-Authenticate": "Bearer"},
    )


async def require_admin(current_user: dict = Depends(get_current_user)) -> dict:
    """Allow only the admin role through; everyone else gets 403."""
    if current_user.get("role") == "admin":
        return current_user
    raise HTTPException(
        status_code=status.HTTP_403_FORBIDDEN,
        detail="权限不足,需要管理员角色",
    )
View File
+37
View File
@@ -0,0 +1,37 @@
# -*- coding: utf-8 -*-
"""
认证端点
处理用户登录,签发 JWT 令牌。
"""
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db
from app.core.security import create_access_token
from app.crud.user import authenticate_user
from app.schemas.user import Token, UserLogin
router = APIRouter()


@router.post("/login", response_model=Token, summary="用户登录", tags=["认证"])
async def login(body: UserLogin, db: AsyncSession = Depends(get_db)):
    """
    Verify the username/password pair and, on success, issue a JWT access token.

    The frontend must send it on later requests as `Authorization: Bearer <token>`.
    """
    account = await authenticate_user(db, body.username, body.password)
    if account is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="用户名或密码错误",
            headers={"WWW-Authenticate": "Bearer"},
        )
    jwt_token = create_access_token(subject=account.username, role=account.role)
    return Token(
        access_token=jwt_token,
        role=account.role,
        username=account.username,
    )
+31
View File
@@ -0,0 +1,31 @@
# -*- coding: utf-8 -*-
"""
健康检查端点
用于 Nginx/LB 探活和数据库连接状态探测。
"""
from fastapi import APIRouter, Depends
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db
router = APIRouter()


@router.get("/health", summary="健康检查", tags=["系统"])
async def health_check(db: AsyncSession = Depends(get_db)):
    """
    Probe whether the service and its database connection are alive.

    - DB reachable   -> {"status": "healthy", "database": "connected"}
    - DB unreachable -> {"status": "degraded", "database": "disconnected", "detail": "..."}
    """
    try:
        await db.execute(text("SELECT 1"))
    except Exception as exc:
        return {
            "status": "degraded",
            "database": "disconnected",
            "detail": str(exc),
        }
    return {"status": "healthy", "database": "connected"}
+81
View File
@@ -0,0 +1,81 @@
# -*- coding: utf-8 -*-
"""
客户沟通日志 API
POST /api/v1/logs - 提交日志并触发后台 AI 标签提取
"""
import uuid
from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException, status
from pydantic import BaseModel, Field
from sqlalchemy.ext.asyncio import AsyncSession
from app.api.deps import get_current_user
from app.core.database import get_db
from app.models.crm_business import CustomerLog
from app.services.ai_workflow import process_log_with_ai
router = APIRouter()


# ---- Request/response models ----
class LogCreate(BaseModel):
    """Request body for submitting a communication log."""
    customer_id: uuid.UUID = Field(..., description="关联客户 ID")
    content: str = Field(..., min_length=5, max_length=5000, description="沟通日志内容")


class LogResponse(BaseModel):
    """Response returned on successful submission."""
    id: uuid.UUID
    message: str = "日志已提交,AI 正在后台分析标签和待办"


# ---- Routes ----
@router.post(
    "",
    response_model=LogResponse,
    status_code=status.HTTP_200_OK,
    summary="提交客户沟通日志",
    tags=["客户日志"],
)
async def create_customer_log(
    body: LogCreate,
    background_tasks: BackgroundTasks,
    db: AsyncSession = Depends(get_db),
    current_user: dict = Depends(get_current_user),
):
    """
    Accept a customer communication log submitted by the frontend:
    1. Persist it into the customer_logs table immediately
    2. Enqueue the AI tag-extraction task onto the BackgroundTasks queue
    3. Return 200 OK right away (without waiting for the AI)

    The background AI task will:
    - call qwen3:14b to analyse the log content
    - extract at most 3 customer tags -> customer_tags
    - generate 1 follow-up todo      -> follow_up_todos
    """
    # Step 1: write the log record right away
    log = CustomerLog(
        customer_id=body.customer_id,
        content=body.content,
    )
    db.add(log)
    await db.flush()
    await db.refresh(log)
    # Step 2: enqueue the AI processing in the background
    # *** Key: pass log.id / body.content / body.customer_id as plain values ***
    # process_log_with_ai opens its own DB session; it never shares this request's `db`
    background_tasks.add_task(
        process_log_with_ai,
        log_id=log.id,
        content=body.content,
        customer_id=body.customer_id,
    )
    # Step 3: return immediately (do not await the AI)
    return LogResponse(id=log.id)
+55
View File
@@ -0,0 +1,55 @@
# -*- coding: utf-8 -*-
"""
销售复盘报告 API
GET /api/v1/reports/monthly - 获取当月销售复盘报告 (AI 生成)
"""
from fastapi import APIRouter, Depends
from pydantic import BaseModel
from sqlalchemy.ext.asyncio import AsyncSession
from app.api.deps import get_current_user
from app.core.database import get_db
from app.services.analytics import generate_monthly_report
router = APIRouter()


# ---- Response models ----
class StageMetric(BaseModel):
    """Aggregated metrics for a single pipeline stage."""
    stage: str
    count: int
    total_amount: float


class MonthlyReportResponse(BaseModel):
    """Monthly sales-review report response."""
    metrics: list[StageMetric]
    report: str


# ---- Routes ----
@router.get(
    "/monthly",
    response_model=MonthlyReportResponse,
    summary="获取当月销售复盘报告",
    tags=["数据报告"],
)
async def get_monthly_report(
    db: AsyncSession = Depends(get_db),
    current_user: dict = Depends(get_current_user),
):
    """
    Build the monthly sales review report:
    1. Pre-aggregate opportunity counts and amounts per stage with SQL
    2. Inject the real figures into the prompt and call qwen3:14b to write the analysis
    3. Return the structured data together with the AI report text

    Note: this endpoint waits synchronously (it is user-triggered);
    AI generation may take 10-30 seconds, so the frontend should show a spinner.
    """
    result = await generate_monthly_report(db)
    return MonthlyReportResponse(**result)
+22
View File
@@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-
"""
API v1 路由汇总
所有 v1 版本的子路由在此注册,由 main.py 统一挂载到 /api/v1 前缀。
"""
from fastapi import APIRouter
from app.api.v1.endpoints import auth, health, logs, reports
api_v1_router = APIRouter()

# Mount each business module's sub-router
api_v1_router.include_router(health.router, prefix="", tags=["系统"])
api_v1_router.include_router(auth.router, prefix="/auth", tags=["认证"])
api_v1_router.include_router(logs.router, prefix="/logs", tags=["客户日志"])
api_v1_router.include_router(reports.router, prefix="/reports", tags=["数据报告"])

# Future modules are appended here, for example:
# from app.api.v1.endpoints import clients, expenses
# api_v1_router.include_router(clients.router, prefix="/clients", tags=["客户管理"])
# api_v1_router.include_router(expenses.router, prefix="/expenses", tags=["报销管理"])
View File
+65
View File
@@ -0,0 +1,65 @@
# -*- coding: utf-8 -*-
"""
核心配置模块
使用 Pydantic v2 Settings 管理所有环境变量,支持 .env 文件自动加载。
"""
from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
    """Global application configuration.

    Every secret is injected through environment variables (or the .env
    file); hard-coding sensitive values elsewhere is forbidden.
    """

    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        case_sensitive=False,
    )

    # ---- Application basics ----
    APP_NAME: str = "SHBL-CRM"
    APP_VERSION: str = "2.0.0"
    DEBUG: bool = False

    # ---- Database (PostgreSQL + asyncpg) ----
    DB_HOST: str = "127.0.0.1"
    DB_PORT: int = 5432
    DB_USER: str = "crm_admin"
    DB_PASSWORD: str = "change_me_in_production"
    DB_NAME: str = "shbl_crm"

    @property
    def DATABASE_URL(self) -> str:
        """Async PostgreSQL connection string (asyncpg driver).

        Username and password are percent-encoded so credentials containing
        URL-special characters (@ : / # ...) cannot corrupt the DSN.
        """
        from urllib.parse import quote_plus

        return (
            f"postgresql+asyncpg://{quote_plus(self.DB_USER)}:{quote_plus(self.DB_PASSWORD)}"
            f"@{self.DB_HOST}:{self.DB_PORT}/{self.DB_NAME}"
        )

    @property
    def DATABASE_URL_SYNC(self) -> str:
        """Sync connection string, used only by Alembic migrations."""
        from urllib.parse import quote_plus

        return (
            f"postgresql+psycopg2://{quote_plus(self.DB_USER)}:{quote_plus(self.DB_PASSWORD)}"
            f"@{self.DB_HOST}:{self.DB_PORT}/{self.DB_NAME}"
        )

    # ---- JWT security ----
    SECRET_KEY: str = "REPLACE_WITH_RANDOM_64_CHAR_HEX"
    JWT_ALGORITHM: str = "HS256"
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 60 * 24  # 24 hours

    # ---- CORS allow-list (strict mode, "*" is forbidden) ----
    CORS_ORIGINS: list[str] = [
        "http://localhost:5173",  # Vite dev server
        "http://localhost:8080",  # Nginx production frontend
    ]

    # ---- AI service (Dify BaaS platform) ----
    DIFY_BASE_URL: str = "http://192.168.1.88/v1"
    DIFY_LOG_APP_API_KEY: str = ""  # log-analysis app (completion)
    DIFY_REPORT_APP_API_KEY: str = ""  # monthly-report app (completion)


# Global singleton; other modules use: from app.core.config import settings
settings = Settings()
+58
View File
@@ -0,0 +1,58 @@
# -*- coding: utf-8 -*-
"""
异步数据库引擎与会话管理
使用 SQLAlchemy 2.0 异步模式 + asyncpg 驱动,配置连接池参数。
提供 get_db() 依赖注入函数供 FastAPI 路由使用。
"""
from collections.abc import AsyncGenerator
from sqlalchemy.ext.asyncio import (
AsyncSession,
async_sessionmaker,
create_async_engine,
)
from sqlalchemy.orm import DeclarativeBase
from app.core.config import settings
# ---- Async engine (with connection-pool configuration) ----
engine = create_async_engine(
    settings.DATABASE_URL,
    echo=settings.DEBUG,  # echo SQL statements in DEBUG mode
    pool_size=20,  # number of persistent pooled connections
    max_overflow=10,  # extra transient connections allowed beyond pool_size
    pool_pre_ping=True,  # ping before checkout so stale connections are never handed out
    pool_recycle=3600,  # max connection age (seconds), guards against server-side disconnects
)

# ---- Async session factory ----
AsyncSessionLocal = async_sessionmaker(
    bind=engine,
    class_=AsyncSession,
    expire_on_commit=False,  # keep attributes loaded after commit to avoid lazy-load errors
)


# ---- ORM base class ----
class Base(DeclarativeBase):
    """Every ORM model inherits from this base; Alembic detects schema changes via Base.metadata."""
    pass


# ---- Dependency: one database session per request ----
async def get_db() -> AsyncGenerator[AsyncSession, None]:
    """
    Generator intended for FastAPI Depends().

    Each request gets its own session: committed on success, rolled back
    on exception, always closed when the request finishes.
    Usage: db: AsyncSession = Depends(get_db)
    """
    async with AsyncSessionLocal() as session:
        try:
            yield session
            # commit inside the try so a failing commit also triggers rollback
            await session.commit()
        except Exception:
            await session.rollback()
            raise
        finally:
            # redundant with the context manager's own close, but harmless
            await session.close()
+142
View File
@@ -0,0 +1,142 @@
# -*- coding: utf-8 -*-
"""
Dify BaaS API 客户端
取代原有的 OllamaClient,所有 AI 调用统一走 Dify 平台 API。
Dify 部署地址: http://192.168.1.88
文档参考: https://docs.dify.ai/guides/application-publishing/developing-with-apis
"""
import logging
import httpx
from app.core.config import settings
logger = logging.getLogger("dify_client")


class DifyClient:
    """
    Async client for the Dify platform API.

    Each Dify app has its own API key:
    - log-analysis app    -> DIFY_LOG_APP_API_KEY
    - monthly-report app  -> DIFY_REPORT_APP_API_KEY
    Pass the matching key per call.
    """

    # Timeout (seconds) applied to every Dify HTTP call
    _TIMEOUT = 60.0

    def __init__(self, base_url: str = "http://192.168.1.88/v1"):
        self.base_url = base_url.rstrip("/")

    async def _post_json(
        self,
        endpoint: str,
        api_key: str,
        payload: dict,
        label: str,
    ) -> dict | None:
        """
        Shared transport: POST `payload` to `endpoint`, return the decoded JSON body.

        Centralizes the request/error handling previously duplicated across
        both public methods. Any failure (non-200, timeout, transport or
        decode error) is logged and mapped to None — never raised.

        :param label: tag used in log lines ("Dify API" / "Dify Workflow")
        """
        url = f"{self.base_url}/{endpoint}"
        headers = {
            "Authorization": f"Bearer {api_key}",
            "Content-Type": "application/json",
        }
        try:
            async with httpx.AsyncClient(timeout=self._TIMEOUT) as client:
                response = await client.post(url, headers=headers, json=payload)
            if response.status_code != 200:
                logger.error(
                    "%s 非 200 响应: status=%d body=%s",
                    label,
                    response.status_code,
                    response.text[:500],
                )
                return None
            return response.json()
        except httpx.TimeoutException:
            logger.error("%s 超时 (60s): url=%s key=...%s", label, url, api_key[-6:])
            return None
        except Exception as e:
            logger.error("%s 异常: %s (key=...%s)", label, e, api_key[-6:], exc_info=True)
            return None

    async def call_text_generator(
        self,
        api_key: str,
        inputs: dict,
        query: str = "",
    ) -> str:
        """
        Call a Dify text-generation (completion) app.

        :param api_key: Dify app API key ("app-xxx" format)
        :param inputs: variable dict; keys must match the variables configured in Dify
        :param query: optional user query text
        :return: the `answer` text returned by Dify, or "" on any failure
        """
        payload = {
            "inputs": inputs,
            "query": query,
            "response_mode": "blocking",
            "user": "crm-backend",
        }
        data = await self._post_json("completion-messages", api_key, payload, "Dify API")
        if data is None:
            return ""
        answer = data.get("answer", "")
        logger.info(
            "Dify 调用成功: %d chars (key=...%s)",
            len(answer),
            api_key[-6:],
        )
        return answer

    async def call_workflow(
        self,
        api_key: str,
        inputs: dict,
        user: str = "crm-backend",
    ) -> dict | str:
        """
        Call a Dify workflow app.

        :param api_key: Dify app API key ("app-xxx" format)
        :param inputs: variable dict; keys must match the variables configured in Dify
        :param user: caller identity forwarded to Dify
        :return: the workflow `outputs` dict, or "" on any failure
        """
        payload = {
            "inputs": inputs,
            "response_mode": "blocking",
            "user": user,
        }
        data = await self._post_json("workflows/run", api_key, payload, "Dify Workflow")
        if data is None:
            return ""
        outputs = data.get("data", {}).get("outputs", {})
        logger.info(
            "Dify Workflow 调用成功: outputs_keys=%s (key=...%s)",
            list(outputs.keys()) if isinstance(outputs, dict) else "N/A",
            api_key[-6:],
        )
        return outputs


# Global singleton, using the Dify base URL from settings
dify_client = DifyClient(base_url=settings.DIFY_BASE_URL)
+64
View File
@@ -0,0 +1,64 @@
# -*- coding: utf-8 -*-
"""
安全模块:JWT 令牌签发/验证 + 密码哈希
使用 python-jose 进行 JWT 操作,bcrypt 直接进行密码哈希。
注意:passlib 已不再维护,与 bcrypt>=5.0 不兼容,故直接使用 bcrypt 库。
"""
from datetime import datetime, timedelta, timezone
import bcrypt
from jose import JWTError, jwt
from app.core.config import settings
def hash_password(plain_password: str) -> str:
    """Hash a plaintext password into bcrypt format for storage."""
    return bcrypt.hashpw(
        plain_password.encode("utf-8"), bcrypt.gensalt()
    ).decode("utf-8")


def verify_password(plain_password: str, hashed_password: str) -> bool:
    """
    Check a plaintext password against the stored bcrypt hash.

    Returns False instead of raising when the stored hash is malformed
    (bcrypt.checkpw raises ValueError on an invalid salt/hash), so a
    corrupted or legacy row cannot turn a login attempt into a 500.
    """
    try:
        return bcrypt.checkpw(
            plain_password.encode("utf-8"),
            hashed_password.encode("utf-8"),
        )
    except ValueError:
        return False


def create_access_token(
    subject: str,
    role: str,
    expires_delta: timedelta | None = None,
) -> str:
    """
    Issue a JWT access token.

    :param subject: user identifier (usually username or user_id)
    :param role: user role (admin / user), embedded in the claims for authorization
    :param expires_delta: custom lifetime; defaults to the configured value
    """
    expire = datetime.now(timezone.utc) + (
        expires_delta or timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
    )
    payload = {
        "sub": subject,
        "role": role,
        "exp": expire,
    }
    return jwt.encode(payload, settings.SECRET_KEY, algorithm=settings.JWT_ALGORITHM)


def decode_access_token(token: str) -> dict | None:
    """
    Decode and validate a JWT.

    :return: the payload dict, or None when the token is invalid or expired
    """
    try:
        return jwt.decode(
            token, settings.SECRET_KEY, algorithms=[settings.JWT_ALGORITHM]
        )
    except JWTError:
        return None
View File
+63
View File
@@ -0,0 +1,63 @@
# -*- coding: utf-8 -*-
"""
用户 CRUD 数据访问层
封装所有用户相关的数据库操作,业务逻辑层只调用此模块,不直接写 SQL。
"""
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.security import hash_password, verify_password
from app.models.user import User
from app.schemas.user import UserCreate, UserUpdate
async def get_user_by_username(db: AsyncSession, username: str) -> User | None:
    """Look up a user by username; None when no row matches."""
    result = await db.execute(select(User).where(User.username == username))
    return result.scalar_one_or_none()


async def authenticate_user(
    db: AsyncSession, username: str, password: str
) -> User | None:
    """Validate a username/password pair; return the User on success, else None."""
    candidate = await get_user_by_username(db, username)
    if candidate is None or not candidate.is_active:
        return None
    if verify_password(password, candidate.password_hash):
        return candidate
    return None


async def create_user(db: AsyncSession, data: UserCreate) -> User:
    """Insert a new user row (flushed, not committed — get_db commits per request)."""
    new_user = User(
        username=data.username,
        password_hash=hash_password(data.password),
        role=data.role,
        permissions=data.permissions,
    )
    db.add(new_user)
    # flush assigns the autoincrement ID without committing the transaction
    await db.flush()
    await db.refresh(new_user)
    return new_user


async def update_user(db: AsyncSession, user: User, data: UserUpdate) -> User:
    """Apply a partial update; a plaintext password is re-hashed before storage."""
    changes = data.model_dump(exclude_unset=True)
    if "password" in changes:
        changes["password_hash"] = hash_password(changes.pop("password"))
    for attr, new_value in changes.items():
        setattr(user, attr, new_value)
    await db.flush()
    await db.refresh(user)
    return user


async def delete_user(db: AsyncSession, user: User) -> None:
    """Remove a user row (flushed; committed by the request-scoped session)."""
    await db.delete(user)
    await db.flush()
+72
View File
@@ -0,0 +1,72 @@
# -*- coding: utf-8 -*-
"""
FastAPI 应用入口
组装中间件、CORS、路由,启动 ASGI 应用。
"""
import logging
from contextlib import asynccontextmanager
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from app.api.v1.router import api_v1_router
from app.core.config import settings
from app.middleware.audit import AuditMiddleware
# ---- Logging configuration ----
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s | %(name)-12s | %(levelname)-5s | %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
)
logger = logging.getLogger(__name__)


# ---- Lifespan management (replaces the deprecated on_event hooks) ----
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Startup/shutdown hooks executed around the application's lifetime."""
    logger.info("SHBL-CRM 后端服务启动 | 版本: %s", settings.APP_VERSION)
    logger.info("数据库连接: %s@%s:%s/%s",
                settings.DB_USER, settings.DB_HOST, settings.DB_PORT, settings.DB_NAME)
    yield
    logger.info("SHBL-CRM 后端服务关闭")


# ---- Create the FastAPI instance ----
app = FastAPI(
    title=settings.APP_NAME,
    version=settings.APP_VERSION,
    description="天津硕博霖客户信息管理系统 - 后端 API",
    docs_url="/api/docs",  # Swagger UI path
    redoc_url="/api/redoc",  # ReDoc path
    openapi_url="/api/openapi.json",
    lifespan=lifespan,
)

# ---- 1. Audit middleware (added first so it wraps every request) ----
app.add_middleware(AuditMiddleware)

# ---- 2. CORS (strict allow-list; allow_origins=["*"] is forbidden) ----
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.CORS_ORIGINS,  # only origins listed in the config
    allow_credentials=True,
    allow_methods=["GET", "POST", "PUT", "DELETE", "PATCH"],
    allow_headers=["Authorization", "Content-Type"],
)

# ---- 3. Mount the API routes ----
app.include_router(api_v1_router, prefix="/api/v1")


# ---- Root path (optional; quick liveness check) ----
@app.get("/", tags=["系统"])
async def root():
    """Return the service banner: name, version and docs URL."""
    return {
        "service": settings.APP_NAME,
        "version": settings.APP_VERSION,
        "docs": "/api/docs",
    }
View File
+59
View File
@@ -0,0 +1,59 @@
# -*- coding: utf-8 -*-
"""
全局审计中间件
拦截所有入站 HTTP 请求,记录:方法、URL、客户端 IP、耗时、响应状态码。
日志输出到标准 logging,生产环境可对接 ELK / Loki 等日志收集系统。
"""
import logging
import time
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
from starlette.requests import Request
from starlette.responses import Response
# Dedicated audit logger, kept separate from business logging
audit_logger = logging.getLogger("audit")
audit_logger.setLevel(logging.INFO)


class AuditMiddleware(BaseHTTPMiddleware):
    """
    Audit middleware - records the key facts about every request.

    Log format: [AUDIT] <client IP> <method> <URL> <status> <elapsed ms>
    """

    async def dispatch(
        self, request: Request, call_next: RequestResponseEndpoint
    ) -> Response:
        # Resolve the real client IP. X-Forwarded-For (set by the reverse
        # proxy) may contain a comma-separated hop chain; the first entry
        # is the originating client, so log that rather than the raw header.
        forwarded_for = request.headers.get("X-Forwarded-For")
        if forwarded_for:
            client_ip = forwarded_for.split(",")[0].strip()
        else:
            client_ip = request.client.host if request.client else "unknown"
        method = request.method
        url = str(request.url)
        start_time = time.perf_counter()
        try:
            response = await call_next(request)
        except Exception:
            # Unhandled exceptions still produce an audit entry
            elapsed_ms = (time.perf_counter() - start_time) * 1000
            audit_logger.error(
                "[AUDIT] %s %s %s 500 %.1fms (unhandled exception)",
                client_ip, method, url, elapsed_ms,
            )
            raise
        elapsed_ms = (time.perf_counter() - start_time) * 1000
        audit_logger.info(
            "[AUDIT] %s %s %s %d %.1fms",
            client_ip, method, url, response.status_code, elapsed_ms,
        )
        # Expose the timing in a response header (handy for debugging;
        # may be stripped in production)
        response.headers["X-Request-Duration-Ms"] = f"{elapsed_ms:.1f}"
        return response
+9
View File
@@ -0,0 +1,9 @@
# 在此处导入所有 ORM 模型,供 Alembic 自动检测
from app.models.user import User # noqa: F401
from app.models.client import Client # noqa: F401
from app.models.crm_business import ( # noqa: F401
CustomerLog,
CustomerTag,
FollowUpToDo,
SalesOpportunity,
)
+54
View File
@@ -0,0 +1,54 @@
# -*- coding: utf-8 -*-
"""
客户主表模型
CRM 的核心实体,所有业务表 (日志/标签/待办/销售机会) 均通过外键关联到此表。
"""
import uuid
from datetime import datetime
from sqlalchemy import String, Text, func
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import Mapped, mapped_column
from app.core.database import Base
class Client(Base):
    """
    Customer master table (clients).

    Core CRM entity: every business table (logs/tags/todos/opportunities)
    references it by foreign key.
    """
    __tablename__ = "clients"

    # UUID primary key, generated application-side
    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), primary_key=True, default=uuid.uuid4,
    )
    name: Mapped[str] = mapped_column(
        String(200), nullable=False, index=True,
        comment="客户名称 (公司名/个人名)",
    )
    contact_person: Mapped[str | None] = mapped_column(
        String(100), nullable=True,
        comment="联系人姓名",
    )
    phone: Mapped[str | None] = mapped_column(
        String(30), nullable=True,
        comment="联系电话",
    )
    address: Mapped[str | None] = mapped_column(
        String(500), nullable=True,
        comment="地址",
    )
    notes: Mapped[str | None] = mapped_column(
        Text, nullable=True,
        comment="备注",
    )
    # Timestamps are DB-generated (func.now()); updated_at refreshes on UPDATE
    created_at: Mapped[datetime] = mapped_column(
        server_default=func.now(),
    )
    updated_at: Mapped[datetime] = mapped_column(
        server_default=func.now(),
        onupdate=func.now(),
    )
+132
View File
@@ -0,0 +1,132 @@
# -*- coding: utf-8 -*-
"""
CRM 业务数据模型
定义客户沟通日志、标签、跟进待办、销售机会四张业务表。
所有主键均为 UUID,与 User/KnowledgeChunk 保持一致的 ID 策略。
"""
import uuid
from datetime import datetime
from sqlalchemy import ForeignKey, String, Text, Numeric, func
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import Mapped, mapped_column
from app.core.database import Base
class CustomerLog(Base):
    """
    Customer communication log table.

    One row per customer interaction (phone/visit/WeChat, ...); inserting
    a log triggers the background AI task that extracts tags and a todo.
    """
    __tablename__ = "customer_logs"
    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), primary_key=True, default=uuid.uuid4,
    )
    customer_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("clients.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
        comment="关联客户 ID",
    )
    content: Mapped[str] = mapped_column(
        Text, nullable=False, comment="沟通日志内容",
    )
    created_at: Mapped[datetime] = mapped_column(
        server_default=func.now(), comment="记录时间",
    )


class CustomerTag(Base):
    """
    Customer tag table.

    Tags are extracted automatically by the AI from communication logs and
    can also be added manually. Uniqueness of a tag name per customer is
    enforced in business logic, not by a DB constraint.
    """
    __tablename__ = "customer_tags"
    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), primary_key=True, default=uuid.uuid4,
    )
    customer_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("clients.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
        comment="关联客户 ID",
    )
    tag_name: Mapped[str] = mapped_column(
        String(100), nullable=False, comment="标签名称,如'价格敏感''决策周期长'",
    )
    created_at: Mapped[datetime] = mapped_column(
        server_default=func.now(),
    )


class FollowUpToDo(Base):
    """
    Follow-up todo table.

    Next-step suggestions generated by the AI from communication logs, or
    created manually. `status` is a simple two-state field: pending / done.
    """
    __tablename__ = "follow_up_todos"
    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), primary_key=True, default=uuid.uuid4,
    )
    customer_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("clients.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
        comment="关联客户 ID",
    )
    task_desc: Mapped[str] = mapped_column(
        Text, nullable=False, comment="待办任务描述",
    )
    status: Mapped[str] = mapped_column(
        String(20), nullable=False, default="pending",
        comment="状态: pending(待处理) / done(已完成)",
    )
    created_at: Mapped[datetime] = mapped_column(
        server_default=func.now(),
    )


class SalesOpportunity(Base):
    """
    Sales opportunity table.

    Tracks each customer's funnel stage and amount, feeding the dashboard
    and the monthly review report.
    Four stages: 意向 -> 谈判 -> 成交 -> 流失
    """
    __tablename__ = "sales_opportunities"
    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), primary_key=True, default=uuid.uuid4,
    )
    customer_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("clients.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
        comment="关联客户 ID",
    )
    amount: Mapped[float] = mapped_column(
        Numeric(12, 2), nullable=False, default=0,
        comment="预估/实际金额 (元)",
    )
    stage: Mapped[str] = mapped_column(
        String(20), nullable=False, default="意向",
        comment="漏斗阶段: 意向 / 谈判 / 成交 / 流失",
    )
    created_at: Mapped[datetime] = mapped_column(
        server_default=func.now(),
    )
+46
View File
@@ -0,0 +1,46 @@
# -*- coding: utf-8 -*-
"""
用户 ORM 模型
对应数据库 users 表,使用 SQLAlchemy 2.0 Mapped 注解风格。
"""
from datetime import datetime, timezone
from sqlalchemy import String, func
from sqlalchemy.orm import Mapped, mapped_column
from app.core.database import Base
class User(Base):
    """Users table - account, password hash, role and permission list."""
    __tablename__ = "users"
    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
    username: Mapped[str] = mapped_column(
        String(50), unique=True, nullable=False, index=True, comment="登录用户名"
    )
    password_hash: Mapped[str] = mapped_column(
        String(255), nullable=False, comment="bcrypt 哈希密码"
    )
    role: Mapped[str] = mapped_column(
        String(20), nullable=False, default="user", comment="角色: admin / user"
    )
    permissions: Mapped[str] = mapped_column(
        String(200), nullable=False, default="view,edit", comment="逗号分隔权限列表"
    )
    is_active: Mapped[bool] = mapped_column(
        default=True, comment="账户是否启用"
    )
    created_at: Mapped[datetime] = mapped_column(
        server_default=func.now(), comment="创建时间"
    )
    # NOTE(review): created_at is set server-side (func.now()) while updated_at's
    # onupdate runs client-side with an aware UTC datetime — confirm the column
    # type handles naive vs aware values consistently.
    updated_at: Mapped[datetime] = mapped_column(
        server_default=func.now(),
        onupdate=lambda: datetime.now(timezone.utc),
        comment="最后更新时间",
    )

    def __repr__(self) -> str:
        return f"<User(id={self.id}, username='{self.username}', role='{self.role}')>"
View File
+55
View File
@@ -0,0 +1,55 @@
# -*- coding: utf-8 -*-
"""
用户相关 Pydantic v2 校验模型 (DTO)
用于请求体验证和响应序列化,与 ORM 模型解耦。
"""
from datetime import datetime
from pydantic import BaseModel, Field, ConfigDict
# ---- Request models ----
class UserLogin(BaseModel):
    """Login request body."""
    username: str = Field(..., min_length=2, max_length=50, examples=["admin"])
    password: str = Field(..., min_length=6, max_length=128)


class UserCreate(BaseModel):
    """Create-user request (admin-only operation)."""
    username: str = Field(..., min_length=2, max_length=50)
    password: str = Field(..., min_length=6, max_length=128)
    role: str = Field(default="user", pattern=r"^(admin|user)$")
    permissions: str = Field(default="view,edit")


class UserUpdate(BaseModel):
    """Partial user-update request; unset fields are left untouched."""
    password: str | None = Field(default=None, min_length=6, max_length=128)
    role: str | None = Field(default=None, pattern=r"^(admin|user)$")
    permissions: str | None = None
    is_active: bool | None = None


# ---- Response models ----
class UserOut(BaseModel):
    """User info response (sanitized: no password hash)."""
    model_config = ConfigDict(from_attributes=True)  # allow building from ORM objects

    id: int
    username: str
    role: str
    permissions: str
    is_active: bool
    created_at: datetime


class Token(BaseModel):
    """JWT token response."""
    access_token: str
    token_type: str = "bearer"
    role: str
    username: str
View File
+145
View File
@@ -0,0 +1,145 @@
# -*- coding: utf-8 -*-
"""
AI 工作流服务 (Dify BaaS 版本)
事件驱动的异步 AI 任务:从客户沟通日志中自动提取标签和生成跟进待办。
架构要点:
- 此函数由 FastAPI BackgroundTasks 调用,运行在独立的后台线程中
- 必须使用独立的 DB Session 生命周期,严禁与主请求共享 Session
- AI 调用通过 Dify 平台 API(非直连大模型)
- AI 解析失败时静默降级(记录日志),绝不抛出异常导致主线程崩溃
"""
import json
import logging
import uuid
from app.core.dify_client import dify_client
from app.core.database import AsyncSessionLocal
from app.models.crm_business import CustomerTag, FollowUpToDo
from app.core.config import settings
logger = logging.getLogger("ai_workflow")


async def process_log_with_ai(
    log_id: uuid.UUID,
    content: str,
    customer_id: uuid.UUID,
) -> None:
    """
    Background AI task: analyse a communication log, extract tags and a todo.

    *** Key constraints ***
    This runs inside FastAPI BackgroundTasks, so it must:
    1. use its own AsyncSession (never share the request's session)
    2. catch every exception internally and never propagate
    3. degrade silently when the AI returns a malformed payload
    """
    logger.info("开始 AI 处理: log_id=%s, customer_id=%s", log_id, customer_id)
    if not settings.DIFY_LOG_APP_API_KEY:
        logger.error("DIFY_LOG_APP_API_KEY 未配置,跳过 AI 处理")
        return
    # ---- Independent DB session lifecycle ----
    async with AsyncSessionLocal() as db:
        try:
            # Step 1: call the Dify log-analysis workflow app
            # *** the `inputs` keys must match the variable names configured in Dify ***
            # the Dify workflow must define an input variable named "log_content"
            workflow_outputs = await dify_client.call_workflow(
                api_key=settings.DIFY_LOG_APP_API_KEY,
                inputs={"log_content": content},
            )
            if not workflow_outputs:
                logger.warning("Dify 返回空响应,跳过入库 (log_id=%s)", log_id)
                return
            # The workflow returns a dict; serialize it back to JSON text so
            # the existing parsing pipeline (_extract_json + json.loads) applies
            if isinstance(workflow_outputs, dict):
                raw_response = json.dumps(workflow_outputs, ensure_ascii=False)
            else:
                raw_response = str(workflow_outputs)
            logger.debug("Dify Workflow 原始返回: %s", raw_response[:500])
            # Step 2: parse the JSON response (fault-tolerant)
            json_str = _extract_json(raw_response)
            result = json.loads(json_str)
            # assumes the AI returns {"tags": [...], "next_task": "..."} —
            # a non-list "tags" value would be iterated char-by-char; TODO confirm
            tags: list[str] = result.get("tags", [])
            next_task: str = result.get("next_task", "")
            # Step 3: store at most 3 tags
            if tags:
                for tag_name in tags[:3]:
                    tag_name = tag_name.strip()
                    if not tag_name:
                        continue
                    tag = CustomerTag(
                        customer_id=customer_id,
                        tag_name=tag_name,
                    )
                    db.add(tag)
                logger.info("写入 %d 个标签: %s", len(tags[:3]), tags[:3])
            # Step 4: store the follow-up todo
            if next_task and next_task.strip():
                todo = FollowUpToDo(
                    customer_id=customer_id,
                    task_desc=next_task.strip(),
                    status="pending",
                )
                db.add(todo)
                logger.info("写入待办: %s", next_task.strip()[:100])
            await db.commit()
            logger.info("AI 处理完成: log_id=%s", log_id)
        except json.JSONDecodeError as e:
            # raw_response is always bound here: JSONDecodeError can only come
            # from json.loads, which runs after raw_response is assigned
            logger.error(
                "Dify 返回 JSON 解析失败 (log_id=%s): %s | 原始响应: %s",
                log_id, e, raw_response[:300],
            )
            await db.rollback()
        except Exception as e:
            logger.error(
                "AI 后台任务异常 (log_id=%s): %s",
                log_id, e, exc_info=True,
            )
            await db.rollback()
def _extract_json(text: str) -> str:
"""
从 Dify/LLM 响应中提取 JSON 字符串。
处理常见的"包裹"行为:
- 直接返回 JSON
- 用 ```json ... ``` 包裹
- 在 JSON 前后加解释文字
"""
text = text.strip()
# 尝试提取 ```json ... ``` 代码块
if "```json" in text:
start = text.index("```json") + len("```json")
end = text.index("```", start)
return text[start:end].strip()
if "```" in text:
start = text.index("```") + len("```")
end = text.index("```", start)
return text[start:end].strip()
# 尝试找到第一个 { 和最后一个 }
first_brace = text.find("{")
last_brace = text.rfind("}")
if first_brace != -1 and last_brace != -1 and last_brace > first_brace:
return text[first_brace:last_brace + 1]
# 原样返回,让 json.loads 报错触发容错
return text
+127
View File
@@ -0,0 +1,127 @@
# -*- coding: utf-8 -*-
"""
销售数据分析服务 (Dify BaaS 版本)
基于 SQL 预聚合的销售漏斗统计 + Dify AI 驱动的复盘报告生成。
"""
import logging
from datetime import datetime, timezone
from sqlalchemy import func, select, extract, case
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.config import settings
from app.core.dify_client import dify_client
from app.models.crm_business import SalesOpportunity
logger = logging.getLogger("analytics")


# ============================================================
# 1. SQL pre-aggregation: sales funnel metrics
# ============================================================
async def get_sales_metrics(db: AsyncSession) -> list[dict]:
    """
    Aggregate the current month's opportunity count and total amount per stage.

    The GROUP BY aggregation runs entirely inside the database.
    """
    now = datetime.now(timezone.utc)
    stmt = (
        select(
            SalesOpportunity.stage,
            func.count(SalesOpportunity.id).label("count"),
            func.coalesce(func.sum(SalesOpportunity.amount), 0).label("total_amount"),
        )
        .where(
            extract("year", SalesOpportunity.created_at) == now.year,
            extract("month", SalesOpportunity.created_at) == now.month,
        )
        .group_by(SalesOpportunity.stage)
        .order_by(
            # fixed funnel ordering: 意向(1) -> 谈判(2) -> 成交(3) -> 流失(4)
            case(
                (SalesOpportunity.stage == "意向", 1),
                (SalesOpportunity.stage == "谈判", 2),
                (SalesOpportunity.stage == "成交", 3),
                (SalesOpportunity.stage == "流失", 4),
                else_=5,
            )
        )
    )
    result = await db.execute(stmt)
    rows = result.all()
    metrics = [
        {
            "stage": row.stage,
            "count": row.count,
            "total_amount": float(row.total_amount),
        }
        for row in rows
    ]
    logger.info("当月销售指标: %s", metrics)
    return metrics


# ============================================================
# 2. Dify AI review-report generation
# ============================================================
async def generate_monthly_report(db: AsyncSession) -> dict:
    """
    Build the monthly sales review report:
    1. pre-aggregate the real metrics with SQL
    2. pass the structured data to the Dify report app via `inputs`
    3. return the report text produced by Dify

    :return: {"metrics": [...], "report": "<Dify-generated text>"}
    """
    # Step 1: fetch the real sales figures
    metrics = await get_sales_metrics(db)
    if not metrics:
        return {
            "metrics": [],
            "report": "当月暂无销售机会数据,无法生成复盘报告。",
        }
    # Step 2: serialize the metrics into text Dify can consume
    # *** the `inputs` keys must match the variable names configured in Dify ***
    # the Dify report app must define an input variable named "metrics_data"
    metrics_text = "\n".join(
        f"- {m['stage']}: {m['count']} 个机会, 总金额 ¥{m['total_amount']:,.2f}"
        for m in metrics
    )
    total_count = sum(m["count"] for m in metrics)
    total_amount = sum(m["total_amount"] for m in metrics)
    metrics_text += f"\n- 合计: {total_count} 个机会, 总金额 ¥{total_amount:,.2f}"
    # Step 3: call the Dify report app
    if not settings.DIFY_REPORT_APP_API_KEY:
        logger.error("DIFY_REPORT_APP_API_KEY 未配置")
        return {
            "metrics": metrics,
            "report": "AI 报告服务未配置,请联系管理员。",
        }
    # Build the mandatory report_period input expected by the Dify app
    now = datetime.now(timezone.utc)
    report_period = f"{now.year}年{now.month:02d}月"
    report_text = await dify_client.call_text_generator(
        api_key=settings.DIFY_REPORT_APP_API_KEY,
        inputs={"metrics_data": metrics_text, "report_period": report_period},
    )
    if not report_text:
        report_text = "AI 报告生成失败,请稍后重试或检查 Dify 服务状态。"
    logger.info("月度复盘报告生成完成 (%d 字)", len(report_text))
    return {
        "metrics": metrics,
        "report": report_text,
    }