v0.1.0: CRM/ERP 系统内测版本 - 安全加固完成

- Docker bridge 网络隔离(8000 端口封死)
- Gunicorn 4 Worker 多进程
- Alembic 数据库迁移基线
- 日志轮转 20m×3
- JWT 密钥 + DB 密码 + CORS 收紧
- 3-2-1 备份链路(NAS + R740-B 冷备)
- 连接池 pool_pre_ping + pool_recycle=3600
This commit is contained in:
hankin
2026-03-16 07:31:37 +00:00
commit 423baff73b
2578 changed files with 824643 additions and 0 deletions
+21
View File
@@ -0,0 +1,21 @@
APP_NAME=SHBL-CRM
APP_VERSION=2.0.0
DEBUG=true
# PostgreSQL connection — SECURITY: real credentials are committed to version control here; rotate them and move this file to secret storage (keep only .env.example in the repo)
DB_HOST=192.168.1.85
DB_PORT=5432
DB_USER=admin
DB_PASSWORD=admin_password_2026
DB_NAME=lubrication_crm
# JWT secret key — SECURITY: committed secret; rotate and replace with a randomly generated value before any production use
SECRET_KEY=dev_secret_key_replace_in_production_64chars_minimum_length_ok
# CORS 白名单
CORS_ORIGINS=["http://localhost:5173","http://localhost:8080"]
# Dify BaaS 平台
DIFY_BASE_URL=http://192.168.1.88/v1
DIFY_LOG_APP_API_KEY=app-gMi1uhkJXjteZk1Qc27Ve8Jw
DIFY_REPORT_APP_API_KEY=app-dhEK3cgt7iqksRMaviNqUu9W
+24
View File
@@ -0,0 +1,24 @@
# ---- SHBL-CRM Backend Environment ----
# Copy to .env and fill in real values
APP_NAME=SHBL-CRM
APP_VERSION=2.0.0
DEBUG=true
# PostgreSQL
DB_HOST=127.0.0.1
DB_PORT=5432
DB_USER=crm_admin
DB_PASSWORD=change_me_in_production
DB_NAME=shbl_crm
# JWT
SECRET_KEY=REPLACE_WITH_RANDOM_64_CHAR_HEX
# CORS
CORS_ORIGINS=["http://localhost:5173","http://localhost:8080"]
# Dify BaaS (http://192.168.1.88)
DIFY_BASE_URL=http://192.168.1.88/v1
DIFY_LOG_APP_API_KEY=app-xxx
DIFY_REPORT_APP_API_KEY=app-xxx
+35
View File
@@ -0,0 +1,35 @@
# -*- coding: utf-8 -*-
"""One-off inspection script: dump column definitions and constraints for the
legacy CRM tables so the Alembic baseline migration can be written against the
real schema.

Fixes over the original: SQL is parameterized instead of f-string interpolated,
the cursor is managed by a context manager, and the connection is closed even
when a query fails.

NOTE(review): credentials are hard-coded and match the committed .env —
rotate them and read from environment variables instead.
"""
import psycopg2

TABLES = ["users", "clients", "follow_ups", "expenses"]

conn = psycopg2.connect(
    host="192.168.1.85",
    port=5432,
    user="admin",
    password="admin_password_2026",
    dbname="lubrication_crm",
)
try:
    with conn.cursor() as cur:
        for t in TABLES:
            # Column layout (parameterized: no string interpolation into SQL)
            cur.execute(
                """
                SELECT column_name, data_type, is_nullable, column_default
                FROM information_schema.columns
                WHERE table_name = %s
                ORDER BY ordinal_position
                """,
                (t,),
            )
            print(f"\n=== {t} ===")
            for row in cur.fetchall():
                print(f"  {row[0]:20s} | {row[1]:20s} | null={row[2]} | default={row[3]}")
            # Constraints (PK / FK / unique / check) attached to the table
            cur.execute(
                """
                SELECT conname, contype
                FROM pg_constraint
                JOIN pg_class ON conrelid = pg_class.oid
                WHERE pg_class.relname = %s
                """,
                (t,),
            )
            constraints = cur.fetchall()
            if constraints:
                print(f"  Constraints: {constraints}")
finally:
    conn.close()
+36
View File
@@ -0,0 +1,36 @@
[alembic]
script_location = alembic
sqlalchemy.url =
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %%(levelname)-5.5s [%%(name)s] %%(message)s
datefmt = %%H:%%M:%%S
+64
View File
@@ -0,0 +1,64 @@
# -*- coding: utf-8 -*-
"""
Alembic migration environment.

Pulls the database URL dynamically from app.core.config and exposes the
project's ORM metadata so autogenerate can detect model changes.
"""
from logging.config import fileConfig
from alembic import context
from sqlalchemy import engine_from_config, pool
from app.core.config import settings
from app.core.database import Base
# Import every model module so Alembic's autogenerate can see all tables.
import app.models # noqa: F401
# The Alembic Config object (wraps values from alembic.ini).
config = context.config
# Inject the DB URL at runtime (sync driver: Alembic does not drive async engines).
config.set_main_option("sqlalchemy.url", settings.DATABASE_URL_SYNC)
# Configure Python logging from the ini file, if one was given.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)
# MetaData object Alembic diffs against when autogenerating migrations.
target_metadata = Base.metadata
def run_migrations_offline() -> None:
    """Offline mode: emit migration SQL without opening a DB connection."""
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Online mode: connect to the database and apply migrations directly."""
    ini_section = config.get_section(config.config_ini_section, {})
    # NullPool: migrations are one-shot, no reason to keep connections around.
    connectable = engine_from_config(
        ini_section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with connectable.connect() as db_conn:
        context.configure(connection=db_conn, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
# Entry point: Alembic decides offline/online mode before loading this script.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
+25
View File
@@ -0,0 +1,25 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}
@@ -0,0 +1,143 @@
"""initial_full_schema
Renames legacy tables to *_legacy backup, then creates
all tables with correct UUID PKs, types, and constraints.
Revision ID: 0001
Revises:
Create Date: 2026-02-24
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
revision: str = "0001"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
# Legacy tables to rename (preserve data)
LEGACY_TABLES = ["users", "clients", "follow_ups", "expenses"]
def upgrade() -> None:
    """Baseline migration: park any pre-existing tables as *_legacy, then
    create the v2 schema (UUID PKs, FKs with ON DELETE CASCADE, per-customer
    indexes).

    NOTE(review): raw DDL is executed via conn.execute() instead of
    op.create_table, so autogenerate has no record of these tables beyond
    the ORM models — the models must match this SQL exactly. Mismatches
    spotted against the visible models are flagged inline below.
    """
    conn = op.get_bind()
    # Step 1: Rename legacy tables to *_legacy.
    # Table names come only from the fixed LEGACY_TABLES list, so the
    # f-string interpolation below cannot inject user input.
    for table in LEGACY_TABLES:
        exists = conn.execute(sa.text(
            f"SELECT EXISTS (SELECT 1 FROM information_schema.tables "
            f"WHERE table_schema='public' AND table_name='{table}')"
        )).scalar()
        if exists:
            conn.execute(sa.text(
                f'ALTER TABLE "{table}" RENAME TO "{table}_legacy"'
            ))
    # Step 2: Create new tables with proper schema.
    # ---- users ----
    # NOTE(review): id is UUID here, but app/models/user.py declares an
    # integer autoincrement PK; role default is 'viewer' here vs 'user' in
    # the model; permissions is JSON here vs a VARCHAR in the model;
    # password_hash is 128 here vs 255 in the model. Reconcile before use.
    conn.execute(sa.text("""
        CREATE TABLE users (
            id UUID PRIMARY KEY,
            username VARCHAR(50) UNIQUE NOT NULL,
            password_hash VARCHAR(128) NOT NULL,
            role VARCHAR(20) NOT NULL DEFAULT 'viewer',
            permissions JSON DEFAULT '[]',
            is_active BOOLEAN DEFAULT true,
            created_at TIMESTAMP DEFAULT now(),
            updated_at TIMESTAMP DEFAULT now()
        );
    """))
    # ---- clients ----
    conn.execute(sa.text("""
        CREATE TABLE clients (
            id UUID PRIMARY KEY,
            name VARCHAR(200) NOT NULL,
            contact_person VARCHAR(100),
            phone VARCHAR(30),
            address VARCHAR(500),
            notes TEXT,
            created_at TIMESTAMP DEFAULT now(),
            updated_at TIMESTAMP DEFAULT now()
        );
    """))
    conn.execute(sa.text(
        "CREATE INDEX ix_clients_name ON clients (name);"
    ))
    # ---- customer_logs ----
    conn.execute(sa.text("""
        CREATE TABLE customer_logs (
            id UUID PRIMARY KEY,
            customer_id UUID NOT NULL REFERENCES clients(id) ON DELETE CASCADE,
            content TEXT NOT NULL,
            created_at TIMESTAMP DEFAULT now()
        );
    """))
    conn.execute(sa.text(
        "CREATE INDEX ix_customer_logs_cid ON customer_logs (customer_id);"
    ))
    # ---- customer_tags ----
    conn.execute(sa.text("""
        CREATE TABLE customer_tags (
            id UUID PRIMARY KEY,
            customer_id UUID NOT NULL REFERENCES clients(id) ON DELETE CASCADE,
            tag_name VARCHAR(100) NOT NULL,
            created_at TIMESTAMP DEFAULT now()
        );
    """))
    conn.execute(sa.text(
        "CREATE INDEX ix_customer_tags_cid ON customer_tags (customer_id);"
    ))
    # ---- follow_up_todos ----
    conn.execute(sa.text("""
        CREATE TABLE follow_up_todos (
            id UUID PRIMARY KEY,
            customer_id UUID NOT NULL REFERENCES clients(id) ON DELETE CASCADE,
            task_desc TEXT NOT NULL,
            status VARCHAR(20) NOT NULL DEFAULT 'pending',
            created_at TIMESTAMP DEFAULT now()
        );
    """))
    conn.execute(sa.text(
        "CREATE INDEX ix_follow_up_todos_cid ON follow_up_todos (customer_id);"
    ))
    # ---- sales_opportunities ----
    # NOTE(review): stage default is 'intent' here but the ORM model defaults
    # to the Chinese label '意向' — stage aggregations will split into two
    # buckets unless one of the two is fixed.
    conn.execute(sa.text("""
        CREATE TABLE sales_opportunities (
            id UUID PRIMARY KEY,
            customer_id UUID NOT NULL REFERENCES clients(id) ON DELETE CASCADE,
            amount NUMERIC(12, 2) NOT NULL DEFAULT 0,
            stage VARCHAR(20) NOT NULL DEFAULT 'intent',
            created_at TIMESTAMP DEFAULT now()
        );
    """))
    conn.execute(sa.text(
        "CREATE INDEX ix_sales_opp_cid ON sales_opportunities (customer_id);"
    ))
def downgrade() -> None:
    """Revert the baseline: drop the v2 tables, then put the preserved
    *_legacy tables back under their original names."""
    bind = op.get_bind()
    # Children first so FK references never block a drop (CASCADE is a
    # belt-and-braces extra).
    new_tables = (
        "sales_opportunities",
        "follow_up_todos",
        "customer_tags",
        "customer_logs",
        "clients",
        "users",
    )
    for name in new_tables:
        bind.execute(sa.text(f'DROP TABLE IF EXISTS "{name}" CASCADE'))
    # Rename each backup back into place, if it exists.
    for name in LEGACY_TABLES:
        backup_exists = bind.execute(sa.text(
            f"SELECT EXISTS (SELECT 1 FROM information_schema.tables "
            f"WHERE table_schema='public' AND table_name='{name}_legacy')"
        )).scalar()
        if backup_exists:
            bind.execute(sa.text(
                f'ALTER TABLE "{name}_legacy" RENAME TO "{name}"'
            ))
+1
View File
@@ -0,0 +1 @@
# SHBL-CRM Backend Application Package
View File
+40
View File
@@ -0,0 +1,40 @@
# -*- coding: utf-8 -*-
"""
Shared API dependencies.

Provides JWT bearer-token validation dependencies for routes that
require authentication.
"""
from fastapi import Depends, HTTPException, status
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from app.core.security import decode_access_token
# Bearer token extractor; auto_error=True lets FastAPI reject requests
# with a missing/malformed Authorization header by itself.
bearer_scheme = HTTPBearer(auto_error=True)
async def get_current_user(
    credentials: HTTPAuthorizationCredentials = Depends(bearer_scheme),
) -> dict:
    """Decode the JWT carried in ``Authorization: Bearer <token>``.

    Returns the token payload dict, or raises 401 when the token is
    invalid or expired.
    Usage: ``current_user: dict = Depends(get_current_user)``
    """
    claims = decode_access_token(credentials.credentials)
    if claims is not None:
        return claims
    raise HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="令牌无效或已过期",
        headers={"WWW-Authenticate": "Bearer"},
    )
async def require_admin(current_user: dict = Depends(get_current_user)) -> dict:
    """Gate a route to the admin role; every other role gets a 403."""
    if current_user.get("role") == "admin":
        return current_user
    raise HTTPException(
        status_code=status.HTTP_403_FORBIDDEN,
        detail="权限不足,需要管理员角色",
    )
View File
+37
View File
@@ -0,0 +1,37 @@
# -*- coding: utf-8 -*-
"""
Authentication endpoints.

Handles user login and issues JWT access tokens.
"""
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db
from app.core.security import create_access_token
from app.crud.user import authenticate_user
from app.schemas.user import Token, UserLogin
router = APIRouter()
@router.post("/login", response_model=Token, summary="用户登录", tags=["认证"])
async def login(body: UserLogin, db: AsyncSession = Depends(get_db)):
"""
校验用户名密码,成功后签发 JWT access_token。
前端后续请求需在 Authorization 头携带 Bearer <token>。
"""
user = await authenticate_user(db, body.username, body.password)
if not user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="用户名或密码错误",
headers={"WWW-Authenticate": "Bearer"},
)
token = create_access_token(subject=user.username, role=user.role)
return Token(
access_token=token,
role=user.role,
username=user.username,
)
+31
View File
@@ -0,0 +1,31 @@
# -*- coding: utf-8 -*-
"""
健康检查端点
用于 Nginx/LB 探活和数据库连接状态探测。
"""
from fastapi import APIRouter, Depends
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db
router = APIRouter()
@router.get("/health", summary="健康检查", tags=["系统"])
async def health_check(db: AsyncSession = Depends(get_db)):
"""
探测服务与数据库连接是否存活。
- 数据库可达 → {"status": "healthy", "database": "connected"}
- 数据库不可达 → {"status": "degraded", "database": "disconnected", "detail": "..."}
"""
try:
await db.execute(text("SELECT 1"))
return {"status": "healthy", "database": "connected"}
except Exception as e:
return {
"status": "degraded",
"database": "disconnected",
"detail": str(e),
}
+81
View File
@@ -0,0 +1,81 @@
# -*- coding: utf-8 -*-
"""
客户沟通日志 API
POST /api/v1/logs - 提交日志并触发后台 AI 标签提取
"""
import uuid
from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException, status
from pydantic import BaseModel, Field
from sqlalchemy.ext.asyncio import AsyncSession
from app.api.deps import get_current_user
from app.core.database import get_db
from app.models.crm_business import CustomerLog
from app.services.ai_workflow import process_log_with_ai
router = APIRouter()
# ---- Request / response models ----
class LogCreate(BaseModel):
    """Request body for submitting a customer communication log."""
    customer_id: uuid.UUID = Field(..., description="关联客户 ID")
    content: str = Field(..., min_length=5, max_length=5000, description="沟通日志内容")
class LogResponse(BaseModel):
    """Immediate acknowledgement; AI enrichment continues in the background."""
    id: uuid.UUID
    message: str = "日志已提交,AI 正在后台分析标签和待办"
# ---- Routes ----
@router.post(
    "",
    response_model=LogResponse,
    status_code=status.HTTP_200_OK,
    summary="提交客户沟通日志",
    tags=["客户日志"],
)
async def create_customer_log(
    body: LogCreate,
    background_tasks: BackgroundTasks,
    db: AsyncSession = Depends(get_db),
    current_user: dict = Depends(get_current_user),
):
    """
    Accept a customer communication log from the frontend:
    1. Persist it to customer_logs immediately.
    2. Queue the AI tag-extraction task via BackgroundTasks.
    3. Return right away without waiting for the AI.

    The background task analyses the content, extracts customer tags into
    customer_tags, and generates a follow-up todo into follow_up_todos.

    NOTE(review): only flush() runs here — the commit happens in get_db's
    teardown. The background task opens its own session, so it must run
    after that commit; presumably the installed FastAPI version runs
    background tasks after dependency teardown — confirm, otherwise the
    task may not see the new row.
    NOTE(review): 201 Created would be more conventional for a
    resource-creating POST; changing it would affect API clients.
    """
    # Step 1: write the log row now (flush issues the INSERT;
    # the model's default=uuid.uuid4 supplies the PK).
    log = CustomerLog(
        customer_id=body.customer_id,
        content=body.content,
    )
    db.add(log)
    await db.flush()
    await db.refresh(log)
    # Step 2: enqueue AI processing. Pass plain values (log.id / content /
    # customer_id): the task creates its own DB session and must not share
    # this request-scoped one.
    background_tasks.add_task(
        process_log_with_ai,
        log_id=log.id,
        content=body.content,
        customer_id=body.customer_id,
    )
    # Step 3: respond immediately (no waiting for the AI).
    return LogResponse(id=log.id)
+55
View File
@@ -0,0 +1,55 @@
# -*- coding: utf-8 -*-
"""
销售复盘报告 API
GET /api/v1/reports/monthly - 获取当月销售复盘报告 (AI 生成)
"""
from fastapi import APIRouter, Depends
from pydantic import BaseModel
from sqlalchemy.ext.asyncio import AsyncSession
from app.api.deps import get_current_user
from app.core.database import get_db
from app.services.analytics import generate_monthly_report
router = APIRouter()
# ---- Response models ----
class StageMetric(BaseModel):
    """Aggregated numbers for a single sales-funnel stage."""
    stage: str
    count: int
    total_amount: float
class MonthlyReportResponse(BaseModel):
    """Monthly review payload: structured metrics plus the AI report text."""
    metrics: list[StageMetric]
    report: str
# ---- Routes ----
@router.get(
    "/monthly",
    response_model=MonthlyReportResponse,
    summary="获取当月销售复盘报告",
    tags=["数据报告"],
)
async def get_monthly_report(
    db: AsyncSession = Depends(get_db),
    current_user: dict = Depends(get_current_user),
):
    """Build the current month's sales review report.

    The service layer pre-aggregates per-stage counts and amounts with
    SQL, injects the real numbers into the LLM prompt, and returns the
    structured metrics together with the generated report text.

    This endpoint waits synchronously for the model (user-triggered; can
    take 10-30 seconds), so the frontend should show a loading state.
    """
    payload = await generate_monthly_report(db)
    return MonthlyReportResponse(**payload)
+22
View File
@@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-
"""
API v1 router aggregation.

Every v1 sub-router is registered here; main.py mounts the combined
router under the /api/v1 prefix.
"""
from fastapi import APIRouter
from app.api.v1.endpoints import auth, health, logs, reports
api_v1_router = APIRouter()
# Mount each business module's router.
api_v1_router.include_router(health.router, prefix="", tags=["系统"])
api_v1_router.include_router(auth.router, prefix="/auth", tags=["认证"])
api_v1_router.include_router(logs.router, prefix="/logs", tags=["客户日志"])
api_v1_router.include_router(reports.router, prefix="/reports", tags=["数据报告"])
# Future modules are appended here, e.g.:
# from app.api.v1.endpoints import clients, expenses
# api_v1_router.include_router(clients.router, prefix="/clients", tags=["客户管理"])
# api_v1_router.include_router(expenses.router, prefix="/expenses", tags=["报销管理"])
View File
+65
View File
@@ -0,0 +1,65 @@
# -*- coding: utf-8 -*-
"""
Core configuration module.

All settings are managed through Pydantic v2 BaseSettings; values load
from the environment with automatic .env support. Secrets must be
injected via environment variables, never hard-coded.
"""
from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
    """Global application settings (env-driven, .env auto-loaded)."""
    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        case_sensitive=False,
    )
    # ---- Application basics ----
    APP_NAME: str = "SHBL-CRM"
    APP_VERSION: str = "2.0.0"
    # Safe default; the committed .env overrides this to true.
    # NOTE(review): make sure production deployments do not ship DEBUG=true.
    DEBUG: bool = False
    # ---- Database (PostgreSQL + asyncpg) ----
    DB_HOST: str = "127.0.0.1"
    DB_PORT: int = 5432
    DB_USER: str = "crm_admin"
    DB_PASSWORD: str = "change_me_in_production"
    DB_NAME: str = "shbl_crm"
    @property
    def DATABASE_URL(self) -> str:
        """Async PostgreSQL DSN (asyncpg driver) for the application engine."""
        return (
            f"postgresql+asyncpg://{self.DB_USER}:{self.DB_PASSWORD}"
            f"@{self.DB_HOST}:{self.DB_PORT}/{self.DB_NAME}"
        )
    @property
    def DATABASE_URL_SYNC(self) -> str:
        """Sync DSN (psycopg2), used only by Alembic migrations."""
        return (
            f"postgresql+psycopg2://{self.DB_USER}:{self.DB_PASSWORD}"
            f"@{self.DB_HOST}:{self.DB_PORT}/{self.DB_NAME}"
        )
    # ---- JWT security ----
    # NOTE(review): placeholder default — deployment must override SECRET_KEY.
    SECRET_KEY: str = "REPLACE_WITH_RANDOM_64_CHAR_HEX"
    JWT_ALGORITHM: str = "HS256"
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 60 * 24  # 24 hours
    # ---- CORS whitelist (strict mode, "*" is forbidden) ----
    CORS_ORIGINS: list[str] = [
        "http://localhost:5173",  # Vite dev server
        "http://localhost:8080",  # Nginx production frontend
    ]
    # ---- AI service (Dify BaaS platform) ----
    DIFY_BASE_URL: str = "http://192.168.1.88/v1"
    DIFY_LOG_APP_API_KEY: str = ""  # log-analysis app (completion)
    DIFY_REPORT_APP_API_KEY: str = ""  # monthly-report app (completion)
# Global singleton; other modules do: from app.core.config import settings
settings = Settings()
+58
View File
@@ -0,0 +1,58 @@
# -*- coding: utf-8 -*-
"""
Async database engine and session management.

SQLAlchemy 2.0 async mode with the asyncpg driver and explicit
connection-pool settings. get_db() is the FastAPI dependency that
hands out request-scoped sessions.
"""
from collections.abc import AsyncGenerator
from sqlalchemy.ext.asyncio import (
    AsyncSession,
    async_sessionmaker,
    create_async_engine,
)
from sqlalchemy.orm import DeclarativeBase
from app.core.config import settings
# ---- Async engine (with connection-pool configuration) ----
engine = create_async_engine(
    settings.DATABASE_URL,
    echo=settings.DEBUG,  # log SQL statements in DEBUG mode
    pool_size=20,  # resident pool connections
    max_overflow=10,  # extra transient connections beyond pool_size
    pool_pre_ping=True,  # probe on checkout; avoids handing out dead connections
    pool_recycle=3600,  # max connection age (s); beats server-side idle kills
)
# ---- Async session factory ----
AsyncSessionLocal = async_sessionmaker(
    bind=engine,
    class_=AsyncSession,
    expire_on_commit=False,  # keep attributes loaded after commit (no lazy-load surprises)
)
# ---- ORM declarative base ----
class Base(DeclarativeBase):
    """Base class for all ORM models; Alembic diffs Base.metadata for autogenerate."""
    pass
# ---- Dependency: one database session per request ----
async def get_db() -> AsyncGenerator[AsyncSession, None]:
    """FastAPI Depends() generator yielding a request-scoped session.

    Commits when the handler finishes cleanly, rolls back and re-raises
    on any exception (including a failing commit), and closes the
    session either way.
    Usage: db: AsyncSession = Depends(get_db)
    """
    async with AsyncSessionLocal() as db_session:
        try:
            yield db_session
            await db_session.commit()
        except Exception:
            await db_session.rollback()
            raise
        finally:
            await db_session.close()
+142
View File
@@ -0,0 +1,142 @@
# -*- coding: utf-8 -*-
"""
Dify BaaS API 客户端
取代原有的 OllamaClient,所有 AI 调用统一走 Dify 平台 API。
Dify 部署地址: http://192.168.1.88
文档参考: https://docs.dify.ai/guides/application-publishing/developing-with-apis
"""
import logging
import httpx
from app.core.config import settings
logger = logging.getLogger("dify_client")
class DifyClient:
    """
    Async client for the Dify platform API.

    Each Dify app has its own API key:
    - log-analysis app    -> DIFY_LOG_APP_API_KEY
    - monthly-report app  -> DIFY_REPORT_APP_API_KEY
    Pass the matching key per call.
    """
    def __init__(self, base_url: str = "http://192.168.1.88/v1"):
        # Normalize so path joins below never produce a double slash.
        self.base_url = base_url.rstrip("/")
    async def call_text_generator(
        self,
        api_key: str,
        inputs: dict,
        query: str = "",
    ) -> str:
        """
        Call a Dify text-generation (completion) app.

        :param api_key: Dify app API key (app-xxx format)
        :param inputs: variable dict; keys must match the app's configured variables
        :param query: optional user query text
        :return: the "answer" text from Dify, or "" on any failure
        """
        url = f"{self.base_url}/completion-messages"
        headers = {
            "Authorization": f"Bearer {api_key}",
            "Content-Type": "application/json",
        }
        payload = {
            "inputs": inputs,
            "query": query,
            "response_mode": "blocking",
            "user": "crm-backend",
        }
        try:
            async with httpx.AsyncClient(timeout=60.0) as client:
                response = await client.post(url, headers=headers, json=payload)
                if response.status_code != 200:
                    logger.error(
                        "Dify API 非 200 响应: status=%d body=%s",
                        response.status_code,
                        response.text[:500],
                    )
                    return ""
                data = response.json()
                answer = data.get("answer", "")
                # Only the key's tail is logged, never the full secret.
                logger.info(
                    "Dify 调用成功: %d chars (key=...%s)",
                    len(answer),
                    api_key[-6:],
                )
                return answer
        except httpx.TimeoutException:
            logger.error("Dify API 超时 (60s): url=%s key=...%s", url, api_key[-6:])
            return ""
        except Exception as e:
            logger.error("Dify API 异常: %s (key=...%s)", e, api_key[-6:], exc_info=True)
            return ""
    async def call_workflow(
        self,
        api_key: str,
        inputs: dict,
        user: str = "crm-backend",
    ) -> dict | str:
        """
        Call a Dify workflow app.

        :param api_key: Dify app API key (app-xxx format)
        :param inputs: variable dict; keys must match the app's configured variables
        :param user: caller identity string
        :return: the workflow "outputs" dict on success, "" on failure

        NOTE(review): mixed return types (dict on success, "" on error)
        force callers to type-check; returning {} on failure would be
        cleaner but changes the contract.
        """
        url = f"{self.base_url}/workflows/run"
        headers = {
            "Authorization": f"Bearer {api_key}",
            "Content-Type": "application/json",
        }
        payload = {
            "inputs": inputs,
            "response_mode": "blocking",
            "user": user,
        }
        try:
            async with httpx.AsyncClient(timeout=60.0) as client:
                response = await client.post(url, headers=headers, json=payload)
                if response.status_code != 200:
                    logger.error(
                        "Dify Workflow 非 200 响应: status=%d body=%s",
                        response.status_code,
                        response.text[:500],
                    )
                    return ""
                data = response.json()
                outputs = data.get("data", {}).get("outputs", {})
                logger.info(
                    "Dify Workflow 调用成功: outputs_keys=%s (key=...%s)",
                    list(outputs.keys()) if isinstance(outputs, dict) else "N/A",
                    api_key[-6:],
                )
                return outputs
        except httpx.TimeoutException:
            logger.error("Dify Workflow 超时 (60s): url=%s key=...%s", url, api_key[-6:])
            return ""
        except Exception as e:
            logger.error("Dify Workflow 异常: %s (key=...%s)", e, api_key[-6:], exc_info=True)
            return ""
# Global singleton wired to the configured Dify base URL.
dify_client = DifyClient(base_url=settings.DIFY_BASE_URL)
+64
View File
@@ -0,0 +1,64 @@
# -*- coding: utf-8 -*-
"""
安全模块:JWT 令牌签发/验证 + 密码哈希
使用 python-jose 进行 JWT 操作,bcrypt 直接进行密码哈希。
注意:passlib 已不再维护,与 bcrypt>=5.0 不兼容,故直接使用 bcrypt 库。
"""
from datetime import datetime, timedelta, timezone
import bcrypt
from jose import JWTError, jwt
from app.core.config import settings
def hash_password(plain_password: str) -> str:
    """Hash a plaintext password with bcrypt (fresh salt per call)."""
    salt = bcrypt.gensalt()
    digest = bcrypt.hashpw(plain_password.encode("utf-8"), salt)
    return digest.decode("utf-8")
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check a plaintext password against a stored bcrypt hash."""
    candidate = plain_password.encode("utf-8")
    stored = hashed_password.encode("utf-8")
    return bcrypt.checkpw(candidate, stored)
def create_access_token(
    subject: str,
    role: str,
    expires_delta: timedelta | None = None,
) -> str:
    """
    Issue a signed JWT access token.

    :param subject: user identifier (typically username or user id)
    :param role: user role (admin / user), embedded in the claims for authz
    :param expires_delta: custom lifetime; defaults to the configured value
    """
    if expires_delta is None:
        expires_delta = timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
    claims = {
        "sub": subject,
        "role": role,
        "exp": datetime.now(timezone.utc) + expires_delta,
    }
    return jwt.encode(claims, settings.SECRET_KEY, algorithm=settings.JWT_ALGORITHM)
def decode_access_token(token: str) -> dict | None:
    """
    Decode and verify a JWT.

    :return: the payload dict, or None when the token is invalid/expired
    """
    try:
        return jwt.decode(
            token, settings.SECRET_KEY, algorithms=[settings.JWT_ALGORITHM]
        )
    except JWTError:
        return None
View File
+63
View File
@@ -0,0 +1,63 @@
# -*- coding: utf-8 -*-
"""
用户 CRUD 数据访问层
封装所有用户相关的数据库操作,业务逻辑层只调用此模块,不直接写 SQL。
"""
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.security import hash_password, verify_password
from app.models.user import User
from app.schemas.user import UserCreate, UserUpdate
async def get_user_by_username(db: AsyncSession, username: str) -> User | None:
    """Look up a user row by its unique username, or return None."""
    query = select(User).where(User.username == username)
    return (await db.execute(query)).scalar_one_or_none()
async def authenticate_user(
    db: AsyncSession, username: str, password: str
) -> User | None:
    """Check username/password; return the active user, else None.

    Unknown username, disabled account, and wrong password all yield the
    same None result, so callers cannot distinguish the cases.
    """
    user = await get_user_by_username(db, username)
    if user is None or not user.is_active:
        return None
    return user if verify_password(password, user.password_hash) else None
async def create_user(db: AsyncSession, data: UserCreate) -> User:
    """Insert a new user; the password is bcrypt-hashed before storage."""
    new_user = User(
        username=data.username,
        password_hash=hash_password(data.password),
        role=data.role,
        permissions=data.permissions,
    )
    db.add(new_user)
    # Flush so DB-generated values are populated, but leave the commit
    # to the get_db() dependency teardown.
    await db.flush()
    await db.refresh(new_user)
    return new_user
async def update_user(db: AsyncSession, user: User, data: UserUpdate) -> User:
    """Apply a partial update; a supplied plaintext password is re-hashed."""
    changes = data.model_dump(exclude_unset=True)
    if "password" in changes:
        changes["password_hash"] = hash_password(changes.pop("password"))
    for attr, new_value in changes.items():
        setattr(user, attr, new_value)
    await db.flush()
    await db.refresh(user)
    return user
async def delete_user(db: AsyncSession, user: User) -> None:
    """Remove a user row; the commit is handled by the get_db() dependency."""
    await db.delete(user)
    await db.flush()
+72
View File
@@ -0,0 +1,72 @@
# -*- coding: utf-8 -*-
"""
FastAPI 应用入口
组装中间件、CORS、路由,启动 ASGI 应用。
"""
import logging
from contextlib import asynccontextmanager
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from app.api.v1.router import api_v1_router
from app.core.config import settings
from app.middleware.audit import AuditMiddleware
# ---- Logging configuration ----
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s | %(name)-12s | %(levelname)-5s | %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
)
logger = logging.getLogger(__name__)
# ---- Lifespan management (replaces the deprecated on_event hooks) ----
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Startup/shutdown hooks: log service and DB connection details."""
    logger.info("SHBL-CRM 后端服务启动 | 版本: %s", settings.APP_VERSION)
    # Connection details only — the password is deliberately not logged.
    logger.info("数据库连接: %s@%s:%s/%s",
                settings.DB_USER, settings.DB_HOST, settings.DB_PORT, settings.DB_NAME)
    yield
    logger.info("SHBL-CRM 后端服务关闭")
# ---- Create the FastAPI instance ----
app = FastAPI(
    title=settings.APP_NAME,
    version=settings.APP_VERSION,
    description="天津硕博霖客户信息管理系统 - 后端 API",
    docs_url="/api/docs",  # Swagger UI path
    redoc_url="/api/redoc",  # ReDoc path
    openapi_url="/api/openapi.json",
    lifespan=lifespan,
)
# ---- 1. Audit middleware ----
# NOTE(review): Starlette applies middleware in reverse add order, so the
# CORS middleware added below is the OUTERMOST layer — CORS preflight
# requests it short-circuits may never reach the audit log. Confirm this
# ordering is intended.
app.add_middleware(AuditMiddleware)
# ---- 2. CORS (strict whitelist; allow_origins=["*"] is forbidden) ----
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.CORS_ORIGINS,  # only origins from configuration
    allow_credentials=True,
    allow_methods=["GET", "POST", "PUT", "DELETE", "PATCH"],
    allow_headers=["Authorization", "Content-Type"],
)
# ---- 3. Mount API routes ----
app.include_router(api_v1_router, prefix="/api/v1")
# ---- Root path: quick liveness/info check ----
@app.get("/", tags=["系统"])
async def root():
    """Return basic service metadata so a bare GET / confirms liveness."""
    info = {
        "service": settings.APP_NAME,
        "version": settings.APP_VERSION,
        "docs": "/api/docs",
    }
    return info
View File
+59
View File
@@ -0,0 +1,59 @@
# -*- coding: utf-8 -*-
"""
Global audit middleware.

Intercepts every inbound HTTP request and records: method, URL, client
IP, elapsed time, and response status code. Output goes to standard
logging; production can forward it to ELK / Loki etc.
"""
import logging
import time
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
from starlette.requests import Request
from starlette.responses import Response
# Dedicated audit logger, kept separate from business logs.
audit_logger = logging.getLogger("audit")
audit_logger.setLevel(logging.INFO)
class AuditMiddleware(BaseHTTPMiddleware):
    """
    Audit middleware - records key facts about every request.
    Log line format: [AUDIT] <client-ip> <method> <url> <status> <elapsed-ms>
    """
    async def dispatch(
        self, request: Request, call_next: RequestResponseEndpoint
    ) -> Response:
        # Client IP: prefer the reverse proxy's X-Forwarded-For header.
        # NOTE(review): XFF may hold a comma-separated hop chain and is
        # client-forgeable; consider taking only the first entry and only
        # trusting the header when behind a known proxy.
        client_ip = request.headers.get(
            "X-Forwarded-For", request.client.host if request.client else "unknown"
        )
        method = request.method
        url = str(request.url)
        start_time = time.perf_counter()
        try:
            response = await call_next(request)
        except Exception:
            # Unhandled exceptions still get an audit record (logged as 500),
            # then propagate to the framework's error handling.
            elapsed_ms = (time.perf_counter() - start_time) * 1000
            audit_logger.error(
                "[AUDIT] %s %s %s 500 %.1fms (unhandled exception)",
                client_ip, method, url, elapsed_ms,
            )
            raise
        elapsed_ms = (time.perf_counter() - start_time) * 1000
        audit_logger.info(
            "[AUDIT] %s %s %s %d %.1fms",
            client_ip, method, url, response.status_code, elapsed_ms,
        )
        # Expose timing in a response header (debug aid; may be dropped in prod).
        response.headers["X-Request-Duration-Ms"] = f"{elapsed_ms:.1f}"
        return response
+9
View File
@@ -0,0 +1,9 @@
# 在此处导入所有 ORM 模型,供 Alembic 自动检测
from app.models.user import User # noqa: F401
from app.models.client import Client # noqa: F401
from app.models.crm_business import ( # noqa: F401
CustomerLog,
CustomerTag,
FollowUpToDo,
SalesOpportunity,
)
+54
View File
@@ -0,0 +1,54 @@
# -*- coding: utf-8 -*-
"""
客户主表模型
CRM 的核心实体,所有业务表 (日志/标签/待办/销售机会) 均通过外键关联到此表。
"""
import uuid
from datetime import datetime
from sqlalchemy import String, Text, func
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import Mapped, mapped_column
from app.core.database import Base
class Client(Base):
    """
    Customer master table (clients).

    Core CRM entity; every business table (logs / tags / todos /
    opportunities) references it via a foreign key.
    """
    __tablename__ = "clients"
    # UUID primary key, generated client-side via uuid4.
    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), primary_key=True, default=uuid.uuid4,
    )
    name: Mapped[str] = mapped_column(
        String(200), nullable=False, index=True,
        comment="客户名称 (公司名/个人名)",
    )
    contact_person: Mapped[str | None] = mapped_column(
        String(100), nullable=True,
        comment="联系人姓名",
    )
    phone: Mapped[str | None] = mapped_column(
        String(30), nullable=True,
        comment="联系电话",
    )
    address: Mapped[str | None] = mapped_column(
        String(500), nullable=True,
        comment="地址",
    )
    notes: Mapped[str | None] = mapped_column(
        Text, nullable=True,
        comment="备注",
    )
    # Timestamps are database-managed (server_default / onupdate).
    created_at: Mapped[datetime] = mapped_column(
        server_default=func.now(),
    )
    updated_at: Mapped[datetime] = mapped_column(
        server_default=func.now(),
        onupdate=func.now(),
    )
+132
View File
@@ -0,0 +1,132 @@
# -*- coding: utf-8 -*-
"""
CRM 业务数据模型
定义客户沟通日志、标签、跟进待办、销售机会四张业务表。
所有主键均为 UUID,与 User/KnowledgeChunk 保持一致的 ID 策略。
"""
import uuid
from datetime import datetime
from sqlalchemy import ForeignKey, String, Text, Numeric, func
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import Mapped, mapped_column
from app.core.database import Base
class CustomerLog(Base):
    """
    Customer communication log table.

    One row per interaction with a customer (phone / visit / WeChat...).
    Inserting a log triggers the background AI task that extracts tags
    and a follow-up todo.
    """
    __tablename__ = "customer_logs"
    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), primary_key=True, default=uuid.uuid4,
    )
    # Cascade delete: removing a client removes its logs.
    customer_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("clients.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
        comment="关联客户 ID",
    )
    content: Mapped[str] = mapped_column(
        Text, nullable=False, comment="沟通日志内容",
    )
    created_at: Mapped[datetime] = mapped_column(
        server_default=func.now(), comment="记录时间",
    )
class CustomerTag(Base):
    """
    Customer tag table.

    Rows are extracted from communication logs by the AI pipeline and can
    also be added manually. Tag names are deduplicated per customer by
    business logic only — there is no DB-level unique constraint.
    """
    __tablename__ = "customer_tags"
    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), primary_key=True, default=uuid.uuid4,
    )
    customer_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("clients.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
        comment="关联客户 ID",
    )
    tag_name: Mapped[str] = mapped_column(
        String(100), nullable=False, comment="标签名称,如'价格敏感''决策周期长'",
    )
    created_at: Mapped[datetime] = mapped_column(
        server_default=func.now(),
    )
class FollowUpToDo(Base):
    """
    Follow-up todo table.

    Rows are generated by the AI from communication logs as next-action
    suggestions, or created manually. status is a simple two-state flag:
    pending / done.
    """
    __tablename__ = "follow_up_todos"
    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), primary_key=True, default=uuid.uuid4,
    )
    customer_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("clients.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
        comment="关联客户 ID",
    )
    task_desc: Mapped[str] = mapped_column(
        Text, nullable=False, comment="待办任务描述",
    )
    # Python-side default matches the DB default ('pending') in migration 0001.
    status: Mapped[str] = mapped_column(
        String(20), nullable=False, default="pending",
        comment="状态: pending(待处理) / done(已完成)",
    )
    created_at: Mapped[datetime] = mapped_column(
        server_default=func.now(),
    )
class SalesOpportunity(Base):
    """
    Sales opportunity table.

    Tracks each customer's funnel stage and deal amount for the
    dashboard and the monthly review report.
    Four stages: intent -> negotiation -> closed -> lost.
    """
    __tablename__ = "sales_opportunities"
    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), primary_key=True, default=uuid.uuid4,
    )
    customer_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("clients.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
        comment="关联客户 ID",
    )
    # NOTE(review): Numeric(12, 2) yields decimal.Decimal at runtime even
    # though the annotation says float — confirm what callers expect.
    amount: Mapped[float] = mapped_column(
        Numeric(12, 2), nullable=False, default=0,
        comment="预估/实际金额 (元)",
    )
    # NOTE(review): migration 0001 creates this column with DEFAULT 'intent',
    # while the ORM default here is the Chinese label '意向' — stage-based
    # aggregations will split into two buckets unless one side is fixed.
    stage: Mapped[str] = mapped_column(
        String(20), nullable=False, default="意向",
        comment="漏斗阶段: 意向 / 谈判 / 成交 / 流失",
    )
    created_at: Mapped[datetime] = mapped_column(
        server_default=func.now(),
    )
+46
View File
@@ -0,0 +1,46 @@
# -*- coding: utf-8 -*-
"""
用户 ORM 模型
对应数据库 users 表,使用 SQLAlchemy 2.0 Mapped 注解风格。
"""
from datetime import datetime, timezone
from sqlalchemy import String, func
from sqlalchemy.orm import Mapped, mapped_column
from app.core.database import Base
class User(Base):
    """User table — stores account, password hash, and role/permission data."""
    __tablename__ = "users"
    # Autoincrement integer primary key.
    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
    # Unique login name; indexed for the login lookup path.
    username: Mapped[str] = mapped_column(
        String(50), unique=True, nullable=False, index=True, comment="登录用户名"
    )
    # bcrypt hash only — plaintext passwords are never stored.
    password_hash: Mapped[str] = mapped_column(
        String(255), nullable=False, comment="bcrypt 哈希密码"
    )
    # Coarse role: "admin" or "user".
    role: Mapped[str] = mapped_column(
        String(20), nullable=False, default="user", comment="角色: admin / user"
    )
    # Comma-separated permission names, e.g. "view,edit".
    permissions: Mapped[str] = mapped_column(
        String(200), nullable=False, default="view,edit", comment="逗号分隔权限列表"
    )
    # Soft enable/disable flag for the account.
    is_active: Mapped[bool] = mapped_column(
        default=True, comment="账户是否启用"
    )
    created_at: Mapped[datetime] = mapped_column(
        server_default=func.now(), comment="创建时间"
    )
    # server_default covers INSERT; onupdate stamps UTC time on each UPDATE.
    updated_at: Mapped[datetime] = mapped_column(
        server_default=func.now(),
        onupdate=lambda: datetime.now(timezone.utc),
        comment="最后更新时间",
    )
    def __repr__(self) -> str:
        return f"<User(id={self.id}, username='{self.username}', role='{self.role}')>"
View File
+55
View File
@@ -0,0 +1,55 @@
# -*- coding: utf-8 -*-
"""
用户相关 Pydantic v2 校验模型 (DTO)
用于请求体验证和响应序列化,与 ORM 模型解耦。
"""
from datetime import datetime
from pydantic import BaseModel, Field, ConfigDict
# ---- 请求模型 ----
class UserLogin(BaseModel):
    """Login request payload."""
    username: str = Field(..., min_length=2, max_length=50, examples=["admin"])
    password: str = Field(..., min_length=6, max_length=128)
class UserCreate(BaseModel):
    """Create-user request (admin-only operation)."""
    username: str = Field(..., min_length=2, max_length=50)
    password: str = Field(..., min_length=6, max_length=128)
    role: str = Field(default="user", pattern=r"^(admin|user)$")
    permissions: str = Field(default="view,edit")
class UserUpdate(BaseModel):
    """Partial-update request for an existing user; None means "leave unchanged"."""
    password: str | None = Field(default=None, min_length=6, max_length=128)
    role: str | None = Field(default=None, pattern=r"^(admin|user)$")
    permissions: str | None = None
    is_active: bool | None = None
# ---- 响应模型 ----
class UserOut(BaseModel):
    """User info response (sanitised — deliberately omits the password hash)."""
    model_config = ConfigDict(from_attributes=True)  # allow construction straight from ORM objects
    id: int
    username: str
    role: str
    permissions: str
    is_active: bool
    created_at: datetime
class Token(BaseModel):
    """JWT bearer-token response returned by the login endpoint."""
    access_token: str
    token_type: str = "bearer"
    role: str
    username: str
View File
+145
View File
@@ -0,0 +1,145 @@
# -*- coding: utf-8 -*-
"""
AI 工作流服务 (Dify BaaS 版本)
事件驱动的异步 AI 任务:从客户沟通日志中自动提取标签和生成跟进待办。
架构要点:
- 此函数由 FastAPI BackgroundTasks 调用,运行在独立的后台线程中
- 必须使用独立的 DB Session 生命周期,严禁与主请求共享 Session
- AI 调用通过 Dify 平台 API(非直连大模型)
- AI 解析失败时静默降级(记录日志),绝不抛出异常导致主线程崩溃
"""
import json
import logging
import uuid
from app.core.dify_client import dify_client
from app.core.database import AsyncSessionLocal
from app.models.crm_business import CustomerTag, FollowUpToDo
from app.core.config import settings
logger = logging.getLogger("ai_workflow")
async def process_log_with_ai(
    log_id: uuid.UUID,
    content: str,
    customer_id: uuid.UUID,
) -> None:
    """
    Background AI task: analyse a communication log, extract tags and a todo.

    *** Key constraints ***
    This function runs inside FastAPI BackgroundTasks and therefore must:
      1. Use its own independent AsyncSession (never the request's session)
      2. Catch every exception internally and never re-raise
      3. Degrade silently when the AI response has an unexpected format
    """
    logger.info("开始 AI 处理: log_id=%s, customer_id=%s", log_id, customer_id)
    if not settings.DIFY_LOG_APP_API_KEY:
        logger.error("DIFY_LOG_APP_API_KEY 未配置,跳过 AI 处理")
        return
    # ---- Independent DB session lifecycle (never shared with the request) ----
    async with AsyncSessionLocal() as db:
        try:
            # Step 1: call the Dify log-analysis Workflow app.
            # *** The keys in `inputs` must match the variable names configured
            # in the Dify console — the workflow defines an input "log_content". ***
            workflow_outputs = await dify_client.call_workflow(
                api_key=settings.DIFY_LOG_APP_API_KEY,
                inputs={"log_content": content},
            )
            if not workflow_outputs:
                logger.warning("Dify 返回空响应,跳过入库 (log_id=%s)", log_id)
                return
            # The workflow returns a dict; serialise it to a JSON string so the
            # existing parsing pipeline can handle it uniformly.
            if isinstance(workflow_outputs, dict):
                raw_response = json.dumps(workflow_outputs, ensure_ascii=False)
            else:
                raw_response = str(workflow_outputs)
            logger.debug("Dify Workflow 原始返回: %s", raw_response[:500])
            # Step 2: parse the JSON response (fault-tolerant).
            json_str = _extract_json(raw_response)
            result = json.loads(json_str)
            tags: list[str] = result.get("tags", [])
            next_task: str = result.get("next_task", "")
            # Step 3: persist tags (cap at 3; blank entries are skipped).
            if tags:
                for tag_name in tags[:3]:
                    tag_name = tag_name.strip()
                    if not tag_name:
                        continue
                    tag = CustomerTag(
                        customer_id=customer_id,
                        tag_name=tag_name,
                    )
                    db.add(tag)
                logger.info("写入 %d 个标签: %s", len(tags[:3]), tags[:3])
            # Step 4: persist the follow-up todo, if the AI produced one.
            if next_task and next_task.strip():
                todo = FollowUpToDo(
                    customer_id=customer_id,
                    task_desc=next_task.strip(),
                    status="pending",
                )
                db.add(todo)
                logger.info("写入待办: %s", next_task.strip()[:100])
            await db.commit()
            logger.info("AI 处理完成: log_id=%s", log_id)
        except json.JSONDecodeError as e:
            # raw_response is always bound here: JSONDecodeError can only
            # originate from the json.loads call above, which follows its assignment.
            logger.error(
                "Dify 返回 JSON 解析失败 (log_id=%s): %s | 原始响应: %s",
                log_id, e, raw_response[:300],
            )
            await db.rollback()
        except Exception as e:
            # Broad catch is deliberate: a background task must never crash the
            # worker; the failure is logged and the transaction rolled back.
            logger.error(
                "AI 后台任务异常 (log_id=%s): %s",
                log_id, e, exc_info=True,
            )
            await db.rollback()
def _extract_json(text: str) -> str:
"""
从 Dify/LLM 响应中提取 JSON 字符串。
处理常见的"包裹"行为:
- 直接返回 JSON
- 用 ```json ... ``` 包裹
- 在 JSON 前后加解释文字
"""
text = text.strip()
# 尝试提取 ```json ... ``` 代码块
if "```json" in text:
start = text.index("```json") + len("```json")
end = text.index("```", start)
return text[start:end].strip()
if "```" in text:
start = text.index("```") + len("```")
end = text.index("```", start)
return text[start:end].strip()
# 尝试找到第一个 { 和最后一个 }
first_brace = text.find("{")
last_brace = text.rfind("}")
if first_brace != -1 and last_brace != -1 and last_brace > first_brace:
return text[first_brace:last_brace + 1]
# 原样返回,让 json.loads 报错触发容错
return text
+127
View File
@@ -0,0 +1,127 @@
# -*- coding: utf-8 -*-
"""
销售数据分析服务 (Dify BaaS 版本)
基于 SQL 预聚合的销售漏斗统计 + Dify AI 驱动的复盘报告生成。
"""
import logging
from datetime import datetime, timezone
from sqlalchemy import func, select, extract, case
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.config import settings
from app.core.dify_client import dify_client
from app.models.crm_business import SalesOpportunity
logger = logging.getLogger("analytics")
# ============================================================
# 1. SQL 预聚合:销售漏斗指标
# ============================================================
async def get_sales_metrics(db: AsyncSession) -> list[dict]:
    """
    Aggregate the current month's sales opportunities per funnel stage.

    Returns one dict per stage with the opportunity count and summed amount.
    The aggregation runs entirely in the database via GROUP BY.
    """
    today = datetime.now(timezone.utc)
    # Fixed funnel ordering: 意向 → 谈判 → 成交 → 流失, anything else last.
    stage_rank = case(
        (SalesOpportunity.stage == "意向", 1),
        (SalesOpportunity.stage == "谈判", 2),
        (SalesOpportunity.stage == "成交", 3),
        (SalesOpportunity.stage == "流失", 4),
        else_=5,
    )
    query = (
        select(
            SalesOpportunity.stage,
            func.count(SalesOpportunity.id).label("count"),
            func.coalesce(func.sum(SalesOpportunity.amount), 0).label("total_amount"),
        )
        .where(
            extract("year", SalesOpportunity.created_at) == today.year,
            extract("month", SalesOpportunity.created_at) == today.month,
        )
        .group_by(SalesOpportunity.stage)
        .order_by(stage_rank)
    )
    records = (await db.execute(query)).all()
    metrics: list[dict] = []
    for rec in records:
        metrics.append(
            {
                "stage": rec.stage,
                "count": rec.count,
                "total_amount": float(rec.total_amount),
            }
        )
    logger.info("当月销售指标: %s", metrics)
    return metrics
# ============================================================
# 2. Dify AI 复盘报告生成
# ============================================================
async def generate_monthly_report(db: AsyncSession) -> dict:
    """
    Build the current month's sales review report.

    Pipeline:
      1. Pre-aggregate real metrics in SQL (get_sales_metrics)
      2. Feed the structured numbers to the Dify report app via `inputs`
      3. Return the Dify-generated report text verbatim

    :return: {"metrics": [...], "report": "<Dify-generated text>"}
    """
    # Step 1: pull the real numbers first — no data, no report.
    metrics = await get_sales_metrics(db)
    if not metrics:
        return {
            "metrics": [],
            "report": "当月暂无销售机会数据,无法生成复盘报告。",
        }
    # Step 2: flatten the metrics into the text format the Dify app consumes.
    # *** The `inputs` keys must match the variable names configured in the
    # Dify console — the report app defines an input "metrics_data". ***
    lines = [
        f"- {m['stage']}: {m['count']} 个机会, 总金额 ¥{m['total_amount']:,.2f}"
        for m in metrics
    ]
    metrics_text = "\n".join(lines)
    total_count = sum(m["count"] for m in metrics)
    total_amount = sum(m["total_amount"] for m in metrics)
    metrics_text += f"\n- 合计: {total_count} 个机会, 总金额 ¥{total_amount:,.2f}"
    # Step 3: hand the data off to the Dify report app.
    if not settings.DIFY_REPORT_APP_API_KEY:
        logger.error("DIFY_REPORT_APP_API_KEY 未配置")
        return {
            "metrics": metrics,
            "report": "AI 报告服务未配置,请联系管理员。",
        }
    # The Dify app requires report_period; derive it from the current month.
    now = datetime.now(timezone.utc)
    report_period = f"{now.year}年{now.month:02d}月"
    report_text = await dify_client.call_text_generator(
        api_key=settings.DIFY_REPORT_APP_API_KEY,
        inputs={"metrics_data": metrics_text, "report_period": report_period},
    )
    if not report_text:
        report_text = "AI 报告生成失败,请稍后重试或检查 Dify 服务状态。"
    logger.info("月度复盘报告生成完成 (%d 字)", len(report_text))
    return {
        "metrics": metrics,
        "report": report_text,
    }
+49
View File
@@ -0,0 +1,49 @@
import os
import json
import httpx
from dotenv import load_dotenv
# Load environment variables from the local .env file
load_dotenv()
DIFY_LOG_APP_API_KEY = os.getenv("DIFY_LOG_APP_API_KEY")
DIFY_REPORT_APP_API_KEY = os.getenv("DIFY_REPORT_APP_API_KEY")
# Dify blocking completion endpoint exercised by both apps below
TARGET_URL = "http://192.168.1.88/v1/completion-messages"
def test_dify_endpoint(api_key: str, app_name: str, payload: dict):
    """Fire one blocking request at the Dify completion endpoint and print the
    raw HTTP status and body, for manual debugging of app configuration."""
    if not api_key:
        print(f"[{app_name}] Error: API Key not found in .env file.")
        return
    headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json"
    }
    print(f"\n--- Testing App: {app_name} ---")
    print(f"[{app_name}] Request Payload: {json.dumps(payload, ensure_ascii=False)}")
    with httpx.Client(timeout=30.0) as client:
        # Deliberately no try/except around the request: a network failure
        # should crash loudly, while HTTP-level errors (e.g. 400) still return
        # a response whose status code and body are printed below.
        response = client.post(TARGET_URL, headers=headers, json=payload)
        print(f"[{app_name}] HTTP Status Code: {response.status_code}")
        print(f"[{app_name}] Raw Response Body: {response.text}")
if __name__ == "__main__":
    # Test case 1: the log-analysis app
    log_payload = {
        "inputs": {"log_content": "今天拜访了张总,他对价格很敏感。"},
        "response_mode": "blocking",
        "user": "debug_script"
    }
    test_dify_endpoint(DIFY_LOG_APP_API_KEY, "Log Analysis App", log_payload)
    # Test case 2: the monthly-report app
    report_payload = {
        "inputs": {"metrics_data": "测试指标数据", "report_period": "2026年02月"},
        "response_mode": "blocking",
        "user": "debug_script"
    }
    test_dify_endpoint(DIFY_REPORT_APP_API_KEY, "Monthly Report App", report_payload)
+28
View File
@@ -0,0 +1,28 @@
# SHBL-CRM Backend Dependencies
# Python 3.10+
# ---- Web 框架 ----
fastapi>=0.115.0
uvicorn[standard]>=0.34.0
# ---- 数据库 (异步 PostgreSQL) ----
sqlalchemy[asyncio]>=2.0.0
asyncpg>=0.30.0
psycopg2-binary>=2.9.0 # 仅 Alembic 同步迁移使用
alembic>=1.14.0
# ---- 数据校验 ----
pydantic>=2.0.0
pydantic-settings>=2.0.0
# ---- 安全 ----
python-jose[cryptography]>=3.3.0
bcrypt>=4.0.0
# ---- AI (Dify BaaS HTTP 客户端) ----
httpx>=0.27.0 # 异步 HTTP 客户端 (调用 Dify API)
# ---- 工具 ----
python-multipart>=0.0.9 # FastAPI 表单/文件上传支持
pandas>=2.0.0
openpyxl>=3.1.0
+35
View File
@@ -0,0 +1,35 @@
import psycopg2
import bcrypt
import json
import uuid
try:
    # NOTE(review): credentials and host are hard-coded; keep this one-off
    # script out of production images and prefer environment variables.
    conn = psycopg2.connect(
        host="192.168.1.85",
        port=5432,
        user="admin",
        password="admin_password_2026",
        dbname="lubrication_crm"
    )
    cur = conn.cursor()
    # bcrypt hash of the default password "admin123".
    hash_pw = bcrypt.hashpw(b"admin123", bcrypt.gensalt()).decode("utf-8")
    # NOTE(review): permissions is written as a JSON array string here, while
    # the User ORM model documents a comma-separated list ("view,edit") —
    # confirm which format the application actually reads.
    permissions_json = json.dumps(["view", "edit"])
    # NOTE(review): a random UUID is used for users.id, but the User ORM model
    # declares id as an autoincrement integer — verify the live schema.
    new_uuid = str(uuid.uuid4())
    # Idempotent upsert keyed on the unique username; reruns only refresh the hash.
    cur.execute(
        """
        INSERT INTO users (id, username, password_hash, role, permissions, is_active)
        VALUES (%s, %s, %s, %s, %s, true)
        ON CONFLICT (username) DO UPDATE SET password_hash=EXCLUDED.password_hash
        """,
        (new_uuid, "admin", hash_pw, "admin", permissions_json)
    )
    conn.commit()
    print("Admin user inserted/updated via SQL script.")
except Exception as e:
    # Best-effort ops script: report the error and fall through to cleanup.
    print(f"Database error: {e}")
finally:
    # Close only what was actually opened (connect/cursor may have failed).
    if 'cur' in locals():
        cur.close()
    if 'conn' in locals():
        conn.close()
+64
View File
@@ -0,0 +1,64 @@
import psycopg2
import uuid
from datetime import datetime
try:
    # NOTE(review): hard-coded connection details; dev-only seed script.
    conn = psycopg2.connect(
        host="192.168.1.85",
        port=5432,
        user="admin",
        password="admin_password_2026",
        dbname="lubrication_crm"
    )
    cur = conn.cursor()
    # 1. Ensure the well-known test client exists (the UUID that LogEntry.vue
    #    writes by default); idempotent via ON CONFLICT DO NOTHING.
    test_client_uuid = "a37dbd8b-a9c0-4deb-ad76-83a0d29bbf28"
    cur.execute(
        """
        INSERT INTO clients (id, name, contact_person, phone)
        VALUES (%s, '自动化联调测试客户', '张经理', '13800138000')
        ON CONFLICT (id) DO NOTHING
        """,
        (test_client_uuid,)
    )
    # 2. Insert sample sales opportunities dated the 15th of the current month
    #    so the dashboard's AI review report has data to aggregate.
    #    NOTE(review): this insert has no conflict handling — each rerun adds
    #    five new rows.
    current_year = datetime.now().year
    current_month = datetime.now().month
    created_str = f"{current_year}-{current_month:02d}-15 10:00:00"
    opportunities = [
        {"stage": "意向", "amount": 50000},
        {"stage": "意向", "amount": 20000},
        {"stage": "谈判", "amount": 150000},
        {"stage": "成交", "amount": 300000},
        {"stage": "流失", "amount": 10000},
    ]
    for opp in opportunities:
        cur.execute(
            """
            INSERT INTO sales_opportunities (id, customer_id, stage, amount, created_at)
            VALUES (%s, %s, %s, %s, %s)
            """,
            (
                str(uuid.uuid4()),
                test_client_uuid,
                opp["stage"],
                opp["amount"],
                created_str,
            )
        )
    conn.commit()
    print("Seed data for logging and reports inserted successfully.")
except Exception as e:
    # Best-effort seed script: report the error and fall through to cleanup.
    print(f"Database error: {e}")
finally:
    # Close only what was actually opened (connect/cursor may have failed).
    if 'cur' in locals():
        cur.close()
    if 'conn' in locals():
        conn.close()
+245
View File
@@ -0,0 +1,245 @@
# -*- coding: utf-8 -*-
"""
SHBL-CRM Integration Tests
Module 3: Business Logic + AI Workflow
Test 1: POST /api/v1/logs - Event-driven AI background task
Test 2: GET /api/v1/reports/monthly - SQL pre-aggregation + AI report
Prerequisites:
- Backend running: uvicorn app.main:app (port 8000)
- PostgreSQL + Alembic migration applied
- Ollama node reachable (for AI tests)
Run: pytest tests/test_api_integration.py -v -s
"""
import asyncio
import logging
import uuid
import httpx
import pytest
import pytest_asyncio
# ---- Logging setup for test output ----
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s | %(name)-12s | %(levelname)-5s | %(message)s",
)
logger = logging.getLogger("test_integration")
# ---- Config ----
BASE_URL = "http://127.0.0.1:8000/api/v1"
# Test fixtures: pre-seeded client & auth
# Must insert a test client into DB before running,
# or use the seed script. This UUID will be used for all tests.
# NOTE: these module-level globals are mutated by the seed_test_data fixture.
TEST_CLIENT_ID: str | None = None  # Populated by fixture
TEST_TOKEN: str | None = None  # Populated by fixture
# ============================================================
# Fixtures
# ============================================================
@pytest_asyncio.fixture(scope="module")
async def seed_test_data():
    """
    Seed a test client and admin user into the database,
    then authenticate to get a JWT token.

    Side effects: sets the module globals TEST_CLIENT_ID / TEST_TOKEN, and
    deletes the seeded rows again after all tests in the module finish.
    """
    global TEST_CLIENT_ID, TEST_TOKEN
    # Create admin user + test client via direct DB insert
    import psycopg2
    import bcrypt
    admin_hash = bcrypt.hashpw(b"admin123", bcrypt.gensalt()).decode("utf-8")
    test_client_uuid = str(uuid.uuid4())
    conn = psycopg2.connect(
        host="192.168.1.85",
        port=5432,
        user="admin",
        password="admin_password_2026",
        dbname="lubrication_crm",
    )
    cur = conn.cursor()
    # Seed admin user (idempotent)
    cur.execute("""
        INSERT INTO users (id, username, password_hash, role, is_active)
        VALUES (%s, 'test_admin', %s, 'admin', true)
        ON CONFLICT (username) DO UPDATE SET password_hash = EXCLUDED.password_hash
    """, (str(uuid.uuid4()), admin_hash))
    # Seed test client
    # NOTE(review): unlike the user insert above, this has no ON CONFLICT
    # clause; if cleanup is skipped (e.g. the login assertion below fails),
    # reruns leave orphaned client rows behind.
    cur.execute("""
        INSERT INTO clients (id, name, contact_person, phone)
        VALUES (%s, 'Test_Integration_Client', 'Zhang San', '13800138000')
    """, (test_client_uuid,))
    conn.commit()
    cur.close()
    conn.close()
    TEST_CLIENT_ID = test_client_uuid
    # Authenticate to get JWT token
    async with httpx.AsyncClient(base_url=BASE_URL, timeout=10.0) as client:
        resp = await client.post("/auth/login", json={
            "username": "test_admin",
            "password": "admin123",
        })
        assert resp.status_code == 200, f"Login failed: {resp.text}"
        TEST_TOKEN = resp.json()["access_token"]
        logger.info("Auth token acquired for test_admin")
    yield
    # Cleanup: remove test data (child tables first to satisfy FK constraints)
    conn = psycopg2.connect(
        host="192.168.1.85", port=5432, user="admin",
        password="admin_password_2026", dbname="lubrication_crm",
    )
    cur = conn.cursor()
    cur.execute("DELETE FROM customer_tags WHERE customer_id = %s", (test_client_uuid,))
    cur.execute("DELETE FROM follow_up_todos WHERE customer_id = %s", (test_client_uuid,))
    cur.execute("DELETE FROM customer_logs WHERE customer_id = %s", (test_client_uuid,))
    cur.execute("DELETE FROM sales_opportunities WHERE customer_id = %s", (test_client_uuid,))
    cur.execute("DELETE FROM clients WHERE id = %s", (test_client_uuid,))
    conn.commit()
    cur.close()
    conn.close()
    logger.info("Test data cleaned up")
# ============================================================
# Test Case 1: POST /api/v1/logs + AI Background Task
# ============================================================
@pytest.mark.asyncio
async def test_create_log_and_ai_background_task(seed_test_data):
    """
    Test the event-driven AI processing pipeline:
    1. POST log → 200 OK (immediate)
    2. Wait for BackgroundTasks to call Ollama
    3. Verify tags & todos were written to DB
    """
    assert TEST_TOKEN, "Auth token missing"
    assert TEST_CLIENT_ID, "Test client ID missing"
    headers = {"Authorization": f"Bearer {TEST_TOKEN}"}
    log_content = "今天拜访了张总,客户对价格敏感,要求下周二前给折扣方案"
    # ---- Step 1: Submit log ----
    async with httpx.AsyncClient(base_url=BASE_URL, timeout=15.0) as client:
        resp = await client.post(
            "/logs",
            json={
                "customer_id": TEST_CLIENT_ID,
                "content": log_content,
            },
            headers=headers,
        )
    # ---- Assert 1: Immediate response ----
    assert resp.status_code == 200, f"Expected 200, got {resp.status_code}: {resp.text}"
    data = resp.json()
    assert "id" in data, "Response missing 'id' field"
    log_id = data["id"]
    logger.info("Log submitted successfully: id=%s", log_id)
    # ---- Step 2: Wait for AI background task ----
    # BackgroundTasks calls Ollama (qwen3:14b), may take 5-30s
    # NOTE(review): a fixed sleep is race-prone on slow AI nodes; polling the
    # DB with a timeout would be more reliable.
    logger.info("Waiting 10s for AI background processing...")
    await asyncio.sleep(10)
    # ---- Assert 2: Verify DB side effects ----
    import psycopg2
    conn = psycopg2.connect(
        host="192.168.1.85", port=5432, user="admin",
        password="admin_password_2026", dbname="lubrication_crm",
    )
    cur = conn.cursor()
    # Check customer_tags
    cur.execute(
        "SELECT tag_name FROM customer_tags WHERE customer_id = %s",
        (TEST_CLIENT_ID,),
    )
    tags = [r[0] for r in cur.fetchall()]
    logger.info("Generated tags: %s", tags)
    # Check follow_up_todos
    cur.execute(
        "SELECT task_desc, status FROM follow_up_todos WHERE customer_id = %s",
        (TEST_CLIENT_ID,),
    )
    todos = cur.fetchall()
    logger.info("Generated todos: %s", todos)
    cur.close()
    conn.close()
    # AI may or may not succeed (Ollama connectivity),
    # but if it did, we should see results.
    # Use soft assertion: log warning if empty, don't fail hard
    # (Ollama node might be unreachable in test env)
    if tags:
        assert len(tags) <= 3, f"Expected at most 3 tags, got {len(tags)}"
        logger.info("PASS: AI generated %d tag(s)", len(tags))
    else:
        logger.warning(
            "WARNING: No tags generated. "
            "Check Ollama connectivity and backend logs for errors."
        )
    if todos:
        assert todos[0][1] == "pending", "Todo status should be 'pending'"
        logger.info("PASS: AI generated todo: %s", todos[0][0][:80])
    else:
        logger.warning(
            "WARNING: No todos generated. "
            "Check Ollama connectivity and backend logs for errors."
        )
# ============================================================
# Test Case 2: GET /api/v1/reports/monthly
# ============================================================
@pytest.mark.asyncio
async def test_monthly_sales_report_generation(seed_test_data):
    """
    Test SQL pre-aggregation + AI report generation:
    1. GET /reports/monthly → 200
    2. Response contains metrics list + report string
    """
    assert TEST_TOKEN, "Auth token missing"
    headers = {"Authorization": f"Bearer {TEST_TOKEN}"}
    # ---- Step 1: Request monthly report ----
    # This is a synchronous wait for AI generation, set generous timeout
    async with httpx.AsyncClient(base_url=BASE_URL, timeout=90.0) as client:
        logger.info("Requesting monthly report (may take 30-60s for AI generation)...")
        resp = await client.get("/reports/monthly", headers=headers)
    # ---- Assert 1: Status code ----
    assert resp.status_code == 200, f"Expected 200, got {resp.status_code}: {resp.text}"
    data = resp.json()
    logger.info("Report response received (%d bytes)", len(resp.text))
    # ---- Assert 2: Response structure ----
    # NOTE(review): the report text is AI-generated, so only its structure
    # (non-empty string) is asserted, not its content.
    assert "metrics" in data, "Response missing 'metrics' field"
    assert "report" in data, "Response missing 'report' field"
    assert isinstance(data["metrics"], list), "'metrics' should be a list"
    assert isinstance(data["report"], str), "'report' should be a string"
    assert len(data["report"]) > 0, "'report' should be non-empty"
    logger.info("Metrics: %s", data["metrics"])
    logger.info("Report preview: %s...", data["report"][:200])
    logger.info("PASS: Monthly report generated (%d chars)", len(data["report"]))