Commit 13111c9f authored by uuo00_n's avatar uuo00_n

merge: 合并 gitlab/main 分支,解决 docker-compose.prod.yml 冲突

- 解决 edu-service、llm-service 环境变量冲突,采用 gitlab/main 的 ${VAR} 风格
- 同步 Redis 服务密码配置(requirepass ${REDIS_PASSWORD})
- 合并 deploy.sh、docker-compose.yml 及各微服务更新
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
parents 94e9c927 ff10be50
#!/bin/bash
# =============================================================================
# LLM Filter 项目生产环境部署脚本 (Ubuntu/Debian)
# =============================================================================
# Orchestrates a full production deployment via Docker Compose:
#   1. root check                    2. Docker detection / auto-install (Ubuntu)
#   3. Compose command detection     4. .env generation & review
#   5. data directories              6. build, start, wait, show status
#
# Requires: root privileges; network access for apt/Docker Hub on first run.
set -u

# --- Terminal colours --------------------------------------------------------
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# die MESSAGE — print an error to stderr and abort the deployment.
die() {
  echo -e "${RED}$*${NC}" >&2
  exit 1
}

echo -e "${BLUE}======================================================${NC}"
echo -e "${BLUE} LLM Filter 系统 - 生产环境部署脚本 (Ubuntu) ${NC}"
echo -e "${BLUE}======================================================${NC}"
echo ""

# 1. Privilege check — apt and the Docker daemon both need root.
if [ "$EUID" -ne 0 ]; then
  echo -e "${YELLOW}[提示] 请使用 sudo 运行此脚本,以便管理 Docker 服务。${NC}"
  echo -e "示例: sudo ./deploy.sh"
  exit 1
fi

# 2. System environment: make sure Docker exists, installing it if necessary.
echo -e "${BLUE}[1/5] 检查系统环境...${NC}"
if ! command -v docker > /dev/null 2>&1; then
  echo -e "${YELLOW}未检测到 Docker,正在尝试自动安装 (适用于 Ubuntu)...${NC}"
  apt-get update
  apt-get install -y ca-certificates curl gnupg
  install -m 0755 -d /etc/apt/keyrings
  curl -fsSL https://download.docker.com/linux/ubuntu/gpg \
    | gpg --dearmor -o /etc/apt/keyrings/docker.gpg
  chmod a+r /etc/apt/keyrings/docker.gpg
  # Register the official Docker apt repository for this Ubuntu release.
  echo \
    "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu \
    $(. /etc/os-release && echo "$VERSION_CODENAME") stable" \
    | tee /etc/apt/sources.list.d/docker.list > /dev/null
  apt-get update
  apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
  command -v docker > /dev/null 2>&1 \
    || die "[错误] Docker 安装失败,请手动安装后重试。"
  echo -e "${GREEN}Docker 安装成功!${NC}"
else
  echo -e "${GREEN}Docker 已安装。${NC}"
fi

# Prefer the Compose v2 plugin; fall back to the legacy docker-compose binary.
# COMPOSE_CMD is intentionally expanded unquoted later so "docker compose"
# word-splits into command + subcommand.
if docker compose version > /dev/null 2>&1; then
  COMPOSE_CMD="docker compose"
elif command -v docker-compose > /dev/null 2>&1; then
  COMPOSE_CMD="docker-compose"
else
  echo -e "${RED}[错误] 未找到 Docker Compose 插件。${NC}"
  echo "请尝试运行: apt-get install docker-compose-plugin"
  exit 1
fi
echo -e "${GREEN}使用 Compose 命令: $COMPOSE_CMD${NC}"

# 3. Configuration: ensure a .env file exists; generate one when missing and
#    give the operator a chance to review secrets before starting services.
echo -e "${BLUE}[2/5] 检查环境配置...${NC}"
if [ ! -f .env ]; then
  echo -e "${YELLOW}.env 文件不存在,正在生成默认配置...${NC}"
  command -v python3 > /dev/null 2>&1 \
    || die "[错误] 未找到 python3,无法自动生成配置。请手动创建 .env 文件。"
  python3 scripts/generate_secrets.py \
    || die "[错误] 生成 .env 失败。"
  echo -e "${GREEN}.env 文件已生成。请务必检查其中的配置(如数据库密码、API Key)。${NC}"
  echo -e "${YELLOW}是否现在暂停脚本以编辑 .env 文件? (y/n)${NC}"
  read -r -p "输入 y 编辑,n 继续: " choice
  if [[ "$choice" =~ ^[Yy]$ ]]; then
    echo "请编辑 .env 文件后重新运行此脚本。"
    exit 0
  fi
else
  echo -e "${GREEN}.env 文件已存在。${NC}"
fi

# 4. Data directories used as bind mounts by the containers.
echo -e "${BLUE}[3/5] 准备数据目录...${NC}"
mkdir -p logs postgres_data mongo_data redis_data
# NOTE(review): 777 is deliberately permissive so the container users can
# write logs; tighten to a specific uid/gid when the container UIDs are known.
chmod 777 logs

# 5. Deploy: one down/build/up cycle, with an explicit failure check on "up".
echo -e "${BLUE}[4/5] 构建并启动服务...${NC}"
echo "停止旧容器..."
$COMPOSE_CMD -f docker-compose.prod.yml down --remove-orphans
echo "拉取/构建镜像..."
$COMPOSE_CMD -f docker-compose.prod.yml build
echo "启动服务..."
if ! $COMPOSE_CMD -f docker-compose.prod.yml up -d; then
  echo -e "${RED}[错误] 服务启动失败,请检查 Docker 日志。${NC}"
  exit 1
fi

# 6. Wait for services to come up, then show status and access URLs.
echo -e "${BLUE}[5/5] 等待服务就绪...${NC}"

# progress_bar SECONDS — render a textual progress bar for SECONDS seconds.
# Requires bc for the fractional step arithmetic; callers must check for bc.
progress_bar() {
  local duration=$1
  local interval=0.5
  local steps
  steps=$(echo "$duration / $interval" | bc)
  local i=0
  local j
  while [ "$i" -lt "$steps" ]; do
    echo -ne "Waiting... ["
    for ((j = 0; j < i; j++)); do echo -ne "#"; done
    for ((j = i; j < steps; j++)); do echo -ne " "; done
    echo -ne "]\r"
    sleep "$interval"
    i=$((i + 1))
  done
  echo ""
}

# Fall back to a plain sleep when bc is unavailable.
if command -v bc > /dev/null 2>&1; then
  progress_bar 10
else
  sleep 10
fi

echo -e "${GREEN}=== 部署完成! ===${NC}"
echo ""
echo "服务运行状态:"
$COMPOSE_CMD -f docker-compose.prod.yml ps
echo ""
echo "访问地址 (本地部署请使用 localhost,服务器部署请使用服务器IP):"
echo "- API 网关: http://localhost:8080"
echo "- Auth Service 文档: http://localhost:8080/docs/auth/"
echo "- Edu Service 文档: http://localhost:8080/docs/edu/"
echo "- LLM Service 文档: http://localhost:8080/docs/llm/"
echo "- Security Service 文档: http://localhost:8080/docs/security/"
echo ""
echo -e "${YELLOW}注意: 如果是首次部署,请确保已初始化数据库。${NC}"
echo -e "查看日志命令: $COMPOSE_CMD -f docker-compose.prod.yml logs -f"
......@@ -7,17 +7,17 @@ services:
container_name: llm-filter-db
restart: always
environment:
POSTGRES_USER: admin
POSTGRES_PASSWORD: password
POSTGRES_DB: llm_filter_db
POSTGRES_USER: ${DB_USER}
POSTGRES_PASSWORD: ${DB_PASSWORD}
POSTGRES_DB: ${DB_NAME}
ports:
- "5433:5432"
- "${POSTGRES_PORT_EXTERNAL:-5433}:5432"
volumes:
- postgres_data:/var/lib/postgresql/data
networks:
- llm-network
healthcheck:
test: ["CMD-SHELL", "pg_isready -U admin -d llm_filter_db"]
test: ["CMD-SHELL", "pg_isready -U ${DB_USER} -d ${DB_NAME}"]
interval: 10s
timeout: 5s
retries: 5
......@@ -30,14 +30,18 @@ services:
container_name: llm-filter-auth
restart: always
ports:
- "8081:8081"
- "${AUTH_SERVICE_PORT:-8081}:8081"
environment:
- DB_HOST=postgres
- DB_USER=admin
- DB_PASSWORD=password
- DB_NAME=llm_filter_db
- DB_PORT=5432
- JWT_SECRET=llm_filter_secure_secret_key_2025_update_must_be_32_bytes
- DB_HOST=${DB_HOST}
- DB_PORT=${DB_PORT}
- DB_USER=${DB_USER}
- DB_PASSWORD=${DB_PASSWORD}
- DB_NAME=${DB_NAME}
- JWT_SECRET=${JWT_SECRET}
- ADMIN_USERNAME=${ADMIN_USERNAME}
- ADMIN_PASSWORD=${ADMIN_PASSWORD}
- ADMIN_EMAIL=${ADMIN_EMAIL}
- TZ=${TZ}
depends_on:
postgres:
condition: service_healthy
......@@ -52,16 +56,23 @@ services:
container_name: llm-filter-edu
restart: always
ports:
- "8082:8082"
- "${EDU_SERVICE_PORT:-8082}:8082"
environment:
- DB_HOST=postgres
- DB_USER=admin
- DB_PASSWORD=password
- DB_NAME=llm_filter_db
- DB_PORT=5432
- JWT_SECRET=llm_filter_secure_secret_key_2025_update_must_be_32_bytes
- SPRING_REDIS_HOST=redis
- SPRING_REDIS_PORT=6379
- DB_HOST=${DB_HOST}
- DB_PORT=${DB_PORT}
- DB_USER=${DB_USER}
- DB_PASSWORD=${DB_PASSWORD}
- DB_NAME=${DB_NAME}
- JWT_SECRET=${JWT_SECRET}
- SERVER_PORT=${SERVER_PORT}
- JPA_DDL_AUTO=${JPA_DDL_AUTO}
- JPA_SHOW_SQL=${JPA_SHOW_SQL}
- JPA_FORMAT_SQL=${JPA_FORMAT_SQL}
- JPA_DIALECT=${JPA_DIALECT}
- LOGGING_LEVEL=${LOGGING_LEVEL}
- SPRING_REDIS_HOST=${REDIS_HOST}
- SPRING_REDIS_PORT=${REDIS_PORT}
- TZ=${TZ}
depends_on:
postgres:
condition: service_healthy
......@@ -77,28 +88,38 @@ services:
dockerfile: Dockerfile
container_name: llm-filter-llm
restart: always
# 生产环境不需要 --reload
command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --proxy-headers --forwarded-allow-ips '*'
# 生产环境:移除--reload,使用多worker提高性能
command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --workers 4 --proxy-headers --forwarded-allow-ips '*'
ports:
- "8000:8000"
- "${LLM_SERVICE_PORT:-8000}:8000"
# 生产环境移除代码挂载,使用镜像内的代码
volumes:
- ./scripts:/app/scripts
environment:
- MONGODB_URL=mongodb://mongo:27017
- DB_NAME=llm_filter_db
- SECRET_KEY=llm_filter_secure_secret_key_2025_update_must_be_32_bytes
- ALGORITHM=HS256
- ACCESS_TOKEN_EXPIRE_MINUTES=30
- TERM_START_DATE=2025-09-01
- OLLAMA_BASE_URL=http://192.168.6.6:11434/
- OLLAMA_MODEL=deepseek-r1:14b
- MONGODB_URL=${MONGODB_URL}
- DB_NAME=${DB_NAME}
- SECRET_KEY=${JWT_SECRET}
- ALGORITHM=${ALGORITHM}
- ACCESS_TOKEN_EXPIRE_MINUTES=${ACCESS_TOKEN_EXPIRE_MINUTES}
- TERM_START_DATE=${TERM_START_DATE}
- OLLAMA_BASE_URL=${OLLAMA_BASE_URL}
- OLLAMA_MODEL=${OLLAMA_MODEL}
# Dify Configuration
- DIFY_API_URL=http://192.168.6.6/v1
- DIFY_API_KEY=app-lkK33EQOVXXrjD9x3SKbItr7
- DIFY_API_URL=${DIFY_API_URL}
- DIFY_API_KEY=${DIFY_API_KEY}
- DIFY_RESPONSE_MODE=${DIFY_RESPONSE_MODE}
- DIFY_MESSAGE_ENDPOINT=${DIFY_MESSAGE_ENDPOINT}
# Redis Configuration
- REDIS_HOST=redis
- REDIS_PORT=6379
- REDIS_HOST=${REDIS_HOST}
- REDIS_PORT=${REDIS_PORT}
- REDIS_DB=${REDIS_DB}
- REDIS_PASSWORD=${REDIS_PASSWORD}
# App Configuration
- APP_BASE_URL=${APP_BASE_URL}
- API_V1_STR=${API_V1_STR}
- APP_MODE=${APP_MODE}
- CORS_ALLOWED_ORIGINS=${CORS_ALLOWED_ORIGINS}
- TZ=${TZ}
depends_on:
- mongo
- redis
......@@ -112,17 +133,31 @@ services:
dockerfile: Dockerfile
container_name: llm-filter-security
restart: always
# 生产环境不需要 --reload
command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --proxy-headers --forwarded-allow-ips '*'
# 生产环境:移除--reload,使用多worker提高性能
command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --workers 4 --proxy-headers --forwarded-allow-ips '*'
ports:
- "8003:8000"
- "${SECURITY_SERVICE_PORT:-8003}:8000"
# 生产环境移除代码挂载
environment:
- JWT_SECRET=llm_filter_secure_secret_key_2025_update_must_be_32_bytes
- DIFY_API_URL=http://192.168.6.6/v1
- DIFY_API_KEY=app-ggTb0oC9WXQQm2r7KrvVUS6v
- MONGODB_URL=mongodb://mongo:27017
- MONGODB_DB_NAME=security_service_db
- JWT_SECRET=${JWT_SECRET}
- ALGORITHM=${ALGORITHM}
- DIFY_API_URL=${DIFY_API_URL}
- DIFY_API_KEY=${DIFY_API_KEY}
- DIFY_RESPONSE_MODE=${DIFY_RESPONSE_MODE}
- MONGODB_URL=${MONGODB_URL}
- MONGODB_DB_NAME=${MONGODB_DB_NAME}
- REDIS_HOST=${REDIS_HOST}
- REDIS_PORT=${REDIS_PORT}
- REDIS_DB=${REDIS_DB}
- REDIS_PASSWORD=${REDIS_PASSWORD}
- ZABBIX_URL=${ZABBIX_URL:-http://localhost}
- ZABBIX_USERNAME=${ZABBIX_USERNAME:-Admin}
- ZABBIX_PASSWORD=${ZABBIX_PASSWORD:-zabbix}
- ZABBIX_SYNC_INTERVAL=${ZABBIX_SYNC_INTERVAL:-3600}
- ZABBIX_AUTO_SYNC=${ZABBIX_AUTO_SYNC:-true}
- LOG_LEVEL=${LOG_LEVEL:-INFO}
- LOG_FORMAT=${LOG_FORMAT:-json}
- TZ=${TZ}
networks:
- llm-network
......@@ -131,10 +166,13 @@ services:
image: redis:7-alpine
container_name: llm-filter-redis
restart: always
command: redis-server --requirepass ${REDIS_PASSWORD}
ports:
- "6379:6379"
- "${REDIS_PORT_EXTERNAL:-6379}:6379"
volumes:
- redis_data:/data
environment:
- TZ=${TZ}
networks:
- llm-network
......@@ -144,7 +182,7 @@ services:
container_name: llm-filter-mongo
restart: always
ports:
- "27017:27017"
- "${MONGODB_PORT_EXTERNAL:-27017}:27017"
volumes:
- mongo_data:/data/db
networks:
......@@ -156,9 +194,11 @@ services:
container_name: llm-filter-gateway
restart: always
ports:
- "8080:80"
- "${GATEWAY_PORT:-8080}:80"
volumes:
- ./gateway/nginx.conf:/etc/nginx/nginx.conf:ro
environment:
- TZ=${TZ}
depends_on:
- auth-service
- edu-service
......
......@@ -6,11 +6,11 @@ services:
image: postgres:15-alpine
container_name: llm-filter-db
environment:
POSTGRES_USER: admin
POSTGRES_PASSWORD: password
POSTGRES_DB: llm_filter_db
POSTGRES_USER: ${DB_USER}
POSTGRES_PASSWORD: ${DB_PASSWORD}
POSTGRES_DB: ${DB_NAME}
ports:
- "5433:5432"
- "${POSTGRES_PORT_EXTERNAL:-5433}:5432"
volumes:
- postgres_data:/var/lib/postgresql/data
networks:
......@@ -20,10 +20,13 @@ services:
redis:
image: redis:7-alpine
container_name: llm-filter-redis
command: redis-server --requirepass ${REDIS_PASSWORD}
ports:
- "6379:6379"
- "${REDIS_PORT_EXTERNAL:-6379}:6379"
volumes:
- redis_data:/data
environment:
- TZ=${TZ}
networks:
- llm-network
......@@ -34,36 +37,47 @@ services:
dockerfile: Dockerfile
container_name: llm-filter-auth
ports:
- "8081:8081"
- "${AUTH_SERVICE_PORT:-8081}:8081"
environment:
- DB_HOST=postgres
- DB_USER=admin
- DB_PASSWORD=password
- DB_NAME=llm_filter_db
- DB_PORT=5432
- JWT_SECRET=llm_filter_secure_secret_key_2025_update_must_be_32_bytes
- DB_HOST=${DB_HOST}
- DB_PORT=${DB_PORT}
- DB_USER=${DB_USER}
- DB_PASSWORD=${DB_PASSWORD}
- DB_NAME=${DB_NAME}
- JWT_SECRET=${JWT_SECRET}
- ADMIN_USERNAME=${ADMIN_USERNAME}
- ADMIN_PASSWORD=${ADMIN_PASSWORD}
- ADMIN_EMAIL=${ADMIN_EMAIL}
- TZ=${TZ}
depends_on:
- postgres
networks:
- llm-network
# 教务核心服务 (Go)
# 教务核心服务
edu-service:
build:
context: ./microservices/edu-service
dockerfile: Dockerfile
container_name: llm-filter-edu
ports:
- "8082:8082"
- "${EDU_SERVICE_PORT:-8082}:8082"
environment:
- DB_HOST=postgres
- DB_USER=admin
- DB_PASSWORD=password
- DB_NAME=llm_filter_db
- DB_PORT=5432
- JWT_SECRET=llm_filter_secure_secret_key_2025_update_must_be_32_bytes
- SPRING_REDIS_HOST=redis
- SPRING_REDIS_PORT=6379
- DB_HOST=${DB_HOST}
- DB_PORT=${DB_PORT}
- DB_USER=${DB_USER}
- DB_PASSWORD=${DB_PASSWORD}
- DB_NAME=${DB_NAME}
- JWT_SECRET=${JWT_SECRET}
- SERVER_PORT=${SERVER_PORT}
- JPA_DDL_AUTO=${JPA_DDL_AUTO}
- JPA_SHOW_SQL=${JPA_SHOW_SQL}
- JPA_FORMAT_SQL=${JPA_FORMAT_SQL}
- JPA_DIALECT=${JPA_DIALECT}
- LOGGING_LEVEL=${LOGGING_LEVEL}
- SPRING_REDIS_HOST=${REDIS_HOST}
- SPRING_REDIS_PORT=${REDIS_PORT}
- TZ=${TZ}
depends_on:
- postgres
- redis
......@@ -78,25 +92,35 @@ services:
container_name: llm-filter-llm
command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload --proxy-headers --forwarded-allow-ips '*'
ports:
- "8000:8000"
- "${LLM_SERVICE_PORT:-8000}:8000"
volumes:
- ./microservices/llm-service:/app
- ./scripts:/app/scripts
environment:
- MONGODB_URL=mongodb://mongo:27017
- DB_NAME=llm_filter_db
- SECRET_KEY=llm_filter_secure_secret_key_2025_update_must_be_32_bytes
- ALGORITHM=HS256
- ACCESS_TOKEN_EXPIRE_MINUTES=30
- TERM_START_DATE=2025-09-01
- OLLAMA_BASE_URL=http://192.168.6.6:11434/
- OLLAMA_MODEL=deepseek-r1:14b
- MONGODB_URL=${MONGODB_URL}
- DB_NAME=${DB_NAME}
- SECRET_KEY=${JWT_SECRET}
- ALGORITHM=${ALGORITHM}
- ACCESS_TOKEN_EXPIRE_MINUTES=${ACCESS_TOKEN_EXPIRE_MINUTES}
- TERM_START_DATE=${TERM_START_DATE}
- OLLAMA_BASE_URL=${OLLAMA_BASE_URL}
- OLLAMA_MODEL=${OLLAMA_MODEL}
# Dify Configuration
- DIFY_API_URL=http://192.168.6.6/v1
- DIFY_API_KEY=app-sLnrbNjEi1GiTDGgL2B2DwLZ
- DIFY_API_URL=${DIFY_API_URL}
- DIFY_API_KEY=${DIFY_API_KEY}
- DIFY_RESPONSE_MODE=${DIFY_RESPONSE_MODE}
- DIFY_MESSAGE_ENDPOINT=${DIFY_MESSAGE_ENDPOINT}
# Redis Configuration
- REDIS_HOST=redis
- REDIS_PORT=6379
- REDIS_HOST=${REDIS_HOST}
- REDIS_PORT=${REDIS_PORT}
- REDIS_DB=${REDIS_DB}
- REDIS_PASSWORD=${REDIS_PASSWORD}
# App Configuration
- APP_BASE_URL=${APP_BASE_URL}
- API_V1_STR=${API_V1_STR}
- APP_MODE=${APP_MODE}
- CORS_ALLOWED_ORIGINS=${CORS_ALLOWED_ORIGINS}
- TZ=${TZ}
depends_on:
- mongo
- redis
......@@ -111,15 +135,29 @@ services:
container_name: llm-filter-security
command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload --proxy-headers --forwarded-allow-ips '*'
ports:
- "8003:8000"
- "${SECURITY_SERVICE_PORT:-8003}:8000"
volumes:
- ./microservices/security-service/app:/app/app
environment:
- JWT_SECRET=llm_filter_secure_secret_key_2025_update_must_be_32_bytes
- DIFY_API_URL=http://192.168.6.6/v1
- DIFY_API_KEY=app-ggTb0oC9WXQQm2r7KrvVUS6v
- MONGODB_URL=mongodb://mongo:27017
- MONGODB_DB_NAME=security_service_db
- JWT_SECRET=${JWT_SECRET}
- ALGORITHM=${ALGORITHM}
- DIFY_API_URL=${DIFY_API_URL}
- DIFY_API_KEY=${DIFY_API_KEY}
- DIFY_RESPONSE_MODE=${DIFY_RESPONSE_MODE}
- MONGODB_URL=${MONGODB_URL}
- MONGODB_DB_NAME=${MONGODB_DB_NAME}
- REDIS_HOST=${REDIS_HOST}
- REDIS_PORT=${REDIS_PORT}
- REDIS_DB=${REDIS_DB}
- REDIS_PASSWORD=${REDIS_PASSWORD}
- ZABBIX_URL=${ZABBIX_URL:-http://localhost}
- ZABBIX_USERNAME=${ZABBIX_USERNAME:-Admin}
- ZABBIX_PASSWORD=${ZABBIX_PASSWORD:-zabbix}
- ZABBIX_SYNC_INTERVAL=${ZABBIX_SYNC_INTERVAL:-3600}
- ZABBIX_AUTO_SYNC=${ZABBIX_AUTO_SYNC:-true}
- LOG_LEVEL=${LOG_LEVEL:-INFO}
- LOG_FORMAT=${LOG_FORMAT:-text}
- TZ=${TZ}
depends_on:
- mongo
networks:
......@@ -130,7 +168,7 @@ services:
image: mongo:latest
container_name: llm-filter-mongo
ports:
- "27017:27017"
- "${MONGODB_PORT_EXTERNAL:-27017}:27017"
volumes:
- mongo_data:/data/db
networks:
......@@ -141,9 +179,11 @@ services:
image: nginx:alpine
container_name: llm-filter-gateway
ports:
- "8080:80"
- "${GATEWAY_PORT:-8080}:80"
volumes:
- ./gateway/nginx.conf:/etc/nginx/nginx.conf:ro
environment:
- TZ=${TZ}
depends_on:
- auth-service
- edu-service
......
module auth-service
go 1.24.3
go 1.24.2
require github.com/gin-gonic/gin v1.11.0
......@@ -12,6 +12,7 @@ require (
github.com/go-openapi/jsonreference v0.19.6 // indirect
github.com/go-openapi/spec v0.20.4 // indirect
github.com/go-openapi/swag v0.19.15 // indirect
github.com/joho/godotenv v1.5.1 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/mailru/easyjson v0.7.6 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
......
......@@ -59,6 +59,8 @@ github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
......
......@@ -60,11 +60,12 @@ func (s *AuthService) Register(req *RegisterRequest) (*model.User, error) {
// 创建用户
user := &model.User{
Username: req.Username,
Email: req.Email,
Password: hashedPwd,
Role: "user",
Edition: "edu",
Username: req.Username,
Email: req.Email,
Password: hashedPwd,
Role: "user",
RoleLevel: 1, // 默认普通用户等级
Edition: "edu",
}
if err := s.repo.Create(user); err != nil {
......
......@@ -10,10 +10,12 @@ import (
"fmt"
"log"
"os"
"path/filepath"
_ "auth-service/docs" // docs is generated by Swag CLI
"github.com/gin-gonic/gin"
"github.com/joho/godotenv"
swaggerFiles "github.com/swaggo/files"
ginSwagger "github.com/swaggo/gin-swagger"
"gorm.io/driver/postgres"
......@@ -30,6 +32,19 @@ import (
// @name Authorization
func main() {
// 尝试加载根目录 .env 文件 (用于本地开发)
// 优先级:当前目录 .env > 向上两级 .env > 向上三级 .env
// 注意:Docker 环境下通常不包含这些 .env 文件,而是直接通过 environment 注入,
// 所以这里的加载失败不应该阻断程序运行,只作为开发辅助。
// 1. 尝试当前目录
godotenv.Load()
// 2. 尝试项目根目录 (假设在 microservices/auth-service 下运行)
// 根目录在 ../../.env
rootEnvPath := filepath.Join("..", "..", ".env")
godotenv.Load(rootEnvPath)
// 获取环境变量配置
dbHost := os.Getenv("DB_HOST")
dbUser := os.Getenv("DB_USER")
......@@ -37,21 +52,9 @@ func main() {
dbName := os.Getenv("DB_NAME")
dbPort := os.Getenv("DB_PORT")
// 默认值处理(用于本地开发)
if dbHost == "" {
dbHost = "localhost"
}
if dbUser == "" {
dbUser = "admin"
}
if dbPassword == "" {
dbPassword = "password" // 请确保本地有此密码的数据库或修改此处
}
if dbName == "" {
dbName = "llm_filter_db"
}
if dbPort == "" {
dbPort = "5433"
// 必需的环境变量检查
if dbHost == "" || dbUser == "" || dbPassword == "" || dbName == "" || dbPort == "" {
log.Fatal("Missing required database environment variables: DB_HOST, DB_USER, DB_PASSWORD, DB_NAME, DB_PORT")
}
dsn := fmt.Sprintf("host=%s user=%s password=%s dbname=%s port=%s sslmode=disable TimeZone=Asia/Shanghai",
......@@ -111,7 +114,11 @@ func main() {
}
func initAdminUser(repo *repository.UserRepository) {
adminUsername := "admin"
adminUsername := os.Getenv("ADMIN_USERNAME")
if adminUsername == "" {
adminUsername = "admin"
}
exists, err := repo.ExistsByUsername(adminUsername)
if err != nil {
log.Printf("Failed to check admin user existence: %v", err)
......@@ -123,7 +130,7 @@ func initAdminUser(repo *repository.UserRepository) {
adminPassword := os.Getenv("ADMIN_PASSWORD")
if adminPassword == "" {
adminPassword = "password123"
log.Fatal("ADMIN_PASSWORD environment variable is required for initial admin user creation")
}
hashedPwd, err := utils.HashPassword(adminPassword)
......@@ -132,11 +139,16 @@ func initAdminUser(repo *repository.UserRepository) {
return
}
adminEmail := os.Getenv("ADMIN_EMAIL")
if adminEmail == "" {
adminEmail = "admin@example.com"
}
adminUser := &model.User{
Username: adminUsername,
Email: "admin@example.com",
Email: adminEmail,
Password: hashedPwd,
Role: "administrator", // 兼容旧系统的最高权限角色
Role: "administrator",
RoleLevel: 5,
Edition: "edu",
}
......@@ -144,7 +156,7 @@ func initAdminUser(repo *repository.UserRepository) {
if err := repo.Create(adminUser); err != nil {
log.Printf("Failed to create admin user: %v", err)
} else {
log.Printf("Admin user created successfully. Username: %s, Password: %s", adminUsername, adminPassword)
log.Printf("Admin user created successfully. Username: %s", adminUsername)
}
} else {
log.Println("Admin user already exists.")
......
......@@ -2,6 +2,7 @@ package utils
import (
"fmt"
"log"
"os"
"time"
......@@ -9,12 +10,17 @@ import (
"golang.org/x/crypto/bcrypt"
)
var jwtSecret = []byte("your-secret-key")
var jwtSecret []byte
func init() {
if secret := os.Getenv("JWT_SECRET"); secret != "" {
jwtSecret = []byte(secret)
secret := os.Getenv("JWT_SECRET")
if secret == "" {
log.Fatal("JWT_SECRET environment variable is required")
}
if len(secret) < 32 {
log.Fatal("JWT_SECRET must be at least 32 characters long")
}
jwtSecret = []byte(secret)
}
func GetJWTSecret() []byte {
......
......@@ -21,6 +21,12 @@ import org.springframework.context.annotation.Bean;
public class EduServiceApplication {
public static void main(String[] args) {
// 尝试加载根目录 .env 文件 (用于本地开发)
// 注意:生产环境 Docker 会直接注入环境变量,这里仅作为本地开发辅助
// Spring Boot 默认不加载 .env,这里使用 System.setProperty 模拟或推荐使用插件
// 为了简单起见,这里不做复杂的 .env 解析,建议本地开发使用 IDE 插件或手动设置环境变量
// 或者使用 java-dotenv 库
SpringApplication.run(EduServiceApplication.class, args);
}
......
......@@ -6,6 +6,8 @@ import com.llmfilter.edu.service.ScheduleService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.RequiredArgsConstructor;
import com.llmfilter.edu.security.UserContextHolder;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
......@@ -23,9 +25,18 @@ public class ScheduleController {
@PutMapping("/assign-teacher")
@Operation(summary = "分配任课教师", description = "为特定课程分配任课教师")
public ResponseEntity<Map<String, Boolean>> assignTeacher(@RequestBody AssignTeacherPayload payload) {
public ResponseEntity<Map<String, Object>> assignTeacher(@RequestBody AssignTeacherPayload payload) {
// 权限检查:仅管理员可分配教师
String role = UserContextHolder.getContext().getRole();
if (!"administrator".equals(role)) {
Map<String, Object> error = new HashMap<>();
error.put("success", false);
error.put("message", "Permission denied: Administrator role required");
return ResponseEntity.status(HttpStatus.FORBIDDEN).body(error);
}
scheduleService.assignTeacher(payload);
Map<String, Boolean> result = new HashMap<>();
Map<String, Object> result = new HashMap<>();
result.put("success", true);
return ResponseEntity.ok(result);
}
......
......@@ -21,7 +21,7 @@ import java.security.Key;
@Component
public class JwtAuthenticationFilter implements Filter {
@Value("${jwt.secret:your_secret_key_here}")
@Value("${jwt.secret}")
private String jwtSecret;
private Key key;
......
server.port=8082
server.port=${SERVER_PORT:8082}
spring.application.name=edu-service
# Database Configuration
# ============================================================
# 数据库配置
# ============================================================
spring.datasource.url=jdbc:postgresql://${DB_HOST:localhost}:${DB_PORT:5433}/${DB_NAME:llm_filter_db}
spring.datasource.username=${DB_USER:admin}
spring.datasource.password=${DB_PASSWORD:password}
spring.datasource.password=${DB_PASSWORD}
spring.datasource.driver-class-name=org.postgresql.Driver
# JPA Configuration
spring.jpa.hibernate.ddl-auto=update
spring.jpa.show-sql=true
spring.jpa.properties.hibernate.dialect=org.hibernate.dialect.PostgreSQLDialect
spring.jpa.properties.hibernate.format_sql=true
# ============================================================
# JPA/Hibernate 配置
# ============================================================
spring.jpa.hibernate.ddl-auto=${JPA_DDL_AUTO:update}
spring.jpa.show-sql=${JPA_SHOW_SQL:true}
spring.jpa.properties.hibernate.dialect=${JPA_DIALECT:org.hibernate.dialect.PostgreSQLDialect}
spring.jpa.properties.hibernate.format_sql=${JPA_FORMAT_SQL:true}
# Security
jwt.secret=${JWT_SECRET:your_secret_key_here_must_be_very_long_to_be_secure_at_least_32_bytes}
# ============================================================
# 安全配置
# ============================================================
jwt.secret=${JWT_SECRET}
# Redis Configuration
# ============================================================
# Redis 配置
# ============================================================
spring.redis.host=${SPRING_REDIS_HOST:localhost}
spring.redis.port=${SPRING_REDIS_PORT:6379}
spring.redis.timeout=2000
spring.redis.lettuce.pool.max-active=8
spring.redis.lettuce.pool.max-wait=-1
spring.redis.lettuce.pool.max-idle=8
spring.redis.lettuce.pool.min-idle=0
spring.redis.timeout=${REDIS_TIMEOUT:2000}
spring.redis.lettuce.pool.max-active=${REDIS_POOL_MAX_ACTIVE:8}
spring.redis.lettuce.pool.max-wait=${REDIS_POOL_MAX_WAIT:-1}
spring.redis.lettuce.pool.max-idle=${REDIS_POOL_MAX_IDLE:8}
spring.redis.lettuce.pool.min-idle=${REDIS_POOL_MIN_IDLE:0}
# ============================================================
# 日志配置
# ============================================================
logging.level.root=${LOGGING_LEVEL:INFO}
logging.pattern.console=%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n
FROM python:3.10-slim
# 添加非root用户
RUN useradd -m -u 1000 appuser && \
mkdir -p /app && \
chown -R appuser:appuser /app
WORKDIR /app
# 设置时区
......@@ -11,10 +16,14 @@ COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt -i https://pypi.tuna.tsinghua.edu.cn/simple
# 复制业务代码
COPY . .
COPY --chown=appuser:appuser . .
# 切换到非root用户
USER appuser
# 暴露端口
EXPOSE 8000
# 启动命令
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
# 启动命令(生产环境,多worker)
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--workers", "4"]
import os
# Pydantic v2 中 BaseSettings 已迁移到 pydantic-settings
from pydantic_settings import BaseSettings
from dotenv import load_dotenv
# 加载环境变量 (尝试向上查找 .env)
load_dotenv(verbose=True) # 默认查找当前目录
if not os.getenv("MONGODB_URL"):
# 如果没找到,尝试向上级目录查找(本地开发场景)
load_dotenv(dotenv_path="../../.env")
load_dotenv(dotenv_path="../../../.env") # 备用:防止层级变动
from pathlib import Path
from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
_ROOT_ENV_PATH = Path(__file__).resolve().parents[4] / ".env"
_SERVICE_ENV_PATH = Path(__file__).resolve().parents[2] / ".env"
model_config = SettingsConfigDict(
env_file=(_ROOT_ENV_PATH, _SERVICE_ENV_PATH),
env_file_encoding="utf-8",
case_sensitive=True,
extra="ignore"
)
# 应用配置
APP_NAME: str = "LLM过滤系统"
API_V1_STR: str = "/api/v1"
APP_BASE_URL: str = os.getenv("APP_BASE_URL", "http://localhost:8000")
APP_BASE_URL: str = "http://localhost:8000"
# 数据库配置
MONGODB_URL: str = os.getenv("MONGODB_URL", "mongodb://localhost:27017")
DB_NAME: str = os.getenv("DB_NAME", "llm_filter_db")
MONGODB_URL: str = "mongodb://localhost:27017"
DB_NAME: str = "llm_filter_db"
# JWT配置
SECRET_KEY: str = os.getenv("SECRET_KEY", "your_secret_key_here")
ALGORITHM: str = os.getenv("ALGORITHM", "HS256")
ACCESS_TOKEN_EXPIRE_MINUTES: int = int(os.getenv("ACCESS_TOKEN_EXPIRE_MINUTES", "30"))
SECRET_KEY: str = ""
ALGORITHM: str = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES: int = 30
# Ollama配置
OLLAMA_BASE_URL: str = os.getenv("OLLAMA_BASE_URL", "http://localhost:11434")
OLLAMA_MODEL: str = os.getenv("OLLAMA_MODEL", "llama2")
OLLAMA_BASE_URL: str = "http://192.168.6.6:11434/"
OLLAMA_MODEL: str = "deepseek-r1:14b"
# Dify配置
DIFY_API_URL: str = os.getenv("DIFY_API_URL", "http://192.168.6.6/v1")
DIFY_API_KEY: str = os.getenv("DIFY_API_KEY", "app-sLnrbNjEi1GiTDGgL2B2DwLZ")
DIFY_RESPONSE_MODE: str = os.getenv("DIFY_RESPONSE_MODE", "streaming")
DIFY_MESSAGE_ENDPOINT: str = os.getenv("DIFY_MESSAGE_ENDPOINT", "chat-messages")
DIFY_API_URL: str = "http://192.168.6.6/v1"
# 使用别名从环境变量 DIFY_API_KEY_LLM 读取,代码中仍通过 settings.DIFY_API_KEY 访问
DIFY_API_KEY: str = Field("", alias="DIFY_API_KEY_LLM")
DIFY_RESPONSE_MODE: str = "streaming"
DIFY_MESSAGE_ENDPOINT: str = "chat-messages"
# 应用运行模式开关:仅运行教育版或企业版之一
# 允许的值:"edu" / "biz";若未设置则默认使用 "edu"
# 注意:不再提供混合模式(mixed),如需混合请显式设置并在依赖中放行
APP_MODE: str = os.getenv("APP_MODE", "edu")
CORS_ALLOWED_ORIGINS: str = os.getenv("CORS_ALLOWED_ORIGINS", "*")
GITHUB_DEFAULT_REPO: str = os.getenv("GITHUB_DEFAULT_REPO", "")
GITHUB_TOKEN: str = os.getenv("GITHUB_TOKEN", "")
APP_MODE: str = "edu"
CORS_ALLOWED_ORIGINS: str = "*"
GITHUB_DEFAULT_REPO: str = ""
GITHUB_TOKEN: str = ""
# 学期配置
TERM_START_DATE: str = os.getenv("TERM_START_DATE", "2025-09-01") # 默认开学日期
TERM_START_DATE: str = "2025-09-01" # 默认开学日期
# Redis 配置
REDIS_HOST: str = os.getenv("REDIS_HOST", "localhost")
REDIS_PORT: int = int(os.getenv("REDIS_PORT", "6379"))
REDIS_DB: int = int(os.getenv("REDIS_DB", "0"))
REDIS_PASSWORD: str = os.getenv("REDIS_PASSWORD", "")
REDIS_HOST: str = "localhost"
REDIS_PORT: int = 6379
REDIS_DB: int = 0
REDIS_PASSWORD: str = ""
settings = Settings()
from fastapi import FastAPI
from fastapi import FastAPI, HTTPException
from fastapi.openapi.docs import get_swagger_ui_html
from fastapi.middleware.cors import CORSMiddleware
from app.api.v1.router import api_router
from app.core.config import settings
from app.db.mongodb import connect_to_mongo, close_mongo_connection
from app.db.mongodb import connect_to_mongo, close_mongo_connection, client
from app.utils.sensitive_word_filter import sensitive_word_filter
from datetime import datetime
app = FastAPI(
title=settings.APP_NAME,
......@@ -77,3 +78,45 @@ async def root():
"version": "1.0.0",
"message": "欢迎使用LLM过滤系统API"
}
@app.get("/health")
async def health_check():
    """Liveness probe for the llm-service.

    Returns service metadata plus per-component status. Responds 503 when
    the MongoDB client has not been initialised — the original version
    computed ``db_status = "error"`` but still answered 200/"healthy",
    making the 503 branch unreachable (dict construction never raises).

    Returns:
        dict: status payload with a components.mongodb sub-status.

    Raises:
        HTTPException: 503 when the MongoDB client is not connected.
    """
    # `client` is the module-level MongoDB client from app.db.mongodb;
    # a falsy value means connect_to_mongo has not (successfully) run.
    if not client:
        raise HTTPException(status_code=503, detail="Service unhealthy: mongodb not connected")
    return {
        "status": "healthy",
        "timestamp": datetime.now().isoformat(),
        "service": "llm-service",
        "version": "1.0.0",
        "components": {
            "mongodb": {"status": "ok"}
        }
    }
@app.get("/ready")
async def readiness_check():
    """Readiness probe: report whether critical dependencies are available.

    Returns ``{"status": "ready"}`` when the MongoDB client exists,
    otherwise ``{"status": "not_ready"}`` with a machine-readable reason.
    Never raises — any unexpected error is folded into the not_ready payload.
    """
    try:
        # `client` is the module-level MongoDB client; falsy means the
        # startup connection hook has not completed.
        if client:
            return {
                "status": "ready",
                "timestamp": datetime.now().isoformat()
            }
        return {
            "status": "not_ready",
            "timestamp": datetime.now().isoformat(),
            "reason": "database_not_connected"
        }
    except Exception as exc:
        return {
            "status": "not_ready",
            "timestamp": datetime.now().isoformat(),
            "reason": str(exc)
        }
......@@ -236,12 +236,30 @@ async def add_message(conversation_id: str, user_id: str, content: str) -> Dict[
# 调用模型生成回复
assistant_response = await generate_response(model_messages)
# 检查助手回复是否包含敏感词
response_check = await sensitive_word_filter.check_text(assistant_response)
if response_check["contains_sensitive_words"]:
# 记录敏感词审计日志
sensitive_record = {
"user_id": user_id,
"conversation_id": ObjectId(conversation_id),
"message_content": assistant_response, # 记录原始违规内容
"source": "assistant", # 标记来源为助手
"sensitive_words_found": response_check["sensitive_words_found"],
"highest_severity": response_check["highest_severity"],
"timestamp": datetime.now()
}
await db.db.sensitive_records.insert_one(sensitive_record)
# 屏蔽回复内容
assistant_response = "(系统拦截)生成的内容包含敏感信息,已屏蔽。"
# 创建助手回复消息
assistant_message = {
"role": "assistant",
"content": assistant_response,
"timestamp": datetime.now(),
"contains_sensitive_words": False,
"contains_sensitive_words": False, # 既然已屏蔽,标记为不包含(或者是 True 但内容已安全化)
"sensitive_words_found": []
}
......
# Security Service API 测试与假数据示例
本文件用于说明如何通过 HTTP API(经网关或直连服务)调用 Security Service,并使用一组统一的假数据进行联调和回归测试。
> 注意:这些假数据仅用于测试,生产环境请接入真实监控和日志数据。
---
## 1. 环境与前提条件
- 网关地址(推荐):`http://localhost:8080`
- Security Service API 前缀:`/api/v1/security`
- 所有接口均需要携带管理员 JWT:
- Header:`Authorization: Bearer <admin_token>`
在以下示例中,请将 `<admin_token>` 替换为实际从 Auth Service 获取的 token。
---
## 2. 接口一览
- `POST /api/v1/security/analysis` —— 安全风险分析
- `POST /api/v1/security/attack-advice` —— 攻击应急建议
- `GET /api/v1/security/report` —— 安全日报
- `GET /api/v1/security/monitor` —— 风险监测与合规评估
- `GET /api/v1/security/rss/news` —— 安全新闻 RSS 订阅
---
## 3. 安全风险分析(/analysis)测试示例
### 3.1 请求说明
- 方法:`POST`
- URL:`http://localhost:8080/api/v1/security/analysis`
- 用途:基于设备信息列表做 AI 风险分析。
### 3.2 测试请求体(假数据)
```json
{
"devices": [
{
"id": "sw-001",
"name": "Core-Switch-A",
"type": "switch",
"status": "warning",
"version": "v1.2.0",
"logs": [
"Port 22 high traffic",
"Packet loss detected"
]
},
{
"id": "fw-001",
"name": "Edge-Firewall",
"type": "firewall",
"status": "active",
"version": "v2.1.patch3",
"logs": [
"Denied 1000+ requests from IP 192.168.1.50"
]
}
]
}
```
### 3.3 curl 示例
```bash
curl -X POST "http://localhost:8080/api/v1/security/analysis" \
-H "Authorization: Bearer <admin_token>" \
-H "Content-Type: application/json" \
-d '{
"devices": [
{
"id": "sw-001",
"name": "Core-Switch-A",
"type": "switch",
"status": "warning",
"version": "v1.2.0",
"logs": ["Port 22 high traffic", "Packet loss detected"]
},
{
"id": "fw-001",
"name": "Edge-Firewall",
"type": "firewall",
"status": "active",
"version": "v2.1.patch3",
"logs": ["Denied 1000+ requests from IP 192.168.1.50"]
}
]
}'
```
### 3.4 期望响应结构
服务端返回 JSON 结构符合 `SecurityAnalysisResponse`
```json
{
"summary": "string",
"vulnerabilities": ["string"],
"suggestions": ["string"],
"risk_level": "string"
}
```
---
## 4. 攻击应急建议(/attack-advice)测试示例
### 4.1 请求说明
- 方法:`POST`
- URL:`http://localhost:8080/api/v1/security/attack-advice`
- 用途:当系统已遭受攻击或疑似攻击时,获取应急响应方案。
### 4.2 测试请求体(假数据)
```json
{
"attack_type": "Brute Force Login",
"target_device": "DB-Server-Prod",
"severity": "high",
"logs": "Failed login attempts from 10.0.0.10"
}
```
### 4.3 curl 示例
```bash
curl -X POST "http://localhost:8080/api/v1/security/attack-advice" \
-H "Authorization: Bearer <admin_token>" \
-H "Content-Type: application/json" \
-d '{
"attack_type": "Brute Force Login",
"target_device": "DB-Server-Prod",
"severity": "high",
"logs": "Failed login attempts from 10.0.0.10"
}'
```
### 4.4 期望响应结构
```json
{
"immediate_actions": ["string"],
"analysis": "string",
"mitigation_plan": "string"
}
```
---
## 5. 安全日报(/report)测试示例
### 5.1 请求说明
- 方法:`GET`
- URL:`http://localhost:8080/api/v1/security/report`
- 用途:生成面向管理层的每日安全概览。
### 5.2 curl 示例
```bash
curl -X GET "http://localhost:8080/api/v1/security/report" \
-H "Authorization: Bearer <admin_token>"
```
> 当前实现中,后端会构造简单的统计信息传给 Dify。实际环境应接入真实监控数据。
### 5.3 期望响应结构
```json
{
"date": "string",
"overall_status": "string",
"device_summary": "string",
"incident_summary": "string",
"recommendations": "string"
}
```
---
## 6. 风险监测与合规评估(/monitor)测试示例
### 6.1 请求说明
- 方法:`GET`
- URL:`http://localhost:8080/api/v1/security/monitor`
- 用途:基于给定的漏洞信息进行整体风险与合规评估。
> 当前代码中 `context_data` 为空数组时,智能体可以根据默认经验或空数据做基线评估。也可以在后续版本中从漏洞库/配置注入具体列表。
### 6.2 curl 示例
```bash
curl -X GET "http://localhost:8080/api/v1/security/monitor" \
-H "Authorization: Bearer <admin_token>"
```
### 6.3 期望响应结构
```json
{
"detected_vulnerabilities": ["string"],
"compliance_risks": ["string"],
"ai_assessment": "string"
}
```
---
## 7. 安全新闻 RSS 订阅(/rss/news)测试示例
### 7.1 请求说明
- 方法:`GET`
- URL:`http://localhost:8080/api/v1/security/rss/news`
- 用途:获取来自天融信、360 CERT、绿盟等安全厂商的最新资讯。
### 7.2 curl 示例
```bash
curl -X GET "http://localhost:8080/api/v1/security/rss/news" \
-H "Authorization: Bearer <admin_token>"
```
### 7.3 期望响应结构
```json
{
"items": [
{
"title": "string",
"link": "string",
"description": "string",
"published": "string",
"source": "string"
}
]
}
```
---
## 8. 测试建议
- 在联调阶段,可以先固定一组假数据(如本文件中的示例),确保:
- Dify 智能体返回的 JSON 结构稳定且字段完整;
- Security Service 能正确解析并返回给前端。
- 回归测试时:
- 建议将这些请求录入到自动化测试脚本中(如 pytest + httpx/requests),
- 对响应结构做 Schema 校验,避免后续修改提示词或模型配置导致输出结构破坏。
\ No newline at end of file
FROM python:3.11-slim

# Run as a dedicated non-root user for defense in depth.
RUN useradd -m -u 1000 appuser && \
    mkdir -p /app && \
    chown -R appuser:appuser /app

WORKDIR /app

# Install dependencies first so this layer is cached across code changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code owned by the runtime user.
# (Previously there were two COPY . . layers — the first, root-owned one
# was redundant and doubled the image's code layers.)
COPY --chown=appuser:appuser . .

USER appuser

EXPOSE 8000

# Production entrypoint: multiple workers, proxy headers honored behind
# the gateway. (A duplicate dev CMD was removed — only the last CMD in a
# Dockerfile takes effect.)
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--workers", "4", "--proxy-headers", "--forwarded-allow-ips", "*"]
# Security Service 使用说明
## 1. 服务定位
Security Service 是本项目的安全分析微服务,主要能力包括:
- 基于交换机 / 防火墙 / 服务器等设备信息做 AI 安全风险分析
- 在遭受攻击时给出 AI 应急响应建议
- 生成每日安全日报
- 基于互联网最新漏洞进行风险监测与合规性评估
当前实现说明:
- 不直接读写 PostgreSQL
- 风险分析与攻击建议的结果会异步持久化到 MongoDB(详见第 6 节);其余分析结果仅在请求周期内计算并返回
## 2. 部署与访问入口
### 2.1 通过 Docker Compose 启动
在项目根目录执行:
```bash
cd /path/to/llm-filter  # 替换为你本地的项目根目录
docker-compose up -d --build security-service gateway
```
### 2.2 访问地址
- 统一网关入口(推荐):`http://localhost:8080`
- 安全服务 API:`/api/v1/security/*`
- 安全服务文档:`http://localhost:8080/docs/security/`
- 直连 Security Service 容器:
- Base URL:`http://localhost:8003`
- API 前缀:`/api/v1/security`
## 3. 鉴权与权限控制
- 所有接口均要求携带 Auth Service 签发的 **JWT**
- HTTP Header:`Authorization: Bearer <token>`
- 服务内部通过 [`get_current_admin`](app/core/security.py) 校验管理员身份:
- 仅当 `role` 为 `admin` / `administrator` / `root` 时允许访问
示例 Header:
```http
Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...
```
## 4. 接口一览
所有路径均在前缀 `/api/v1/security` 下,以下以 **网关地址** 为例:`http://localhost:8080`
### 4.1 安全风险分析
- 方法:`POST`
- URL:`/api/v1/security/analysis`
- 说明:
- 输入网络设备列表(交换机 / 防火墙 / 服务器等),由 AI 分析潜在安全隐患
- **注意**:必须传入有效的 `devices` 列表,否则将返回空结果或错误。
请求示例:
```json
POST http://localhost:8080/api/v1/security/analysis
Authorization: Bearer <admin_token>
Content-Type: application/json
{
"devices": [
{
"id": "sw-001",
"name": "Core-Switch-A",
"type": "switch",
"status": "warning",
"logs": ["Port 22 high traffic", "Packet loss detected"],
"version": "v1.2.0"
}
]
}
```
响应字段(`SecurityAnalysisResponse`):
- `summary`: 总体安全概况
- `vulnerabilities`: 漏洞 / 风险列表
- `suggestions`: 修复建议列表
- `risk_level`: 风险等级(如 `low` / `medium` / `high` / `critical`)
### 4.2 攻击应急建议
- 方法:`POST`
- URL:`/api/v1/security/attack-advice`
- 说明:当系统已遭受攻击时,提供应急响应与缓解方案
请求示例:
```json
POST http://localhost:8080/api/v1/security/attack-advice
Authorization: Bearer <admin_token>
Content-Type: application/json
{
"attack_type": "Brute Force Login",
"target_device": "DB-Server-Prod",
"severity": "high",
"logs": "Failed login attempts from 10.0.0.10"
}
```
响应字段(`AttackAdviceResponse`):
- `immediate_actions`: 立即执行的操作建议列表
- `analysis`: 攻击分析说明
- `mitigation_plan`: 中长期缓解与防护计划
### 4.3 安全日报
- 方法:`GET`
- URL:`/api/v1/security/report`
- 说明:生成企业安全日报,用于面向管理层的安全概览展示
- **注意**:当前实现需要接入真实数据源才能生成有效报告,否则返回空状态。
响应字段(`SecurityReportResponse`):
- `date`: 报告日期(`YYYY-MM-DD`)
- `overall_status`: 总体安全状态
- `device_summary`: 设备运行状况摘要
- `incident_summary`: 安全事件与拦截情况摘要
- `recommendations`: 后续安全改进建议
### 4.4 风险监测与合规评估
- 方法:`GET`
- URL:`/api/v1/security/monitor`
- 说明:基于互联网最新漏洞信息,评估当前企业的合规风险
- **注意**:需要配置或接入外部漏洞数据库,否则返回空列表。
响应字段(`RiskMonitorResponse`):
- `detected_vulnerabilities`: 识别到的漏洞列表
- `compliance_risks`: 合规风险点列表
- `ai_assessment`: AI 对整体风险的评估说明
### 4.5 安全新闻 RSS 订阅
- 方法:`GET`
- URL:`/api/v1/security/rss/news`
- 说明:获取来自天融信、360 CERT、绿盟等安全厂商的最新 RSS 安全资讯。
响应字段(`RSSFeedResponse`):
- `items`: 新闻列表,包含标题、链接、摘要、发布时间和来源。
## 5. Dify 集成与异常处理
服务内部通过 Dify 完成大部分安全分析逻辑:
- Dify 调用配置在 [config.py](app/core/config.py)
- `DIFY_API_URL`
- `DIFY_API_KEY`
- 请求由 [`SecurityService._call_llm`](app/services/analysis.py) 统一发起
- **智能体 Prompt 配置**:请参考 [DIFY_PROMPT.md](DIFY_PROMPT.md) 文档,在 Dify 平台配置对应的 System Prompt 和变量。
当 Dify 不可用(网络异常 / 超时 / 抛错)时:
- 服务将记录错误日志
- 抛出异常供上层处理或返回 HTTP 500 错误
- **不再提供 Mock 数据降级**,以确保运维人员能及时感知服务状态异常。
## 6. 数据存储与状态
当前版本的 Security Service:
- **数据持久化**:分析结果(风险分析与攻击建议)会异步存储到 **MongoDB** 数据库中。
- **历史查询**:提供接口查询历史分析记录。
## 7. 历史查询接口
### 7.1 安全分析历史
- 方法:`GET`
- URL:`/api/v1/security/analysis/history`
- 参数:
- `start_date` (可选): 开始时间 (ISO 8601)
- `end_date` (可选): 结束时间 (ISO 8601)
- `limit` (可选): 返回数量限制,默认 20
- 响应:包含 `total` 与 `items` (AnalysisHistoryItem)
### 7.2 攻击建议历史
- 方法:`GET`
- URL:`/api/v1/security/attack-advice/history`
- 参数:
- `start_date` (可选): 开始时间 (ISO 8601)
- `end_date` (可选): 结束时间 (ISO 8601)
- `limit` (可选): 返回数量限制,默认 20
- 响应:包含 `total` 与 `items` (AttackAdviceHistoryItem)
## 8. 快速调试示例(curl)
确保容器已启动后,可以在宿主机直接运行:
```bash
# 1. 使用管理员 Token 调用安全分析(通过网关)
# 注意:请确保传入真实的设备数据
curl -X POST "http://localhost:8080/api/v1/security/analysis" \
-H "Authorization: Bearer <admin_token>" \
-H "Content-Type: application/json" \
-d '{"devices": [{"id": "test-1", "name": "Test-Device", "type": "server", "status": "active"}]}'
# 2. 直接访问文档(网关统一入口)
open "http://localhost:8080/docs/security/"
```
from datetime import datetime
from fastapi import APIRouter, Depends, Query
from app.schemas.payloads import *
from app.services.analysis import SecurityService
from app.services.rss import RSSService
from app.services.zabbix_service import ZabbixService
from app.core.security import get_current_admin
from app.core.database import db
from datetime import datetime, timezone
router = APIRouter()

# Shared singletons for this router. SecurityService requires the Zabbix
# collector for auto data collection, so it must be constructed with it.
# (A leftover `service = SecurityService()` call was removed: the
# constructor's `zabbix_service` parameter has no default, so that line
# raised TypeError at import time and was immediately shadowed anyway.)
zabbix_service = ZabbixService()
service = SecurityService(zabbix_service=zabbix_service)
rss_service = RSSService()
@router.post("/analysis", response_model=SecurityAnalysisResponse)
async def analyze_risks(request: SecurityAnalysisRequest, admin: dict = Depends(get_current_admin)):
    """AI risk analysis over the submitted device list (admin only).

    Persistence of the analysis log is handled inside
    SecurityService.analyze_risks, so the endpoint only delegates.
    (Removed: a dead endpoint-level MongoDB insert that duplicated the
    service-side write, followed by an unreachable second return.)
    """
    return await service.analyze_risks(request.devices)
@router.get("/analysis/history", response_model=HistoryQueryResponse)
async def get_analysis_history(
......@@ -36,15 +30,12 @@ async def get_analysis_history(
@router.post("/attack-advice", response_model=AttackAdviceResponse)
async def get_attack_advice(request: AttackAdviceRequest, admin: dict = Depends(get_current_admin)):
    """Emergency attack-response advice (admin only).

    Forwards the full request, including the optional severity hint, to
    the service layer, which also persists the result to MongoDB.
    (Removed: a dead pre-merge variant that stored the log at endpoint
    level and made the second return unreachable.)
    """
    return await service.get_attack_advice(
        attack_type=request.attack_type,
        target=request.target_device,
        logs=request.logs,
        severity=request.severity,
    )
@router.get("/attack-advice/history", response_model=HistoryQueryResponse)
async def get_attack_advice_history(
......@@ -73,3 +64,53 @@ async def get_security_news(admin: dict = Depends(get_current_admin)):
"""
return await rss_service.get_security_news()
@router.post("/zabbix/sync")
async def sync_zabbix_data(admin: dict = Depends(get_current_admin)):
    """Manually trigger a Zabbix data sync (admin only)."""
    try:
        sync_result = await zabbix_service.sync_data()
    except Exception as exc:
        # Report failure in-band instead of surfacing a 500.
        return {
            "status": "error",
            "message": f"Zabbix数据同步失败: {str(exc)}",
            "data": None,
        }
    return {
        "status": "success",
        "message": "Zabbix数据同步完成",
        "data": sync_result,
    }
@router.get("/zabbix/status")
async def get_zabbix_status(admin: dict = Depends(get_current_admin)):
    """Return the Zabbix service's current sync status (admin only)."""
    return {
        "status": "success",
        "data": zabbix_service.get_sync_status(),
    }
@router.post("/zabbix/devices")
async def get_zabbix_devices(admin: dict = Depends(get_current_admin)):
    """Collect and return the current Zabbix device inventory (admin only)."""
    try:
        inventory = await zabbix_service.collect_device_data()
    except Exception as exc:
        return {
            "status": "error",
            "message": f"获取设备数据失败: {str(exc)}",
            "data": None,
        }
    device_total = len(inventory.get("devices", []))
    return {
        "status": "success",
        "message": f"成功获取 {device_total} 台设备",
        "data": inventory,
    }
from pydantic_settings import BaseSettings
from pathlib import Path
from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
    """Security Service runtime configuration, loaded from .env files and env vars."""

    # Load the repo-root .env first, then the service-local .env
    # (later files take precedence per pydantic-settings semantics).
    _ROOT_ENV_PATH = Path(__file__).resolve().parents[4] / ".env"
    _SERVICE_ENV_PATH = Path(__file__).resolve().parents[2] / ".env"

    model_config = SettingsConfigDict(
        env_file=(_ROOT_ENV_PATH, _SERVICE_ENV_PATH),
        env_file_encoding="utf-8",
        case_sensitive=True,
    )

    API_V1_STR: str = "/api/v1"
    PROJECT_NAME: str = "Security Service"

    # Auth configuration (must match Auth Service). The empty default forces
    # the secret to come from the environment — a previous duplicate field
    # shipped a hard-coded secret, which has been removed.
    JWT_SECRET: str = ""
    ALGORITHM: str = "HS256"

    # Dify configuration. The API key is read from env var
    # DIFY_API_KEY_SECURITY via an alias; the old hard-coded key field
    # (which this one silently shadowed) has been removed.
    DIFY_API_URL: str = "http://192.168.6.6/v1"
    DIFY_API_KEY: str = Field("", alias="DIFY_API_KEY_SECURITY")
    DIFY_RESPONSE_MODE: str = "streaming"

    # MongoDB configuration
    MONGODB_URL: str = "mongodb://localhost:27017"
    MONGODB_DB_NAME: str = "security_service_db"

    # Redis configuration
    REDIS_HOST: str = "localhost"
    REDIS_PORT: int = 6379
    REDIS_DB: int = 0
    REDIS_PASSWORD: str = ""

    # Zabbix configuration
    ZABBIX_URL: str = "http://localhost"
    ZABBIX_USERNAME: str = "Admin"
    ZABBIX_PASSWORD: str = "zabbix"

    # Data-sync configuration. NOTE: the legacy pydantic-v1 `class Config`
    # was removed — it conflicted with `model_config` above.
    ZABBIX_SYNC_INTERVAL: int = 3600
    ZABBIX_AUTO_SYNC: bool = True


settings = Settings()
from fastapi import FastAPI
from contextlib import asynccontextmanager
from datetime import datetime
import logging
from fastapi import FastAPI, HTTPException
from app.api.v1.endpoints import router as security_router
from app.core.config import settings
from app.core.database import db
from app.services.zabbix_service import ZabbixService
logger = logging.getLogger(__name__)
@asynccontextmanager
async def lifespan(app: FastAPI):
    """App lifespan: open the DB on startup, close it on shutdown.

    Zabbix connectivity is probed once at startup purely for logging;
    a failed probe never prevents the service from starting.
    """
    db.connect()
    try:
        probe = ZabbixService()
        if probe.get_sync_status().get("collector_initialized"):
            logger.info("Zabbix服务初始化成功")
        else:
            logger.warning("Zabbix服务初始化失败,请检查Zabbix配置")
    except Exception as exc:
        logger.error(f"Zabbix服务连接检查失败: {exc}")
    yield
    db.close()
# Single application instance. (A duplicate `app = FastAPI(...)` was removed:
# the first instance — and the router registered on it — was discarded when
# the second assignment rebound `app`.) Routes are registered at the bottom
# of the module, after all endpoint handlers are defined.
app = FastAPI(title=settings.PROJECT_NAME, version="2.0.0", lifespan=lifespan)
@app.get("/")
def health_check():
    """Cheap liveness probe reporting service identity and version.

    (Removed an unreachable duplicate return left over from a merge;
    only the versioned response is kept.)
    """
    return {"status": "ok", "service": "security-service", "version": "2.0.0"}
@app.get("/health")
async def detailed_health_check():
    """Detailed health report covering MongoDB and the Zabbix collector."""
    try:
        # db.db is assigned by db.connect() in the lifespan startup hook;
        # truthiness stands in for "a database handle exists".
        db_status = "ok" if db.db else "error"

        # Zabbix is an optional dependency: start from not_configured,
        # then probe and refine.
        zabbix_status = {"status": "not_configured"}
        try:
            # NOTE(review): a fresh ZabbixService is constructed per request —
            # confirm its constructor is cheap and side-effect free.
            zabbix_service = ZabbixService()
            sync_status = zabbix_service.get_sync_status()
            if sync_status.get("collector_initialized"):
                zabbix_status = {
                    "status": "ok",
                    "last_sync": sync_status.get("last_sync_time"),
                }
            else:
                zabbix_status = {
                    "status": "error",
                    "message": "Zabbix collector未初始化",
                }
        except Exception as e:
            # A Zabbix probe failure only degrades that component's status;
            # the endpoint itself still answers 200.
            zabbix_status = {
                "status": "error",
                "message": str(e),
            }

        return {
            "status": "healthy",
            "timestamp": datetime.now().isoformat(),
            "service": "security-service",
            "version": "2.0.0",
            "components": {
                "database": {"status": db_status},
                "zabbix": zabbix_status,
            },
        }
    except Exception as e:
        # Anything unexpected in the outer scope is a genuine 503.
        logger.error(f"Health check failed: {e}")
        raise HTTPException(status_code=503, detail=f"Service unhealthy: {str(e)}")
@app.get("/ready")
async def readiness_check():
    """Readiness probe: ready only once the database handle exists."""
    try:
        if db.db:
            return {
                "status": "ready",
                "timestamp": datetime.now().isoformat(),
            }
        return {
            "status": "not_ready",
            "timestamp": datetime.now().isoformat(),
            "reason": "database_not_connected",
        }
    except Exception as exc:
        # Never raise from the readiness probe — report not_ready instead.
        return {
            "status": "not_ready",
            "timestamp": datetime.now().isoformat(),
            "reason": str(exc),
        }
app.include_router(security_router, prefix=f"{settings.API_V1_STR}/security", tags=["Security"])
import json
import httpx
from datetime import datetime
from datetime import datetime, timezone
from typing import List, Dict, Any, Optional
from pydantic import ValidationError
from app.schemas.payloads import *
from app.core.config import settings
from app.core.database import db
from app.services.zabbix_service import ZabbixService
import logging
logger = logging.getLogger(__name__)
class SecurityService:
def __init__(self, zabbix_service: ZabbixService):
    """Initialize the security service.

    :param zabbix_service: Zabbix data-collection service; used by
        analyze_risks() to auto-collect device data when the caller
        supplies none.
    """
    self.zabbix_service = zabbix_service
async def analyze_risks(self, devices: Optional[List[DeviceInfo]] = None) -> SecurityAnalysisResponse:
    """Risk analysis backed by live Zabbix data.

    When no devices are supplied, the inventory is auto-collected from
    Zabbix. The LLM result is returned and persisted to MongoDB on a
    best-effort basis.

    (Merge cleanup: removed a leftover `d.dict()` serialization line and a
    duplicated "context_data" dict entry — a syntax error — plus a premature
    `return` that made the persistence step unreachable.)
    """
    if not devices:
        try:
            logger.info("未提供设备数据,从Zabbix自动采集...")
            zabbix_data = await self.zabbix_service.collect_device_data()
            devices_data = zabbix_data.get("devices", [])
            devices = [
                DeviceInfo(
                    id=device_data.get("id", ""),
                    name=device_data.get("name", ""),
                    type=device_data.get("type", "unknown"),
                    status=device_data.get("status", "unknown"),
                    logs=device_data.get("logs", []),
                )
                for device_data in devices_data
            ]
            logger.info(f"从Zabbix采集到 {len(devices)} 台设备")
        except Exception as e:
            logger.error(f"从Zabbix采集设备数据失败: {e}")
            # Fall back to an empty list and let the LLM handle it.
            devices = []
    if not devices:
        devices = []

    # Serialize the device data for the Dify workflow.
    device_data = [d.model_dump() for d in devices]
    inputs = {
        "task_type": "analysis",  # tells Dify which task branch to execute
        "context_data": json.dumps(device_data, ensure_ascii=False),
    }

    result = await self._call_llm(inputs, SecurityAnalysisResponse)

    # Best-effort persistence: a MongoDB failure must not fail the request.
    try:
        if db.db is not None:
            log_entry = result.model_dump()
            log_entry["created_at"] = datetime.now(timezone.utc)
            log_entry["device_count"] = len(devices)
            await db.db.security_analysis_logs.insert_one(log_entry)
            logger.info("安全分析结果已保存到MongoDB")
    except Exception as e:
        logger.error(f"保存分析结果到MongoDB失败: {e}")
    return result
async def get_attack_advice(self, attack_type: str, target: str, logs: str, severity: Optional[str] = None) -> AttackAdviceResponse:
    """Emergency response advice for an (alleged) ongoing attack.

    :param attack_type: e.g. "Brute Force Login".
    :param target: name of the attacked device/host.
    :param logs: raw log excerpt supporting the report.
    :param severity: optional severity hint forwarded to the model.

    (Merge cleanup: removed a duplicated `def` line — a syntax error —
    duplicated "logs"/"context_data" dict entries, and a dead pre-merge
    `return` that skipped persistence.)
    """
    data = {
        "attack_type": attack_type,
        "target_device": target,
        "severity": severity,
        "logs": logs,
    }
    inputs = {
        "task_type": "advice",
        "context_data": json.dumps(data, ensure_ascii=False),
    }

    result = await self._call_llm(inputs, AttackAdviceResponse)

    # Best-effort persistence; failures are logged, never raised.
    try:
        if db.db is not None:
            log_entry = result.model_dump()
            log_entry.update(
                {
                    "created_at": datetime.now(timezone.utc),
                    "attack_type": attack_type,
                    "target_device": target,
                    "severity": severity,
                }
            )
            await db.db.attack_advice_logs.insert_one(log_entry)
            logger.info("攻击建议结果已保存到MongoDB")
    except Exception as e:
        logger.error(f"保存攻击建议结果到MongoDB失败: {e}")
    return result
async def generate_report(self) -> SecurityReportResponse:
    """Generate the daily security report from live Zabbix data.

    Falls back to a zeroed report (real_time_data=False) when Zabbix is
    unavailable; the LLM result is persisted best-effort to MongoDB.

    (Merge cleanup: removed an unused pre-merge `data` dict, a duplicated
    "context_data" dict entry — a syntax error — and a premature `return`
    that made the persistence step unreachable.)
    """
    try:
        logger.info("从Zabbix采集数据生成安全报告...")
        device_data = await self.zabbix_service.collect_device_data()
        devices = device_data.get("devices", [])

        # Aggregate device state counters.
        total_devices = len(devices)
        up_devices = sum(1 for d in devices if d.get("status") == "up")
        down_devices = sum(1 for d in devices if d.get("status") == "down")
        problem_devices = sum(1 for d in devices if d.get("logs"))

        report_data = {
            "date": datetime.now().strftime('%Y-%m-%d'),
            "device_status": {
                "total_devices": total_devices,
                "up_devices": up_devices,
                "down_devices": down_devices,
                "problem_devices": problem_devices
            },
            "incident_summary": {
                "total_events": sum(len(d.get("logs", [])) for d in devices),
                "critical_events": 0,  # TODO: derive from Zabbix trigger priorities
                "high_events": 0,
                "medium_events": 0,
                "low_events": 0,
                "resolved_events": 0,
                "unresolved_events": problem_devices
            },
            "top_issues": self._extract_top_issues(devices),
            "real_time_data": True  # marks live (non-fallback) data
        }
        logger.info(f"成功采集到 {total_devices} 台设备数据,其中 {problem_devices} 台有问题")
    except Exception as e:
        logger.error(f"从Zabbix采集数据失败,使用备用数据: {e}")
        # Fallback report when Zabbix is unavailable.
        report_data = {
            "date": datetime.now().strftime('%Y-%m-%d'),
            "device_status": {
                "total_devices": 0,
                "up_devices": 0,
                "down_devices": 0,
                "problem_devices": 0,
                "status": "zabbix_unavailable"
            },
            "incident_summary": {
                "total_events": 0,
                "critical_events": 0,
                "high_events": 0,
                "medium_events": 0,
                "low_events": 0,
                "resolved_events": 0,
                "unresolved_events": 0
            },
            "top_issues": ["Zabbix服务不可用,无法获取实时数据"],
            "real_time_data": False  # marks fallback data
        }

    inputs = {
        "task_type": "report",
        "context_data": json.dumps(report_data, ensure_ascii=False)
    }

    result = await self._call_llm(inputs, SecurityReportResponse)

    # Best-effort persistence of the generated report.
    try:
        if db.db is not None:
            log_entry = result.model_dump()
            log_entry["created_at"] = datetime.now(timezone.utc)
            log_entry["report_date"] = report_data["date"]
            log_entry["real_time_data"] = report_data.get("real_time_data", False)
            await db.db.security_report_logs.insert_one(log_entry)
            logger.info("安全报告结果已保存到MongoDB")
    except Exception as e:
        logger.error(f"保存报告结果到MongoDB失败: {e}")
    return result
async def monitor_risks(self) -> RiskMonitorResponse:
    """Real-time risk monitoring backed by live Zabbix device data.

    (Merge cleanup: the final RiskMonitorResponse call contained duplicate
    keyword arguments — dead mock values plus the real variables — which is
    a SyntaxError; the mock arguments were removed.)
    """
    try:
        logger.info("从Zabbix采集监控数据...")
        device_data = await self.zabbix_service.collect_device_data()
        devices = device_data.get("devices", [])

        detected_vulnerabilities = []
        compliance_risks = []

        for device in devices:
            device_name = device.get("name", "未知设备")
            device_type = device.get("type", "unknown")
            logs = device.get("logs", [])

            # Keyword heuristics per device class.
            if device_type == "switch":
                if any("down" in log.lower() for log in logs):
                    compliance_risks.append(f"交换机 {device_name} 离线 - 网络中断风险")
                if any("link down" in log.lower() for log in logs):
                    detected_vulnerabilities.append(f"网络接口异常 - {device_name}")
            elif device_type == "firewall":
                if any("packet loss" in log.lower() for log in logs):
                    detected_vulnerabilities.append(f"防火墙 {device_name} 包丢失 - 性能问题")
                if any("connection" in log.lower() and "failed" in log.lower() for log in logs):
                    compliance_risks.append(f"防火墙 {device_name} 连接失败 - 安全策略检查")
            elif device_type == "server":
                if any("cpu" in log.lower() and "high" in log.lower() for log in logs):
                    detected_vulnerabilities.append(f"服务器 {device_name} CPU 高负载 - 性能风险")
                if any("memory" in log.lower() and "usage" in log.lower() for log in logs):
                    compliance_risks.append(f"服务器 {device_name} 内存使用异常 - 资源优化")

            # Generic high-priority alert detection for any device type.
            for log in logs:
                if any(priority in log for priority in ["Priority: 5", "Priority: 4"]):
                    detected_vulnerabilities.append(f"高优先级告警 - {device_name}: {log[:50]}...")

        # Baseline assessment when nothing specific was flagged.
        if not detected_vulnerabilities and not compliance_risks:
            detected_vulnerabilities = ["系统运行正常,未检测到明显安全漏洞"]
            compliance_risks = ["系统符合基本安全要求,建议定期审计"]

        ai_assessment = f"系统安全状态: {self._assess_security_risk_level(devices)}"
        logger.info(f"监控分析完成,发现 {len(detected_vulnerabilities)} 个漏洞和 {len(compliance_risks)} 个合规风险")
    except Exception as e:
        logger.error(f"从Zabbix采集监控数据失败: {e}")
        # Fallback content when monitoring data cannot be collected.
        detected_vulnerabilities = ["监控系统不可用,无法获取实时风险数据"]
        compliance_risks = ["系统状态未知,请检查Zabbix连接"]
        ai_assessment = "监控系统服务异常,无法进行有效风险评估"

    return RiskMonitorResponse(
        detected_vulnerabilities=detected_vulnerabilities,
        compliance_risks=compliance_risks,
        ai_assessment=ai_assessment
    )
async def get_analysis_history(self, start_date: datetime = None, end_date: datetime = None, limit: int = 20) -> HistoryQueryResponse:
......@@ -126,6 +320,10 @@ class SecurityService:
return HistoryQueryResponse(total=total, items=items)
async def _call_llm(self, inputs: Dict[str, Any], model_cls):
if not settings.DIFY_API_URL or not settings.DIFY_API_KEY:
logger.warning("Dify 未配置(缺少 DIFY_API_URL 或 DIFY_API_KEY),跳过 LLM 调用并返回默认结果")
return self._build_default_response(model_cls)
try:
url = f"{settings.DIFY_API_URL.rstrip('/')}/chat-messages"
headers = {
......@@ -151,15 +349,11 @@ class SecurityService:
payload = {
"inputs": inputs,
"query": query_prompt,
"response_mode": "streaming",
"response_mode": settings.DIFY_RESPONSE_MODE,
"conversation_id": "",
"user": "security-system-api",
}
# DEBUG LOG: 打印实际发送的 Payload
logger.info(f"Sending request to Dify. URL: {url}")
logger.info(f"Payload inputs: {json.dumps(inputs, ensure_ascii=False)}")
full_answer = ""
async with httpx.AsyncClient() as client:
async with client.stream("POST", url, json=payload, headers=headers, timeout=120.0) as resp:
......@@ -202,6 +396,49 @@ class SecurityService:
return self._build_default_response(model_cls)
def _extract_top_issues(self, devices: List[Dict]) -> List[str]:
"""
从设备日志中提取主要问题
"""
issues = []
for device in devices:
device_name = device.get("name", "未知设备")
logs = device.get("logs", [])
# 分析日志中的关键问题
for log in logs:
if any(keyword in log.lower() for keyword in ["critical", "down", "failed", "error", "high priority"]):
issues.append(f"{device_name}: {log[:80]}...")
break # 每个设备只取一个问题
# 如果没有问题,返回正常状态
if not issues:
issues = ["系统运行正常,未发现明显问题"]
return issues[:5] # 只返回前5个主要问题
def _assess_security_risk_level(self, devices: List[Dict]) -> str:
"""
评估系统整体安全风险等级
"""
total_devices = len(devices)
problem_devices = sum(1 for d in devices if d.get("logs"))
if total_devices == 0:
return "未知 - 无设备数据"
problem_ratio = problem_devices / total_devices
if problem_ratio >= 0.3:
return "高风险 - 多个设备出现异常"
elif problem_ratio >= 0.1:
return "中等风险 - 部分设备存在异常"
elif problem_ratio > 0:
return "低风险 - 少量设备出现异常"
else:
return "安全 - 所有设备运行正常"
def _clean_json_string(self, text: str) -> str:
"""清洗 Markdown 代码块标记和思维链标签"""
import re
......@@ -218,7 +455,7 @@ class SecurityService:
# 但通常思考在前,正文在后。如果没闭合,说明正文还没出来。
# 这里保守处理:如果剩下内容全是思考,那就全删了,返回空串,由上层处理为空的情况。
if '<think>' in text:
text = re.sub(r'<think>.*', '', text, flags=re.DOTALL)
text = re.sub(r'<think>.*', '', text, flags=re.DOTALL)
# 3. 清洗 Markdown 标记
if "```json" in text:
......
import asyncio
import requests
import json
import logging
from datetime import datetime, timedelta
from typing import List, Dict, Any, Optional
from app.core.config import settings
logger = logging.getLogger(__name__)
class ZabbixDataCollector:
def __init__(self, zabbix_url: str, username: str, password: str):
    """Initialize the Zabbix data collector and log in immediately.

    :param zabbix_url: Zabbix server base URL, e.g. "http://192.168.20.199".
    :param username: API username.
    :param password: API password.

    NOTE(review): the constructor performs a network login; instantiating
    this class can block or raise if Zabbix is unreachable.
    """
    self.zabbix_url = zabbix_url.rstrip('/')
    self.username = username
    self.password = password
    self.auth_token = None
    # Authenticate up front so auth_token is ready for API calls.
    self.login()
def _get_api_url(self):
"""返回完整的 API 地址"""
if self.zabbix_url.endswith("api_jsonrpc.php"):
return self.zabbix_url
if self.zabbix_url.endswith("/zabbix"):
return f"{self.zabbix_url}/api_jsonrpc.php"
return f"{self.zabbix_url}/zabbix/api_jsonrpc.php"
def login(self):
    """Log in, auto-detecting old vs new parameter formats.

    Zabbix 5.4+ expects "username" in user.login params; older releases
    expect "user". Tries the new format first, then the old one. Stores
    the auth token on success; raises when all attempts fail.

    BUG FIX: two diagnostics below ("未知响应格式" and "非 JSON 响应")
    were plain strings missing the f-prefix, so their placeholders were
    logged literally instead of the actual values.
    """
    payloads = [
        {
            "jsonrpc": "2.0",
            "method": "user.login",
            "params": {"username": self.username, "password": self.password},
            "id": 1
        },
        {
            "jsonrpc": "2.0",
            "method": "user.login",
            "params": {"user": self.username, "password": self.password},
            "id": 1
        }
    ]
    for i, payload in enumerate(payloads):
        try:
            logger.info(f"尝试登录方式 {'新版 (username)' if i == 0 else '旧版 (user)'}...")
            response = requests.post(
                self._get_api_url(),
                json=payload,
                timeout=10,
                headers={"Content-Type": "application/json"}
            )
            if response.status_code != 200:
                logger.warning(f"HTTP {response.status_code}: {response.reason}")
                continue
            data = response.json()
            if "result" in data:
                self.auth_token = data["result"]
                version = "Zabbix 5.4+" if i == 0 else "Zabbix 5.2-"
                logger.info(f"登录成功!检测到 {version} 格式。")
                return
            elif "error" in data:
                err = data["error"]
                msg = f"[{err.get('code')}] {err.get('message')} - {err.get('data', '')}"
                logger.warning(f"登录失败: {msg}")
                if "unexpected parameter" in msg and ("username" in msg or "user" in msg):
                    continue  # parameter-format mismatch: try the other style
                else:
                    raise Exception(msg)
            else:
                logger.warning(f"未知响应格式: {data}")
                continue
        except requests.exceptions.RequestException as e:
            logger.error(f"网络错误: {e}")
            continue
        except json.JSONDecodeError:
            logger.warning(f"非 JSON 响应: {response.text[:200]}")
            continue
    raise Exception("所有登录方式均失败!请检查:\n1. Zabbix 地址是否正确\n2. 用户名/密码是否正确\n3. Zabbix 是否运行\n4. 是否允许 API 访问")
def _call_api(self, method: str, params: dict):
    """Generic API call; retry_count=1 allows one re-login retry on
    auth/session errors (see _call_api_with_retry)."""
    return self._call_api_with_retry(method, params, retry_count=1)
def _call_api_with_retry(self, method: str, params: dict, retry_count: int = 0):
    """API call with optional re-login retry on auth/session failures.

    :param method: Zabbix JSON-RPC method name, e.g. "host.get".
    :param params: method parameters.
    :param retry_count: remaining re-login attempts (0 = no retry).
    :returns: the JSON-RPC "result" payload.
    :raises Exception: wrapping any transport or API-level error.
    """
    payload = {
        "jsonrpc": "2.0",
        "method": method,
        "params": params,
        "auth": self.auth_token,
        "id": 1
    }
    try:
        response = requests.post(
            self._get_api_url(),
            json=payload,
            timeout=15,
            headers={"Content-Type": "application/json"}
        )
        response.raise_for_status()
        data = response.json()
        if "error" in data:
            err = data["error"]
            error_msg = err.get('message', '')
            error_data = err.get('data', '')
            # Auth-related failures: Zabbix reports expired sessions with
            # messages like "Session terminated" / "Not authorized"
            # (error code -32602 is sometimes reused for these).
            # Re-login and retry with the refreshed token.
            if retry_count > 0 and ("Session" in error_data or "authorized" in error_data or "auth" in error_msg.lower()):
                logger.warning(f"Zabbix API 认证失败 ({error_msg} - {error_data}),尝试重新登录...")
                self.login()
                # Retry once with the refreshed auth token.
                return self._call_api_with_retry(method, params, retry_count=retry_count - 1)
            raise Exception(f"[{err.get('code')}] {error_msg} - {error_data}")
        return data["result"]
    except Exception as e:
        # NOTE(review): this also re-wraps the Exception raised just above,
        # so API-level errors surface double-wrapped as
        # "API 调用失败 (...)" — confirm this is intended.
        raise Exception(f"API 调用失败 ({method}): {e}")
def get_hosts(self):
"""获取主机列表"""
return self._call_api("host.get", {
"output": ["hostid", "name", "status"],
"selectTags": ["tag", "value"]
})
def get_triggers(self):
"""获取活动触发器"""
return self._call_api("trigger.get", {
"output": ["triggerid", "description", "priority", "status"],
"selectHosts": ["hostid", "name"],
"filter": {"value": 1},
"sortfield": "priority",
"sortorder": "DESC"
})
def get_events(self, time_from: Optional[int] = None, time_till: Optional[int] = None, limit: int = 100):
"""获取事件"""
params = {
"output": ["eventid", "clock", "name", "severity"],
"selectHosts": ["hostid", "name"],
"sortfield": "clock",
"sortorder": "DESC",
"limit": limit
}
if time_from is not None:
params["time_from"] = time_from
if time_till is not None:
params["time_till"] = time_till
return self._call_api("event.get", params)
def get_cpu_data(self):
    """Build per-host CPU/hardware records with attached trigger/event logs.

    Returns:
        dict: {"hosts": [...]} where each entry carries id/name/type/status,
        mock hardware metrics, and a "logs" list (trigger lines first, then
        event lines, in API order).
    """
    logger.info("获取CPU和硬件数据...")
    hosts = self.get_hosts()
    logger.info(f"获取到 {len(hosts)} 台主机")
    triggers = self.get_triggers()
    logger.info(f"获取到 {len(triggers)} 个活动触发器")
    # Last 24 hours of events.
    time_from = int((datetime.now() - timedelta(hours=24)).timestamp())
    events = self.get_events(time_from=time_from, limit=200)
    logger.info(f"获取到 {len(events)} 条事件")
    # Index log lines by hostid once instead of re-scanning every trigger
    # and event for every host (was O(hosts * (triggers + events))).
    logs_by_host = {}
    for t in triggers:
        line = f"{t['description']} - Priority: {t['priority']}"
        # Set-dedupe so a host listed twice on one trigger gets one line,
        # matching the previous any()-based behavior.
        for hid in {h["hostid"] for h in t.get("hosts", [])}:
            logs_by_host.setdefault(hid, []).append(line)
    for e in events:
        ts = datetime.fromtimestamp(int(e["clock"])).strftime("%Y-%m-%d %H:%M:%S")
        line = f"{ts} - {e['name']} - Severity: {e['severity']}"
        for hid in {h["hostid"] for h in e.get("hosts", [])}:
            logs_by_host.setdefault(hid, []).append(line)
    cpu_data = []
    for host in hosts:
        cpu_data.append({
            "id": host["hostid"],
            "name": host["name"],
            "type": self._determine_device_type(host),
            # Zabbix reports status as a string: "0" means monitored/up.
            "status": "up" if host["status"] == "0" else "down",
            # TODO: in production these should come from real Zabbix items.
            "cpu_usage": self._get_mock_cpu_usage(),
            "memory_usage": self._get_mock_memory_usage(),
            "disk_usage": self._get_mock_disk_usage(),
            "logs": list(logs_by_host.get(host["hostid"], [])),
        })
    return {"hosts": cpu_data}
def get_network_data(self):
    """Build per-host network-interface records with attached logs.

    Returns:
        dict: {"hosts": [...]} where each entry carries id/name/type/status,
        mock interface stats, and a "logs" list (triggers first, then events).
    """
    logger.info("获取网络接口数据...")
    hosts = self.get_hosts()
    logger.info(f"获取到 {len(hosts)} 台主机")
    triggers = self.get_triggers()
    logger.info(f"获取到 {len(triggers)} 个活动触发器")
    # Last 24 hours of events.
    time_from = int((datetime.now() - timedelta(hours=24)).timestamp())
    events = self.get_events(time_from=time_from, limit=200)
    logger.info(f"获取到 {len(events)} 条事件")
    # Index log lines by hostid once instead of re-scanning every trigger
    # and event for every host (was O(hosts * (triggers + events))).
    logs_by_host = {}
    for t in triggers:
        line = f"{t['description']} - Priority: {t['priority']}"
        for hid in {h["hostid"] for h in t.get("hosts", [])}:
            logs_by_host.setdefault(hid, []).append(line)
    for e in events:
        ts = datetime.fromtimestamp(int(e["clock"])).strftime("%Y-%m-%d %H:%M:%S")
        line = f"{ts} - {e['name']} - Severity: {e['severity']}"
        for hid in {h["hostid"] for h in e.get("hosts", [])}:
            logs_by_host.setdefault(hid, []).append(line)
    network_data = []
    for host in hosts:
        network_data.append({
            "id": host["hostid"],
            "name": host["name"],
            "type": self._determine_device_type(host),
            # Zabbix reports status as a string: "0" means monitored/up.
            "status": "up" if host["status"] == "0" else "down",
            # TODO: in production these should come from real Zabbix items.
            "interfaces": self._get_mock_network_interfaces(),
            "logs": list(logs_by_host.get(host["hostid"], [])),
        })
    return {"hosts": network_data}
def get_security_data_for_analysis(self):
    """Build the device list fed into the security-analysis pipeline.

    Returns:
        dict: {"devices": [...]} where each device carries id/name/type/status
        and a "logs" list (trigger lines first, then event lines).
    """
    logger.info("获取安全分析数据...")
    hosts = self.get_hosts()
    logger.info(f"获取到 {len(hosts)} 台主机")
    triggers = self.get_triggers()
    logger.info(f"获取到 {len(triggers)} 个活动触发器")
    # Last 24 hours of events.
    time_from = int((datetime.now() - timedelta(hours=24)).timestamp())
    events = self.get_events(time_from=time_from, limit=200)
    logger.info(f"获取到 {len(events)} 条事件")
    # Index log lines by hostid once instead of re-scanning every trigger
    # and event for every host (was O(hosts * (triggers + events))).
    logs_by_host = {}
    for t in triggers:
        line = f"{t['description']} - Priority: {t['priority']}"
        for hid in {h["hostid"] for h in t.get("hosts", [])}:
            logs_by_host.setdefault(hid, []).append(line)
    for e in events:
        ts = datetime.fromtimestamp(int(e["clock"])).strftime("%Y-%m-%d %H:%M:%S")
        line = f"{ts} - {e['name']} - Severity: {e['severity']}"
        for hid in {h["hostid"] for h in e.get("hosts", [])}:
            logs_by_host.setdefault(hid, []).append(line)
    devices = []
    for host in hosts:
        devices.append({
            "id": host["hostid"],
            "name": host["name"],
            "type": self._determine_device_type(host),
            # Zabbix reports status as a string: "0" means monitored/up.
            "status": "up" if host["status"] == "0" else "down",
            "logs": list(logs_by_host.get(host["hostid"], [])),
        })
    return {"devices": devices}
def _determine_device_type(self, host):
"""智能识别设备类型"""
tags = host.get("tags", [])
for tag in tags:
if tag["tag"] == "device_type":
return tag["value"]
name = host["name"].lower()
if any(kw in name for kw in ["sw", "switch"]):
return "switch"
elif any(kw in name for kw in ["fw", "firewall"]):
return "firewall"
elif any(kw in name for kw in ["server", "web", "db", "srv"]):
return "server"
else:
return "unknown"
def _get_mock_cpu_usage(self):
"""模拟CPU使用率数据"""
return {
"usage": 45.2, # 模拟使用率
"cores": 4, # 模拟核心数
"load_average": [1.2, 1.1, 0.8] # 模拟负载
}
def _get_mock_memory_usage(self):
"""模拟内存使用数据"""
return {
"total": 16384, # MB
"used": 8192, # MB
"free": 8192, # MB
"usage_percent": 50.0
}
def _get_mock_disk_usage(self):
"""模拟磁盘使用数据"""
return [
{
"mount": "/",
"total": 500000, # MB
"used": 250000, # MB
"free": 250000, # MB
"usage_percent": 50.0
},
{
"mount": "/var",
"total": 100000, # MB
"used": 60000, # MB
"free": 40000, # MB
"usage_percent": 60.0
}
]
def _get_mock_network_interfaces(self):
"""模拟网络接口数据"""
return [
{
"name": "eth0",
"ip_address": "192.168.1.100",
"mac_address": "00:0C:29:12:34:56",
"status": "up",
"speed": "1000Mbps",
"rx_bytes": 1234567890,
"tx_bytes": 987654321,
"errors": {"rx": 0, "tx": 0}
},
{
"name": "eth1",
"ip_address": "192.168.2.100",
"mac_address": "00:0C:29:12:34:57",
"status": "up",
"speed": "1000Mbps",
"rx_bytes": 2345678901,
"tx_bytes": 1876543210,
"errors": {"rx": 1, "tx": 0}
}
]
class ZabbixService:
    """Async facade over ZabbixDataCollector.

    Blocking collector calls are shifted off the event loop; collection
    failures surface as exceptions with Chinese operator-facing messages.
    """

    def __init__(self):
        """Create the service and eagerly build the underlying collector."""
        self.collector = None        # ZabbixDataCollector or None when init failed
        self.last_sync_time = None   # datetime of the last successful sync
        self._initialize_collector()

    async def _run_blocking(self, func, *args, **kwargs):
        """Run a blocking callable off the loop (3.9+ to_thread, else executor)."""
        to_thread = getattr(asyncio, "to_thread", None)
        if to_thread is None:
            loop = asyncio.get_running_loop()
            return await loop.run_in_executor(None, lambda: func(*args, **kwargs))
        return await to_thread(func, *args, **kwargs)

    def _initialize_collector(self):
        """Build the collector from settings; leave it as None on failure."""
        try:
            self.collector = ZabbixDataCollector(
                zabbix_url=settings.ZABBIX_URL,
                username=settings.ZABBIX_USERNAME,
                password=settings.ZABBIX_PASSWORD
            )
            logger.info("Zabbix服务初始化成功")
        except Exception as e:
            logger.error(f"Zabbix服务初始化失败: {e}")
            self.collector = None

    def _require_collector(self):
        """Raise when the collector never came up (bad Zabbix configuration)."""
        if not self.collector:
            raise Exception("Zabbix collector未初始化,请检查Zabbix配置")

    async def collect_device_data(self):
        """Collect the security-analysis device data in a worker thread."""
        self._require_collector()
        return await self._run_blocking(self.collector.get_security_data_for_analysis)

    async def collect_cpu_data(self):
        """Collect CPU/hardware data in a worker thread."""
        self._require_collector()
        return await self._run_blocking(self.collector.get_cpu_data)

    async def collect_network_data(self):
        """Collect network-interface data in a worker thread."""
        self._require_collector()
        return await self._run_blocking(self.collector.get_network_data)

    async def sync_data(self):
        """Run all three collections concurrently and record the sync time.

        Returns:
            dict: counts per dataset plus the ISO sync timestamp.

        Raises:
            Exception: "数据同步失败: ..." when any collection fails.
        """
        try:
            logger.info("开始同步Zabbix数据...")
            device_data, cpu_data, network_data = await asyncio.gather(
                self.collect_device_data(),
                self.collect_cpu_data(),
                self.collect_network_data(),
            )
            self.last_sync_time = datetime.now()
            logger.info("Zabbix数据同步完成")
            return {
                "devices": len(device_data.get("devices", [])),
                "hosts_cpu": len(cpu_data.get("hosts", [])),
                "hosts_network": len(network_data.get("hosts", [])),
                "sync_time": self.last_sync_time.isoformat(),
            }
        except Exception as e:
            logger.error(f"Zabbix数据同步失败: {e}")
            raise Exception(f"数据同步失败: {e}")

    def get_sync_status(self):
        """Report the last sync time (ISO or None) and collector readiness."""
        last = self.last_sync_time
        return {
            "last_sync_time": last.isoformat() if last else None,
            "collector_initialized": self.collector is not None,
        }
python-dotenv==1.0.1
feedparser>=6.0.10
motor==3.3.2
pymongo<4.7
requests>=2.31.0
@echo off
REM Security Service startup script (Windows).
REM Creates a venv on first run, installs dependencies, requires a .env file,
REM then serves the FastAPI app on port 8002 with auto-reload.
echo ============================================================
echo Security Service - Zabbix集成版本
echo ============================================================
echo.
REM Check that Python is installed and reachable via PATH.
python --version >nul 2>&1
if %errorlevel% neq 0 (
echo ❌ Python未安装或未添加到PATH
pause
exit /b 1
)
REM Create the virtual environment on first run only.
if not exist "venv" (
echo 📦 创建虚拟环境...
python -m venv venv
)
REM Activate the virtual environment for the rest of the script.
call venv\Scripts\activate.bat
REM Install dependencies quietly into the venv.
echo 📥 安装依赖...
pip install -r requirements.txt -q
REM Require a .env; bootstrap it from the example and stop so the user can edit it.
if not exist ".env" (
echo ⚠️ .env文件不存在,复制.env.example...
copy .env.example .env
echo ❗ 请编辑.env文件,配置Zabbix服务器信息
echo ❗ 配置完成后重新运行此脚本
pause
exit /b 1
)
REM Start the service (blocks until Ctrl+C).
echo.
echo 🚀 启动Security Service...
echo 📍 服务地址: http://localhost:8002
echo 📚 API文档: http://localhost:8002/docs
echo.
echo 按 Ctrl+C 停止服务
echo.
python -m uvicorn app.main:app --host 0.0.0.0 --port 8002 --reload
pause
\ No newline at end of file
#!/bin/bash
# Security Service startup script (Linux/MacOS).
# Creates a venv on first run, installs dependencies, requires a .env file,
# then serves the FastAPI app on port 8002 with auto-reload.
#
# Fix: previously a failed venv creation / pip install / cp was silently
# ignored and the script kept going; abort on the first error instead.
set -e

echo "============================================================"
echo " Security Service - Zabbix集成版本"
echo "============================================================"
echo

# Check that python3 is available.
if ! command -v python3 >/dev/null 2>&1; then
    echo "❌ Python3未安装"
    exit 1
fi

# Create the virtual environment on first run only.
if [ ! -d "venv" ]; then
    echo "📦 创建虚拟环境..."
    python3 -m venv venv
fi

# Activate the virtual environment.
echo "🔄 激活虚拟环境..."
# shellcheck disable=SC1091
source venv/bin/activate

# Install dependencies quietly into the venv.
echo "📥 安装依赖..."
pip install -r requirements.txt -q

# Require a .env; bootstrap from the example and stop so the user can edit it.
if [ ! -f ".env" ]; then
    echo "⚠️ .env文件不存在,复制.env.example..."
    cp .env.example .env
    echo "❗ 请编辑.env文件,配置Zabbix服务器信息"
    echo "❗ 配置完成后重新运行此脚本"
    exit 1
fi

# Start the service; exec replaces the shell so signals reach uvicorn directly.
echo
echo "🚀 启动Security Service..."
echo "📍 服务地址: http://localhost:8002"
echo "📚 API文档: http://localhost:8002/docs"
echo
echo "按 Ctrl+C 停止服务"
echo
exec python3 -m uvicorn app.main:app --host 0.0.0.0 --port 8002 --reload
\ No newline at end of file
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Security Service 本地工具
用途:替代 intelligentPerception/zabbixDataFrom 的采集与联调脚本,支持:
- Zabbix API → 采集 → 生成 JSON 文件
- JSON 文件 → 调用 Security Service REST API
- 保存 Dify 分析结果到文件(服务侧同时落库)
- 查询历史接口并保存结果
"""
import argparse
import asyncio
import json
import os
import sys
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, Optional
import httpx
sys.path.append(os.path.join(os.path.dirname(__file__), "app"))
def _ensure_dir(path: Path) -> Path:
    """Create *path* (including missing parents) if needed; return it for chaining."""
    path.mkdir(parents=True, exist_ok=True)
    return path
def _write_json(path: Path, data: Any) -> None:
path.write_text(json.dumps(data, indent=2, ensure_ascii=False), encoding="utf-8")
def _read_json(path: Path) -> Any:
return json.loads(path.read_text(encoding="utf-8"))
async def collect_zabbix_json(output_dir: Path) -> Dict[str, Path]:
    """Collect device/CPU/network data from Zabbix and dump it as JSON files.

    Writes three raw dumps plus the two analysis payloads into *output_dir*
    and returns a mapping of file name -> written path.

    Raises:
        RuntimeError: when the Zabbix collector is not configured.
    """
    # Imported lazily so the tool can run other sub-commands without the
    # service package importable.
    from services.zabbix_service import ZabbixService
    _ensure_dir(output_dir)
    zabbix_service = ZabbixService()
    status = zabbix_service.get_sync_status()
    if not status.get("collector_initialized"):
        raise RuntimeError("Zabbix collector 未初始化,请检查 ZABBIX_URL / ZABBIX_USERNAME / ZABBIX_PASSWORD")
    print("=" * 70)
    print("阶段1:Zabbix 数据采集 → 生成 JSON 文件")
    print("=" * 70)
    device_data = await zabbix_service.collect_device_data()
    cpu_data = await zabbix_service.collect_cpu_data()
    network_data = await zabbix_service.collect_network_data()
    analysis_input = {"devices": device_data.get("devices", [])}
    # Build a representative attack-advice payload: prefer the first device
    # that has logs, falling back to the first device overall.
    attack_target = "unknown"
    attack_logs = ""
    devices = device_data.get("devices", [])
    if devices:
        picked = next((d for d in devices if d.get("logs")), devices[0])
        attack_target = picked.get("name") or "unknown"
        logs = picked.get("logs") or []
        # Cap at the first 10 log lines to keep the payload small.
        attack_logs = "\n".join(str(x) for x in logs[:10])
    attack_advice_input = {
        "attack_type": "Suspicious Activity",
        "target_device": attack_target,
        "severity": "high",
        "logs": attack_logs,
    }
    paths = {
        "zabbix_devices.json": output_dir / "zabbix_devices.json",
        "zabbix_cpu.json": output_dir / "zabbix_cpu.json",
        "zabbix_network.json": output_dir / "zabbix_network.json",
        "analysis_input.json": output_dir / "analysis_input.json",
        "attack_advice_input.json": output_dir / "attack_advice_input.json",
    }
    _write_json(paths["zabbix_devices.json"], device_data)
    _write_json(paths["zabbix_cpu.json"], cpu_data)
    _write_json(paths["zabbix_network.json"], network_data)
    _write_json(paths["analysis_input.json"], analysis_input)
    _write_json(paths["attack_advice_input.json"], attack_advice_input)
    print(f"✅ 已生成: {paths['analysis_input.json']}")
    print(f"✅ 已生成: {paths['attack_advice_input.json']}")
    print(f"✅ 设备数量: {len(analysis_input['devices'])}")
    return paths
def _auth_headers(token: str) -> Dict[str, str]:
return {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}
async def call_security_service_api(base_url: str, token: str, input_dir: Path, output_dir: Path) -> Dict[str, Path]:
    """Feed the collected JSON payloads through the Security Service REST API.

    Reads analysis_input.json / attack_advice_input.json from *input_dir*,
    calls the analysis, attack-advice, monitor, report and both history
    endpoints, and saves every response as JSON under *output_dir*.

    Args:
        base_url: service root, e.g. "http://localhost:8003" (trailing "/" ok).
        token: admin JWT passed as a Bearer credential.

    Returns:
        Mapping of result file name -> written path.

    Raises:
        httpx.HTTPStatusError: on any non-2xx response (raise_for_status).
    """
    _ensure_dir(output_dir)
    print("\n" + "=" * 70)
    print("阶段2:JSON 文件 → Security Service REST API → 保存结果")
    print("=" * 70)
    analysis_input = _read_json(input_dir / "analysis_input.json")
    attack_input = _read_json(input_dir / "attack_advice_input.json")
    endpoints = {
        "analysis": f"{base_url.rstrip('/')}/api/v1/security/analysis",
        "attack_advice": f"{base_url.rstrip('/')}/api/v1/security/attack-advice",
        "monitor": f"{base_url.rstrip('/')}/api/v1/security/monitor",
        "report": f"{base_url.rstrip('/')}/api/v1/security/report",
        "analysis_history": f"{base_url.rstrip('/')}/api/v1/security/analysis/history?limit=20",
        "attack_history": f"{base_url.rstrip('/')}/api/v1/security/attack-advice/history?limit=20",
    }
    out_paths = {
        "analysis_result.json": output_dir / "analysis_result.json",
        "attack_advice_result.json": output_dir / "attack_advice_result.json",
        "monitor_result.json": output_dir / "monitor_result.json",
        "report_result.json": output_dir / "report_result.json",
        "analysis_history.json": output_dir / "analysis_history.json",
        "attack_advice_history.json": output_dir / "attack_advice_history.json",
    }
    # Generous timeout: the analysis endpoints proxy an LLM backend.
    async with httpx.AsyncClient(timeout=120.0) as client:
        r1 = await client.post(endpoints["analysis"], headers=_auth_headers(token), json=analysis_input)
        r1.raise_for_status()
        analysis_res = r1.json()
        _write_json(out_paths["analysis_result.json"], analysis_res)
        print(f"✅ /analysis 完成,risk_level={analysis_res.get('risk_level')}")
        r2 = await client.post(endpoints["attack_advice"], headers=_auth_headers(token), json=attack_input)
        r2.raise_for_status()
        attack_res = r2.json()
        _write_json(out_paths["attack_advice_result.json"], attack_res)
        print(f"✅ /attack-advice 完成,immediate_actions={len(attack_res.get('immediate_actions', []))}")
        r3 = await client.get(endpoints["monitor"], headers=_auth_headers(token))
        r3.raise_for_status()
        monitor_res = r3.json()
        _write_json(out_paths["monitor_result.json"], monitor_res)
        print(f"✅ /monitor 完成,detected={len(monitor_res.get('detected_vulnerabilities', []))}")
        r4 = await client.get(endpoints["report"], headers=_auth_headers(token))
        r4.raise_for_status()
        report_res = r4.json()
        _write_json(out_paths["report_result.json"], report_res)
        print(f"✅ /report 完成,date={report_res.get('date')}")
        r5 = await client.get(endpoints["analysis_history"], headers=_auth_headers(token))
        r5.raise_for_status()
        _write_json(out_paths["analysis_history.json"], r5.json())
        print("✅ /analysis/history 完成")
        r6 = await client.get(endpoints["attack_history"], headers=_auth_headers(token))
        r6.raise_for_status()
        _write_json(out_paths["attack_advice_history.json"], r6.json())
        print("✅ /attack-advice/history 完成")
    return out_paths
async def internal_smoke_test() -> bool:
    """Exercise the service classes in-process, bypassing the REST layer.

    Instantiates ZabbixService and SecurityService and runs one analysis on a
    synthetic device. Returns True on success, False on any exception.
    """
    print("=" * 70)
    print("阶段0:内部冒烟(不经 REST,仅验证类可用)")
    print("=" * 70)
    try:
        # Imported lazily so the import error itself is reported as a smoke
        # failure rather than crashing the CLI at startup.
        from services.zabbix_service import ZabbixService
        from services.analysis import SecurityService
        from schemas.payloads import DeviceInfo
        zabbix_service = ZabbixService()
        status = zabbix_service.get_sync_status()
        print(f"Zabbix collector: {'✅ 已初始化' if status.get('collector_initialized') else '❌ 未初始化'}")
        security_service = SecurityService(zabbix_service=zabbix_service)
        # A single synthetic device with two suspicious log lines.
        res = await security_service.analyze_risks(
            [
                DeviceInfo(
                    id="smoke-1",
                    name="Smoke-Device",
                    type="server",
                    status="up",
                    logs=["CPU high", "Multiple failed login attempts"],
                )
            ]
        )
        print(f"Security analyze_risks: ✅ 返回 risk_level={res.risk_level}")
        return True
    except Exception as e:
        print(f"❌ 内部冒烟失败: {e}")
        return False
def _env_or_arg(value: Optional[str], env_key: str) -> Optional[str]:
return value if value else os.getenv(env_key)
async def main() -> int:
    """CLI entry point; returns a process exit code.

    Sub-commands:
      collect   - pull Zabbix data and write the JSON input files
      call-api  - feed the JSON files to the Security Service REST API
      run-all   - collect then call-api in one go
      smoke     - in-process smoke test (also the default when no command given)
    """
    parser = argparse.ArgumentParser(prog="test_refactor.py")
    sub = parser.add_subparsers(dest="cmd", required=False)
    # Fix: the default used to be ".\\out", which on POSIX creates a literal
    # directory named ".\out"; "out" resolves to the same place on Windows
    # and behaves correctly everywhere.
    default_out = "out"
    p_collect = sub.add_parser("collect", help="采集Zabbix并生成JSON文件")
    p_collect.add_argument("--out", default=default_out, help="输出目录")
    p_call = sub.add_parser("call-api", help="读取JSON并调用Security Service REST API")
    p_call.add_argument("--base-url", default=None, help="服务地址,例如 http://localhost:8003 或 http://localhost:8080")
    p_call.add_argument("--token", default=None, help="管理员JWT,或使用环境变量 ADMIN_TOKEN")
    p_call.add_argument("--in", dest="in_dir", default=default_out, help="输入目录(包含 analysis_input.json 等)")
    p_call.add_argument("--out", dest="out_dir", default=default_out, help="输出目录")
    p_all = sub.add_parser("run-all", help="采集→生成JSON→调用REST→保存结果→查询历史")
    p_all.add_argument("--base-url", default=None, help="服务地址,例如 http://localhost:8003 或 http://localhost:8080")
    p_all.add_argument("--token", default=None, help="管理员JWT,或使用环境变量 ADMIN_TOKEN")
    p_all.add_argument("--out", default=default_out, help="输出目录")
    sub.add_parser("smoke", help="内部冒烟测试")
    args = parser.parse_args()
    print("Security Service 一体化工具")
    print(f"时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    # No sub-command behaves like "smoke".
    if args.cmd in (None, "smoke"):
        ok = await internal_smoke_test()
        return 0 if ok else 1
    if args.cmd == "collect":
        out_dir = Path(args.out).resolve()
        await collect_zabbix_json(out_dir)
        return 0
    if args.cmd == "call-api":
        base_url = _env_or_arg(args.base_url, "SECURITY_SERVICE_URL") or "http://localhost:8003"
        token = _env_or_arg(args.token, "ADMIN_TOKEN")
        if not token:
            raise RuntimeError("缺少管理员JWT:请传 --token 或设置环境变量 ADMIN_TOKEN")
        in_dir = Path(args.in_dir).resolve()
        out_dir = Path(args.out_dir).resolve()
        await call_security_service_api(base_url, token, in_dir, out_dir)
        return 0
    if args.cmd == "run-all":
        base_url = _env_or_arg(args.base_url, "SECURITY_SERVICE_URL") or "http://localhost:8003"
        token = _env_or_arg(args.token, "ADMIN_TOKEN")
        if not token:
            raise RuntimeError("缺少管理员JWT:请传 --token 或设置环境变量 ADMIN_TOKEN")
        out_dir = Path(args.out).resolve()
        await collect_zabbix_json(out_dir)
        # collect writes into out_dir, which then doubles as the API input dir.
        await call_security_service_api(base_url, token, out_dir, out_dir)
        return 0
    raise RuntimeError(f"未知命令: {args.cmd}")
# Script entry point: run the async CLI and translate Ctrl+C into the
# conventional SIGINT exit code 130.
if __name__ == "__main__":
    try:
        sys.exit(asyncio.run(main()))
    except KeyboardInterrupt:
        sys.exit(130)
#!/usr/bin/env python3
"""
LLM Filter 系统环境变量生成脚本
功能:
1. 生成安全的随机密钥和密码
2. 创建 .env 文件
3. 执行多重安全检查
4. 验证配置完整性
使用方法:
python scripts/generate_secrets.py
注意事项:
- 不要将生成的 .env 文件提交到 Git
- 定期更新敏感信息
- 生产环境使用更强的密码
"""
import secrets
import os
import sys
import re
from datetime import datetime
from pathlib import Path
# ANSI 颜色代码
# ANSI escape codes for colored terminal output.
class Colors:
    """ANSI escape sequences used by the print_* helpers below."""
    HEADER = '\033[95m'     # bright magenta
    OKBLUE = '\033[94m'     # bright blue
    OKCYAN = '\033[96m'     # bright cyan
    OKGREEN = '\033[92m'    # bright green
    WARNING = '\033[93m'    # bright yellow
    FAIL = '\033[91m'       # bright red
    ENDC = '\033[0m'        # reset all attributes
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
def print_header(text):
    """Render *text* centered inside a bold, colored 70-char banner."""
    bar = f"{Colors.HEADER}{Colors.BOLD}{'=' * 70}{Colors.ENDC}"
    print("\n" + bar)
    print(f"{Colors.HEADER}{Colors.BOLD}{text:^70}{Colors.ENDC}")
    print(bar + "\n")
def print_success(text):
    """Print *text* as a green "[OK]" line."""
    print("{}[OK] {}{}".format(Colors.OKGREEN, text, Colors.ENDC))
def print_warning(text):
    """Print *text* as a yellow "[WARN]" line."""
    print("{}[WARN] {}{}".format(Colors.WARNING, text, Colors.ENDC))
def print_error(text):
    """Print *text* as a red "[ERROR]" line."""
    print("{}[ERROR] {}{}".format(Colors.FAIL, text, Colors.ENDC))
def print_info(text):
    """Print *text* as a cyan "[INFO]" line."""
    print("{}[INFO] {}{}".format(Colors.OKCYAN, text, Colors.ENDC))
def generate_jwt_secret(length=64):
    """Return a cryptographically random, URL-safe JWT signing secret.

    Args:
        length: entropy in bytes (default 64); the encoded string is longer.

    Returns:
        str: the random secret.
    """
    secret = secrets.token_urlsafe(length)
    return secret
def generate_db_password(length=32):
    """Return a cryptographically random, URL-safe database password.

    Args:
        length: entropy in bytes (default 32); the encoded string is longer.

    Returns:
        str: the random password.
    """
    password = secrets.token_urlsafe(length)
    return password
def generate_admin_password(length=16):
    """Return a cryptographically random, URL-safe admin password.

    Args:
        length: entropy in bytes (default 16); the encoded string is longer.

    Returns:
        str: the random password.
    """
    password = secrets.token_urlsafe(length)
    return password
def generate_api_key():
    """Return a Dify-style API key: "app-" plus 32 random URL-safe bytes."""
    return "app-" + secrets.token_urlsafe(32)
def check_secret_strength(secret, min_length=32, secret_type="Secret"):
    """Return True when *secret* is long enough and not a known template value.

    Args:
        secret: the value to validate.
        min_length: minimum acceptable character count.
        secret_type: label used in the error output.
    """
    template_values = ("your_secret_key_here", "your_secure_password_here")
    if len(secret) < min_length:
        print_error(f"{secret_type} 长度不足(要求至少 {min_length} 字节,实际 {len(secret)} 字节)")
        return False
    if secret in template_values:
        print_error(f"{secret_type} 使用了默认值,请修改")
        return False
    return True
def check_gitignore():
    """Verify .gitignore exists and covers the secret-file patterns.

    Returns:
        bool: True when every required pattern is present.
    """
    gitignore_path = Path(".gitignore")
    if not gitignore_path.exists():
        print_warning(".gitignore 文件不存在")
        return False
    try:
        content = gitignore_path.read_text(encoding='utf-8')
    except Exception:
        # Fall back for files saved with a legacy Chinese-locale encoding.
        content = gitignore_path.read_text(encoding='gbk')
    required_patterns = [".env", ".env.local", ".env.*.local", "*.key", "credentials.json"]
    # Plain substring check, mirroring the historical behavior.
    missing_patterns = [p for p in required_patterns if p not in content]
    if missing_patterns:
        print_warning(f".gitignore 缺少以下规则:{', '.join(missing_patterns)}")
        return False
    print_success(".gitignore 配置正确")
    return True
def check_existing_env(env_file):
    """Decide whether an existing .env file may be overwritten.

    Args:
        env_file: pathlib.Path to the .env file.

    Returns:
        bool: True when writing is safe (file missing, or file contains only
        template placeholders) or when the user explicitly confirms; False
        when the user declines.
    """
    if not env_file.exists():
        return True
    print_warning(f"{env_file} 已存在")
    # 检查是否包含真实的敏感信息
    try:
        content = env_file.read_text(encoding='utf-8')
    except Exception:
        content = env_file.read_text(encoding='gbk')
    # 检查是否有模板值
    template_patterns = [
        "your_secure_password_here",
        "your_jwt_secret_key",
        "your_dify_api_key_here",
        "your_admin_password_here"
    ]
    has_real_values = any(pattern not in content for pattern in template_patterns)
    if has_real_values:
        print_warning("检测到 .env 文件包含真实配置,覆盖可能导致服务中断")
        print_info("建议备份现有 .env 文件:")
        print(f" cp {env_file} {env_file}.backup.{datetime.now().strftime('%Y%m%d_%H%M%S')}")
        response = input("是否继续覆盖?(y/n): ").strip().lower()
        return response == 'y'
    # Bug fix: previously this path fell through and returned None (falsy),
    # so a template-only .env could never be regenerated. Template-only
    # content is safe to overwrite.
    return True
def validate_generated_values(values):
    """Run strength checks on the generated secrets.

    Args:
        values: dict of generated configuration values.

    Returns:
        bool: True when every checked secret passes.
    """
    print_header("安全检查")
    # (key, minimum length) for each secret that must be validated.
    checks = (
        ('JWT_SECRET', 32),
        ('DB_PASSWORD', 16),
        ('ADMIN_PASSWORD', 8),
    )
    all_valid = True
    for key, min_len in checks:
        if check_secret_strength(values[key], min_len, key):
            print_success(f"{key} 长度:{len(values[key])} 字节")
        else:
            all_valid = False
    return all_valid
def generate_env_file():
    """Generate the complete .env file content with fresh random secrets.

    Returns:
        str: .env file content.
    """
    values = {
        'JWT_SECRET': generate_jwt_secret(),
        'DB_PASSWORD': generate_db_password(),
        'ADMIN_PASSWORD': generate_admin_password(),
        'DB_HOST': 'postgres',
        'DB_PORT': '5432',
        'DB_USER': 'admin',
        'DB_NAME': 'llm_filter_db',
        'DB_SSL_MODE': 'disable',
        'ALGORITHM': 'HS256',
        'ACCESS_TOKEN_EXPIRE_MINUTES': '30',
        'DIFY_API_URL': 'http://192.168.6.6/v1',
        'DIFY_API_KEY': 'your_dify_api_key_here',
        'DIFY_RESPONSE_MODE': 'streaming',
        'DIFY_MESSAGE_ENDPOINT': 'chat-messages',
        'MONGODB_URL': 'mongodb://mongo:27017',
        'MONGODB_DB_NAME': 'security_service_db',
        'ZABBIX_URL': 'http://localhost/zabbix/api_jsonrpc.php',
        'ZABBIX_USERNAME': 'Admin',
        'ZABBIX_PASSWORD': generate_db_password(20),
        'ZABBIX_SYNC_INTERVAL': '3600',
        'ZABBIX_AUTO_SYNC': 'true',
        'LOG_LEVEL': 'INFO',
        'LOG_FORMAT': 'json',
        'REDIS_HOST': 'redis',
        'REDIS_PORT': '6379',
        'REDIS_PASSWORD': generate_db_password(16),
        'REDIS_DB': '0',
        'REDIS_TIMEOUT': '2000',
        'OLLAMA_BASE_URL': 'http://192.168.6.6:11434/',
        'OLLAMA_MODEL': 'deepseek-r1:14b',
        'APP_BASE_URL': 'http://localhost:8000',
        'API_V1_STR': '/api/v1',
        'APP_MODE': 'edu',
        'TERM_START_DATE': '2025-09-01',
        'CORS_ALLOWED_ORIGINS': '*',
        'SERVER_PORT': '8082',
        'JPA_DDL_AUTO': 'update',
        'JPA_SHOW_SQL': 'true',
        'JPA_FORMAT_SQL': 'true',
        'JPA_DIALECT': 'org.hibernate.dialect.PostgreSQLDialect',
        'LOGGING_LEVEL': 'INFO',
        'ADMIN_USERNAME': 'admin',
        'ADMIN_EMAIL': 'admin@example.com',
        'TZ': 'Asia/Shanghai',
        'GATEWAY_PORT': '8080',
        'AUTH_SERVICE_PORT': '8081',
        'EDU_SERVICE_PORT': '8082',
        'LLM_SERVICE_PORT': '8000',
        'SECURITY_SERVICE_PORT': '8003',
        'POSTGRES_PORT_EXTERNAL': '5433',
        'MONGODB_PORT_EXTERNAL': '27017',
        'REDIS_PORT_EXTERNAL': '6379',
    }
    # NOTE: the MongoDB section used to repeat "DB_NAME=..." (already emitted
    # in the PostgreSQL section); the duplicate line has been removed.
    env_content = f"""# ============================================================
# LLM Filter 系统环境变量配置
# ============================================================
# 生成时间:{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}
# 生成脚本:scripts/generate_secrets.py
#
# ⚠️ 安全警告:
# - 此文件包含敏感信息,不要提交到 Git 仓库
# - 不要分享此文件或其内容
# - 生产环境请使用更强的密码和密钥
# - 定期轮换敏感信息
#
# 📝 配置说明:
# - 所有敏感信息已自动生成
# - 保留了原有的服务地址配置
# - 需要手动填写 DIFY_API_KEY
# - 可根据实际需求修改其他配置
# ============================================================
# ============================================================
# 📊 数据库配置 (PostgreSQL)
# ============================================================
DB_HOST={values['DB_HOST']}
DB_PORT={values['DB_PORT']}
DB_USER={values['DB_USER']}
DB_PASSWORD={values['DB_PASSWORD']}
DB_NAME={values['DB_NAME']}
DB_SSL_MODE={values['DB_SSL_MODE']}
# ============================================================
# 🔐 JWT 认证配置
# ============================================================
JWT_SECRET={values['JWT_SECRET']}
ALGORITHM={values['ALGORITHM']}
ACCESS_TOKEN_EXPIRE_MINUTES={values['ACCESS_TOKEN_EXPIRE_MINUTES']}
# ============================================================
# 🔧 Dify AI 平台配置
# ============================================================
DIFY_API_URL={values['DIFY_API_URL']}
DIFY_API_KEY={values['DIFY_API_KEY']}
DIFY_RESPONSE_MODE={values['DIFY_RESPONSE_MODE']}
DIFY_MESSAGE_ENDPOINT={values['DIFY_MESSAGE_ENDPOINT']}
# ============================================================
# 🍃 MongoDB 配置
# ============================================================
MONGODB_URL={values['MONGODB_URL']}
MONGODB_DB_NAME={values['MONGODB_DB_NAME']}
# ============================================================
# 🖥️ Zabbix 监控系统配置
# ============================================================
ZABBIX_URL={values['ZABBIX_URL']}
ZABBIX_USERNAME={values['ZABBIX_USERNAME']}
ZABBIX_PASSWORD={values['ZABBIX_PASSWORD']}
ZABBIX_SYNC_INTERVAL={values['ZABBIX_SYNC_INTERVAL']}
ZABBIX_AUTO_SYNC={values['ZABBIX_AUTO_SYNC']}
# ============================================================
# 📝 日志配置
# ============================================================
LOG_LEVEL={values['LOG_LEVEL']}
LOG_FORMAT={values['LOG_FORMAT']}
# ============================================================
# 🚀 Redis 配置
# ============================================================
REDIS_HOST={values['REDIS_HOST']}
REDIS_PORT={values['REDIS_PORT']}
REDIS_PASSWORD={values['REDIS_PASSWORD']}
REDIS_DB={values['REDIS_DB']}
REDIS_TIMEOUT={values['REDIS_TIMEOUT']}
# ============================================================
# 🤖 Ollama 本地 LLM 配置
# ============================================================
OLLAMA_BASE_URL={values['OLLAMA_BASE_URL']}
OLLAMA_MODEL={values['OLLAMA_MODEL']}
# ============================================================
# 🌐 应用基础配置
# ============================================================
APP_BASE_URL={values['APP_BASE_URL']}
API_V1_STR={values['API_V1_STR']}
APP_MODE={values['APP_MODE']}
TERM_START_DATE={values['TERM_START_DATE']}
# ============================================================
# 🎨 CORS 跨域配置
# ============================================================
CORS_ALLOWED_ORIGINS={values['CORS_ALLOWED_ORIGINS']}
# ============================================================
# 🏢 Java Edu Service 配置
# ============================================================
SERVER_PORT={values['SERVER_PORT']}
JPA_DDL_AUTO={values['JPA_DDL_AUTO']}
JPA_SHOW_SQL={values['JPA_SHOW_SQL']}
JPA_FORMAT_SQL={values['JPA_FORMAT_SQL']}
JPA_DIALECT={values['JPA_DIALECT']}
LOGGING_LEVEL={values['LOGGING_LEVEL']}
# ============================================================
# 🔒 Auth Service 配置
# ============================================================
ADMIN_USERNAME={values['ADMIN_USERNAME']}
ADMIN_PASSWORD={values['ADMIN_PASSWORD']}
ADMIN_EMAIL={values['ADMIN_EMAIL']}
# ============================================================
# 🐳 Docker 配置
# ============================================================
TZ={values['TZ']}
# ============================================================
# 📊 服务端口映射(外部访问)
# ============================================================
GATEWAY_PORT={values['GATEWAY_PORT']}
AUTH_SERVICE_PORT={values['AUTH_SERVICE_PORT']}
EDU_SERVICE_PORT={values['EDU_SERVICE_PORT']}
LLM_SERVICE_PORT={values['LLM_SERVICE_PORT']}
SECURITY_SERVICE_PORT={values['SECURITY_SERVICE_PORT']}
POSTGRES_PORT_EXTERNAL={values['POSTGRES_PORT_EXTERNAL']}
MONGODB_PORT_EXTERNAL={values['MONGODB_PORT_EXTERNAL']}
REDIS_PORT_EXTERNAL={values['REDIS_PORT_EXTERNAL']}
"""
    return env_content
def print_post_generation_instructions():
    """Print the manual follow-up steps after the .env file is written."""
    print_header("后续操作步骤")
    # Displayed verbatim, one line per list entry.
    steps = [
        "1️⃣ 填写 DIFY_API_KEY",
        " 编辑 .env 文件,填入您的 Dify API Key",
        " 格式:app-xxxxxxxxxxxxxxxxxxxxxxxxxxxx",
        "",
        "2️⃣ 验证 .gitignore",
        " 确保 .gitignore 包含以下内容:",
        " - .env",
        " - .env.local",
        " - .env.*.local",
        "",
        "3️⃣ 重启服务",
        " docker-compose down",
        " docker-compose up -d --build",
        "",
        "4️⃣ 验证服务启动",
        " docker-compose ps",
        " docker-compose logs -f [service-name]",
        "",
        "5️⃣ 测试认证",
        " 使用管理员账号登录:",
        f" 用户名:admin",
        " 密码:查看 .env 中的 ADMIN_PASSWORD",
        "",
        "6️⃣ 备份配置",
        " 定期备份 .env 文件到安全位置",
    ]
    for step in steps:
        print(step)
    print_warning("\n⚠️ 重要提醒:")
    print(" - 不要将 .env 文件提交到 Git")
    print(" - 定期轮换密码和密钥")
    print(" - 生产环境使用更强的安全措施")
def main():
    """Entry point: generate, validate and persist the project's .env file.

    Flow: check the interpreter version, move to the project root when it
    can be located, run the .gitignore check, confirm overwrite of any
    existing .env, generate the content, sanity-check the generated
    secrets, write the file with 0600 permissions, then print the
    follow-up instructions. Exits non-zero on any fatal failure.
    """
    print_header("LLM Filter 环境变量生成脚本")

    # Refuse to run on interpreters older than 3.6 (f-strings, pathlib use).
    if sys.version_info < (3, 6):
        print_error("需要 Python 3.6 或更高版本")
        sys.exit(1)
    print_info(f"Python 版本:{sys.version}")
    print_info(f"当前目录:{os.getcwd()}")

    # Locate the project root (parent of the scripts directory) and chdir
    # there so the .env lands next to docker-compose.yml.
    this_script = Path(__file__).parent
    repo_root = this_script.parent
    if (repo_root / "docker-compose.yml").exists():
        os.chdir(repo_root)
        print_info(f"已切换到项目根目录:{repo_root}")
    else:
        print_warning("未检测到 docker-compose.yml,请确保在项目根目录运行脚本")

    print_header("环境检查")
    check_gitignore()

    # Bail out politely if the user declines to overwrite an existing .env.
    dotenv_path = Path(".env")
    if not check_existing_env(dotenv_path):
        print_info("操作已取消")
        return

    print_header("生成环境变量")
    dotenv_text = generate_env_file()

    # Re-parse the generated text into KEY -> value pairs (skipping comment
    # lines) so the secrets can be validated before anything hits disk.
    parsed = {
        key.strip(): val.strip()
        for key, val in (
            raw.split('=', 1)
            for raw in dotenv_text.split('\n')
            if '=' in raw and not raw.strip().startswith('#')
        )
    }
    if not validate_generated_values(parsed):
        print_error("生成的密钥不符合安全要求")
        sys.exit(1)

    # Persist the file; any I/O failure here is fatal.
    try:
        dotenv_path.write_text(dotenv_text, encoding='utf-8')
        print_success(f"成功生成 {dotenv_path}")
        print_info(f"文件大小:{dotenv_path.stat().st_size} 字节")
    except Exception as exc:
        print_error(f"写入文件失败:{exc}")
        sys.exit(1)

    # Tighten permissions to owner read/write only; a failure here (e.g. on
    # filesystems without POSIX modes) is a warning, not a fatal error.
    try:
        os.chmod(dotenv_path, 0o600)
        print_success("文件权限已设置为 600(仅所有者可读写)")
    except Exception as exc:
        print_warning(f"设置文件权限失败:{exc}")

    print_post_generation_instructions()
    print_success("\n环境变量生成完成!")
if __name__ == "__main__":
    # Run the generator; map Ctrl-C to a clean exit and any other
    # unexpected failure to a non-zero status with a printed message.
    try:
        main()
    except KeyboardInterrupt:
        print_warning("\n\n操作已取消")
        raise SystemExit(0)
    except Exception as exc:
        print_error(f"\n发生错误:{exc}")
        raise SystemExit(1)
#!/bin/bash
# LLM Filter 项目启动脚本
#
# Brings the stack up in dependency order: infrastructure (PostgreSQL,
# Redis, MongoDB) -> business services -> API gateway, with fixed sleeps
# between phases, then prints access URLs and log hints.
#
# NOTE(review): the .env check and compose invocations depend on the
# current working directory containing docker-compose.yml — run this
# from the project root.

set -u  # treat unset variables as errors; no -e so a failing 'down'
        # on a clean host does not abort the whole startup

echo "=========================================="
echo " LLM Filter 项目启动脚本"
echo "=========================================="
echo ""

# 检查 Docker 和 Docker Compose
if ! command -v docker &> /dev/null; then
    echo "[ERROR] Docker 未安装"
    exit 1
fi

# Fix: accept either the standalone v1 binary (docker-compose) or the
# Compose v2 plugin (docker compose). deploy.sh installs only
# docker-compose-plugin, so requiring the standalone binary would make
# this script fail on the very hosts the deployer provisions.
if command -v docker-compose &> /dev/null; then
    COMPOSE=(docker-compose)
elif docker compose version &> /dev/null; then
    COMPOSE=(docker compose)
else
    echo "[ERROR] Docker Compose 未安装"
    exit 1
fi

echo "[INFO] Docker 和 Docker Compose 已安装"
echo ""

# 检查 .env 文件
if [ ! -f .env ]; then
    echo "[ERROR] .env 文件不存在,请先运行:"
    echo "  python scripts/generate_secrets.py"
    exit 1
fi
echo "[INFO] .env 文件存在"
echo ""

# 步骤 1:停止现有服务
echo "=========================================="
echo "步骤 1: 停止现有服务"
echo "=========================================="
"${COMPOSE[@]}" down
echo ""

# 步骤 2:启动基础设施
echo "=========================================="
echo "步骤 2: 启动基础设施 (PostgreSQL, Redis, MongoDB)"
echo "=========================================="
"${COMPOSE[@]}" up -d postgres redis mongo
echo "[INFO] 等待数据库启动..."
sleep 10
echo ""
echo "[INFO] 检查数据库状态..."
"${COMPOSE[@]}" ps postgres redis mongo
echo ""

# 步骤 3:启动业务服务
echo "=========================================="
echo "步骤 3: 启动业务服务"
echo "=========================================="
"${COMPOSE[@]}" up -d auth-service edu-service llm-service security-service
echo "[INFO] 等待服务启动..."
sleep 15
echo ""
echo "[INFO] 检查业务服务状态..."
"${COMPOSE[@]}" ps auth-service edu-service llm-service security-service
echo ""

# 步骤 4:启动网关
echo "=========================================="
echo "步骤 4: 启动 API 网关"
echo "=========================================="
"${COMPOSE[@]}" up -d gateway
echo "[INFO] 等待网关启动..."
sleep 5
echo ""
echo "[INFO] 检查网关状态..."
"${COMPOSE[@]}" ps gateway
echo ""

# 步骤 5:验证服务
echo "=========================================="
echo "步骤 5: 验证所有服务"
echo "=========================================="
"${COMPOSE[@]}" ps
echo ""

echo "=========================================="
echo " 启动完成!"
echo "=========================================="
echo ""
echo "📚 文档地址:"
echo "  - Gateway: http://localhost:8080"
echo "  - Auth Service: http://localhost:8080/docs/auth/"
echo "  - Edu Service: http://localhost:8080/docs/edu/"
echo "  - LLM Service: http://localhost:8080/docs/llm/"
echo "  - Security: http://localhost:8080/docs/security/"
echo ""
echo "🔑 默认管理员账号:"
echo "  - 用户名: admin"
echo "  - 密码: 查看 .env 文件中的 ADMIN_PASSWORD"
echo ""
echo "📊 查看日志:"
echo "  - 所有服务: docker-compose logs -f"
echo "  - 特定服务: docker-compose logs -f [service-name]"
echo ""
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment