Compare commits

...

14 Commits

Author SHA1 Message Date
FengLee
8ae28e030d Add admin upgrade package preflight 2026-05-10 00:18:03 +08:00
FengLee
70656562b1 Persist PM2 process list after upgrades 2026-05-10 00:09:50 +08:00
FengLee
1a27177f51 Allow source local-storage route in upgrades 2026-05-10 00:05:08 +08:00
FengLee
66c82fd1ee Make admin upgrade restart non-blocking 2026-05-10 00:01:01 +08:00
FengLee
24be9c550b Add canvas workflow and harden data import 2026-05-09 23:54:18 +08:00
fenglee
1a0607fe8d docs: add detailed project readme 2026-05-09 16:45:30 +08:00
Codex
f2817ab8fd feat: improve admin upgrade logs 2026-05-09 08:07:24 +00:00
Codex
e072f219e4 fix: preserve upgrade runner logs 2026-05-09 07:55:12 +00:00
Codex
8ae0f57488 feat: add admin upgrade workflow 2026-05-09 07:52:57 +00:00
Codex
24eab34305 Handle long running custom image jobs 2026-05-09 06:21:38 +00:00
Codex
c8f0c37cd1 Prefer streaming for custom image generation 2026-05-09 05:42:33 +00:00
Codex
234da90ac6 Fix profile auth token handling 2026-05-09 03:54:46 +00:00
FengLee
e3d274cfd8 Merge remote-tracking branch 'origin/main' 2026-05-09 11:33:15 +08:00
FengLee
d499020d4e Initial miaojingAI project with image resolution guard 2026-05-09 11:32:34 +08:00
282 changed files with 59429 additions and 1 deletions

15
.babelrc Normal file
View File

@@ -0,0 +1,15 @@
{
"presets": [
[
"next/babel",
{
"preset-react": {
"development": true
}
}
]
],
"plugins": [
"@react-dev-inspector/babel-plugin"
]
}

15
.coze Normal file
View File

@@ -0,0 +1,15 @@
[project]
requires = [ "nodejs-24" ]
template = "nextjs"
version = "0.0.18"
appliedPatches = [ ]
[dev]
build = [ "bash", "./scripts/prepare.sh" ]
run = [ "bash", "./scripts/dev.sh" ]
deps = [ "git" ]
[deploy]
build = [ "bash", "./scripts/build.sh" ]
run = [ "bash", "./scripts/start.sh" ]
deps = [ "git" ]

107
.env.example Normal file
View File

@@ -0,0 +1,107 @@
# ============================================================
# 妙境 AI 创作平台 — 环境变量配置模板
# 复制此文件为 .env.local 并填写实际值
# cp .env.example .env.local
# ============================================================
# ----- 本地部署配置 (推荐) -----
# 本地 PostgreSQL 数据库
LOCAL_DB_URL=postgresql://postgres:postgres@localhost:5432/miaojing
LOCAL_DB_ANON_KEY=local-anon-key
LOCAL_DB_SERVICE_ROLE_KEY=local-service-role-key
# ----- Supabase 云端配置 (可选) -----
# 从 Supabase Dashboard → Settings → API 获取
# 支持 COZE_ 前缀和不带前缀两种变量名
# COZE_SUPABASE_URL=https://your-project.supabase.co
# COZE_SUPABASE_ANON_KEY=your-anon-key-here
# COZE_SUPABASE_SERVICE_ROLE_KEY=your-service-role-key-here
# 也可以使用不带 COZE_ 前缀的变量名 (二选一)
# SUPABASE_URL=https://your-project.supabase.co
# SUPABASE_ANON_KEY=your-anon-key-here
# SUPABASE_SERVICE_ROLE_KEY=your-service-role-key-here
# ----- 服务端口 (可选) -----
# 默认 5000,一般无需修改
# DEPLOY_RUN_PORT=5000
# MIAOJING_API_PORT=5100
# MIAOJING_CONSOLE_PORT=5200
# ----- 管理员注册邀请码 (可选) -----
# 注册时输入此邀请码可创建管理员账号
# 默认值: miaojing-admin-2024
# ADMIN_INVITE_CODE=miaojing-admin-2024
# ADMIN_DEFAULT_PASSWORD=change-this-before-production
# ----- 运行环境 (可选) -----
# DEV = 开发环境, PROD = 生产环境
# COZE_PROJECT_ENV=PROD
# NODE_ENV=production
# APP_BIND_HOST=127.0.0.1
# 部署脚本默认自动安装/切换 Node.js 24 LTS,如需使用 22 LTS 可设置为 22
# DEPLOY_NODE_MAJOR=24
# DEPLOY_NODE_INSTALL_DIR=/var/lib/miaojingAI/node
# ----- 项目域名 (可选) -----
# 用于构造回调 URL、分享链接等
# COZE_PROJECT_DOMAIN_DEFAULT=https://your-domain.com
# NEXT_PUBLIC_APP_URL=https://your-domain.com
# APP_BASE_URL=https://your-domain.com
# ----- 生产安全密钥 (生产环境必须设置) -----
# 建议使用 openssl rand -hex 32 生成
# DATA_ENCRYPTION_KEY=
# JWT_SECRET=
# GENERATION_INTERNAL_SECRET=
# ----- 持久化路径 (生产环境推荐放到项目目录外) -----
# LOCAL_STORAGE_DIR=/var/lib/miaojingAI/storage
# BACKUP_DIR=/var/lib/miaojingAI/backups
# ----- 数据库连接池 (可选) -----
# DB_POOL_MAX=20
# DB_CONNECTION_TIMEOUT_MS=5000
# DB_IDLE_TIMEOUT_MS=30000
# ----- Node HTTP 服务超时 (可选) -----
# HTTP_REQUEST_TIMEOUT_MS=190000
# HTTP_HEADERS_TIMEOUT_MS=65000
# HTTP_KEEP_ALIVE_TIMEOUT_MS=5000
# HTTP_MAX_HEADERS_COUNT=200
# ----- 危险管理功能开关 -----
# 生产环境保持 false。只有完成备份并明确需要清空非管理员用户时才临时改为 true。
# ENABLE_DANGER_ADMIN_CLEAR_USERS=false
# ----- 应用层限流 (可选) -----
# RATE_LIMIT_AUTH_MAX=10
# RATE_LIMIT_EMAIL_MAX=6
# RATE_LIMIT_GENERATION_MAX=20
# RATE_LIMIT_DOWNLOAD_MAX=60
# RATE_LIMIT_ADMIN_MAX=120
# ============================================================
# 说明:
# - 本地部署模式:
# 1. 安装并启动本地 PostgreSQL 数据库
# 2. 创建名为 miaojing 的数据库
# 3. 运行 scripts/init-database.sql 初始化数据库结构
# 4. 配置 LOCAL_DB_URL 等本地数据库环境变量
# 5. 系统会自动使用本地存储替代 S3 存储
#
# - Supabase 云端模式:
# 1. 创建 Supabase 项目
# 2. 运行 scripts/init-database.sql 初始化数据库
# 3. 在 Supabase Dashboard 创建 site-assets Storage 桶 (公开读)
# 4. 配置 COZE_SUPABASE_URL 等环境变量
#
# - 无数据库配置时,系统将运行在 Demo 模式:
# Demo 模式下:登录/注册返回模拟数据,公告/网站配置返回默认值
# 管理后台写入操作将返回 503 错误
#
# - AI 图片/视频生成:
# 1. 内置使用 coze-coding-dev-sdk (开发环境可用)
# 2. 用户可在前端"自定义 API"中配置自己的 AI 模型密钥
# 3. 管理员可在管理后台配置系统默认 API
# ============================================================

101
.gitignore vendored Normal file
View File

@@ -0,0 +1,101 @@
.next
# Dependencies
node_modules/
.pnp
.pnp.js
# Production build
dist/
build/
out/
.next/
.rsbuild/
# Testing
coverage/
*.lcov
.nyc_output
# Environment variables
.env
.env.local
.env.development.local
.env.test.local
.env.production.local
# Logs
logs/
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Diagnostic reports
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Cache
.cache/
.parcel-cache/
.eslintcache
.stylelintcache
.npm
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
node-compile-cache/
# Editor directories and files
.vscode/*
!.vscode/extensions.json
!.vscode/settings.json
.idea/
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?
*.swp
*.swo
*~
# OS files
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db
Desktop.ini
# Temporary files
*.tmp
*.temp
_tmp_*
.tmp/
.temp/
# Optional files
*.tgz
*.gz
*.zip
*.tar
# TypeScript
*.tsbuildinfo
# Misc
.vercel
.turbo
.coze-logs

26
.npmrc Normal file
View File

@@ -0,0 +1,26 @@
loglevel=error
registry=https://registry.npmmirror.com
strictStorePkgContentCheck=false
verifyStoreIntegrity=false
# 网络优化
network-concurrency=16
fetch-retries=3
fetch-timeout=60000
# 严格使用 peer dependencies
strict-peer-dependencies=false
# 自动生成 lockfile
auto-install-peers=true
# lockfile 配置
lockfile=true
prefer-frozen-lockfile=true
# 如果 lockfile 存在但过期,更新而不是失败
resolution-mode=highest
# 不提示 npm 更新
update-notifier=false

703
DEPLOY_BACKUP_UPGRADE.md Normal file
View File

@@ -0,0 +1,703 @@
# 妙境 AI 创作平台部署、备份、升级操作文档
本文档用于生产环境的一键部署、日常备份、安全升级和故障回滚。所有命令默认在服务器源码目录执行。
## 1. 适用范围
适用于 Linux 服务器本地化部署架构:
- 前端访问服务:`miaojing-web`
- 后端 API 服务:`miaojing-api`
- 管理后台服务:`miaojing-console`
- 数据库:PostgreSQL
- 文件持久化:本地存储目录
- 进程管理:PM2
默认路径和端口:
| 项目 | 默认值 |
| --- | --- |
| 项目部署目录 | `/opt/miaojingAI` |
| 数据存储目录 | `/var/lib/miaojingAI` |
| 前端访问端口 | `5000` |
| 后端 API 内部端口 | `5100` |
| 管理后台内部端口 | `5200` |
| 管理后台访问路径 | `/console` |
| 本地存储目录 | `/var/lib/miaojingAI/storage` |
| 备份目录 | `/var/lib/miaojingAI/backups` |
## 2. 部署前准备
### 2.1 推荐服务器配置与操作系统
生产环境推荐使用稳定版 Linux 发行版,不建议直接使用 Windows Server 运行生产服务。Windows 可以作为开发环境,生产环境建议使用 Ubuntu/Debian 系服务器,便于安装 PostgreSQL、PM2、Nginx、Certbot 和系统级守护服务。
推荐操作系统:
| 操作系统 | 推荐版本 | 适用场景 | 说明 |
| --- | --- | --- | --- |
| Ubuntu Server | `24.04 LTS` | 首选生产环境 | 生态成熟Node.js、PostgreSQL、Nginx、Certbot 支持完整 |
| Ubuntu Server | `26.04 LTS` | 新服务器可选 | 适合全新环境,部署前先在测试机完成一次完整构建和功能验证 |
| Ubuntu Server | `22.04 LTS` | 旧服务器可继续使用 | 仍可运行,但新采购服务器优先选择更新 LTS |
| Debian | `13` | 稳定生产环境 | 当前稳定版,适合长期运行 |
| Debian | `12` | 旧服务器可继续使用 | 已进入旧稳定版周期,可用但新部署优先 Debian 13 |
| CentOS Stream / Rocky Linux / AlmaLinux | `9` | 企业内网环境 | 可用,但脚本和文档示例默认以 Ubuntu/Debian 为主 |
服务器配置建议:
| 场景 | CPU | 内存 | 磁盘 | 带宽 | 适用说明 |
| --- | --- | --- | --- | --- | --- |
| 最低测试环境 | 2 核 | 4 GB | 40 GB SSD | 5 Mbps | 仅用于功能验证和少量测试用户,不建议正式上线 |
| 小型生产环境 | 4 核 | 8 GB | 100 GB SSD | 10 Mbps 以上 | 适合早期上线、低到中等访问量,是推荐起步配置 |
| 标准生产环境 | 8 核 | 16 GB | 200 GB SSD | 20 Mbps 以上 | 适合多人同时使用、较多图片/视频结果持久化 |
| 高并发/商用环境 | 16 核以上 | 32 GB 以上 | 500 GB SSD 或独立对象存储 | 50 Mbps 以上 | 建议拆分数据库、对象存储、反向代理和应用服务 |
磁盘规划建议:
| 目录 | 推荐大小 | 用途 |
| --- | --- | --- |
| 项目部署目录,如 `/opt/miaojingAI` | 20 GB 以上 | 源码、依赖、构建产物 |
| 数据存储目录,如 `/var/lib/miaojingAI` | 80 GB 以上 | 上传文件、生成结果、备份、部署日志 |
| PostgreSQL 数据目录 | 50 GB 以上 | 用户、作品、订单、配置、日志等业务数据 |
生产环境基础要求:
- CPU 架构:`x86_64/amd64`
- Node.js推荐 `24.x LTS`;可使用 `22.x LTS`;不建议新生产环境继续使用已过维护周期的 Node.js 20
- PostgreSQL`16+`,推荐 `17``18`;最低可用 `14+`,但需要确认仍在安全维护期
- pnpm`9.x+`
- PM2最新版稳定版
- Nginx建议用于域名反向代理、HTTPS、静态压缩和访问日志
- HTTPS正式上线必须配置有效 TLS 证书
- 时区:建议设置为 `Asia/Shanghai`
- 防火墙:只开放 `80/443` 和必要的 SSH 端口,`5100/5200` 保持内网访问
- 应用服务默认绑定 `127.0.0.1`,通过 Nginx 对外提供访问;不要把 `APP_BIND_HOST` 改为 `0.0.0.0`,除非已有上层网络隔离
- 备份:数据库和 `/var/lib/miaojingAI/storage` 必须有定期离线或异地备份
不建议用于正式生产的环境:
- 非 LTS 版本 Linux例如 Ubuntu 中间版本;这类系统生命周期短,适合测试,不适合长期生产。
- 低于 4 核 8 GB 的服务器Next.js 构建、图片/视频结果持久化和 PostgreSQL 同机运行时容易出现资源不足。
- 只暴露裸 IP 和 HTTP 端口正式上线必须使用域名、Nginx 反向代理和 HTTPS。
- 将数据库、上传文件、生成结果和备份放在项目代码目录内;升级和回滚时容易误删。
- 使用默认管理员密码、默认数据库密码或公开的 SSH 密码。
### 2.2 必需软件
部署脚本会自动安装或切换 Node.js 到生产推荐版本,默认使用 `24.x LTS`。如需固定为 `22.x LTS`,执行脚本前设置:
```bash
DEPLOY_NODE_MAJOR=22 bash scripts/deploy-or-upgrade.sh
```
Node.js 会优先从国内可访问镜像源下载,顺序包括 npmmirror、清华、腾讯、华为最后回退到官方源。默认安装目录为数据目录下的 `node` 子目录,例如 `/var/lib/miaojingAI/node`,不会覆盖系统自带 Node.js。
部署脚本会检查以下命令是否存在:
- `node` / `npm`:没有或版本不符合目标 LTS 时,脚本会自动安装/切换
- `pnpm`
- `pm2`
- `psql`
- `pg_dump`
- `tar`
- `rsync`
- `curl`
Ubuntu/Debian 可参考:
```bash
sudo apt update
sudo apt install -y postgresql-client tar rsync curl
node -v
npm -v
```
如果未安装 `pnpm``pm2`,一键脚本会通过当前 Node.js 对应的 npm 自动安装,并使用国内可访问镜像源。
### 2.3 PostgreSQL 数据库
部署前需要准备好 PostgreSQL 数据库,并确认服务器可以连接。
示例连接地址:
```text
postgresql://postgres:postgres@localhost:5432/miaojing
```
可先手动验证:
```bash
psql "postgresql://postgres:postgres@localhost:5432/miaojing" -c "SELECT 1;"
```
## 3. 首次部署
### 3.1 执行一键部署脚本
在服务器源码目录执行:
```bash
bash scripts/deploy-or-upgrade.sh
```
脚本会自动检测目标部署目录。如果目标目录没有部署记录,会进入首次部署流程。
### 3.2 按提示填写参数
首次部署时需要填写:
| 参数 | 说明 |
| --- | --- |
| 项目部署目录 | 生产运行目录,例如 `/opt/miaojingAI` |
| 数据存储目录 | 持久化数据根目录,例如 `/var/lib/miaojingAI` |
| 前端访问端口 | 浏览器访问端口,例如 `5000` |
| 后端 API 内部端口 | 后端服务端口,例如 `5100` |
| 管理后台内部端口 | 管理后台服务端口,例如 `5200` |
| 管理员账号/昵称 | 管理员登录账号展示名 |
| 管理员邮箱 | 管理员登录邮箱 |
| 管理员密码 | 管理员初始密码 |
| 正式访问地址 | 有域名时填写 `https://域名`,没有域名时可留空使用服务器 IP 和端口 |
| PostgreSQL 连接地址 | 数据库连接字符串 |
### 3.3 部署完成后的输出
部署成功后,脚本会输出:
- 前台访问地址
- 管理后台地址
- 管理员账号
- 管理员邮箱
- 管理员密码
- 项目目录
- 数据目录
- 日志文件路径
管理后台访问地址示例:
```text
https://你的域名/console
```
只有管理员账号可以登录管理后台。
## 4. Nginx、HTTPS 与防火墙
正式上线必须使用 Nginx 反向代理和 HTTPS不建议把 `5000/5100/5200` 直接暴露到公网。仓库已提供生产模板:
```text
deploy/nginx/miaojing-production.conf
```
### 4.1 配置 Nginx
```bash
sudo cp deploy/nginx/miaojing-production.conf /etc/nginx/sites-available/miaojing.conf
sudo nano /etc/nginx/sites-available/miaojing.conf
sudo ln -sf /etc/nginx/sites-available/miaojing.conf /etc/nginx/sites-enabled/miaojing.conf
sudo nginx -t
sudo systemctl reload nginx
```
需要替换模板中的:
- `example.com``www.example.com`
- 证书路径
- 如果一键脚本中修改过前端端口,同步替换 `proxy_pass http://127.0.0.1:5000`
### 4.2 配置 HTTPS 证书
推荐使用 Certbot
```bash
sudo apt install -y certbot python3-certbot-nginx
sudo certbot --nginx -d example.com -d www.example.com
sudo certbot renew --dry-run
```
### 4.3 防火墙要求
生产环境只开放 `80/443` 和必要 SSH 端口,应用内部端口只允许本机访问。
Ubuntu UFW 示例:
```bash
sudo ufw allow OpenSSH
sudo ufw allow 80/tcp
sudo ufw allow 443/tcp
sudo ufw deny 5000/tcp
sudo ufw deny 5100/tcp
sudo ufw deny 5200/tcp
sudo ufw enable
sudo ufw status verbose
```
云服务器安全组也必须同步只开放 `80/443/SSH`
## 5. 部署后的检查
### 5.1 检查 PM2 服务
```bash
pm2 list
```
正常情况下应看到:
- `miaojing-web`
- `miaojing-api`
- `miaojing-console`
状态应为 `online`
### 5.2 检查访问地址
```bash
curl -I http://127.0.0.1:5000
curl -fsS http://127.0.0.1:5000/api/health
curl -I http://127.0.0.1:5000/console
```
### 5.3 检查日志
```bash
pm2 logs miaojing-web --lines 100
pm2 logs miaojing-api --lines 100
pm2 logs miaojing-console --lines 100
```
部署脚本日志位于:
```text
数据存储目录/logs/deploy-日期时间.log
```
示例:
```text
/var/lib/miaojingAI/logs/deploy-20260503-120000.log
```
## 6. 备份操作
### 6.1 自动备份
执行升级流程时,一键脚本会自动创建升级前备份,备份内容包括:
- PostgreSQL 数据库 dump
- `.env.local`
- 本地存储文件
- `package.json`
- 备份清单 `manifest.json`
默认备份目录:
```text
/var/lib/miaojingAI/backups
```
### 6.2 手动创建备份
进入生产部署目录:
```bash
cd /opt/miaojingAI
pnpm backup:create
```
或直接执行:
```bash
cd /opt/miaojingAI
bash scripts/backup-create.sh
```
如果需要指定备份目录:
```bash
cd /opt/miaojingAI
BACKUP_DIR=/var/lib/miaojingAI/backups bash scripts/backup-create.sh
```
脚本成功后会输出备份文件路径,例如:
```text
/var/lib/miaojingAI/backups/miaojing-backup-20260503-120000.tar.gz
```
### 6.3 查看备份文件
```bash
ls -lh /var/lib/miaojingAI/backups
```
备份脚本默认保留最近 10 个 `miaojing-backup-*.tar.gz` 文件。
## 7. 升级操作
### 7.1 升级前确认
升级前确认:
- 数据库可连接
- 当前服务可访问
- 磁盘空间充足
- 已拿到新版本源码
- 没有正在进行的重要生成任务
建议检查:
```bash
df -h
pm2 list
psql "postgresql://postgres:postgres@localhost:5432/miaojing" -c "SELECT 1;"
```
### 7.2 执行升级
在新版本源码目录执行:
```bash
bash scripts/deploy-or-upgrade.sh
```
当脚本检测到目标部署目录已有 `package.json` 且存在 `.env.local``.miaojing-deployment` 时,会进入升级流程。
升级流程会自动执行:
1. 读取旧部署配置作为默认值
2. 创建升级前备份
3. 迁移旧本地存储到持久化目录
4. 同步新版本代码到部署目录
5. 保留 `.env.local` 中原有非部署配置,只更新数据库、端口、持久化目录和密钥等必要项
6. 补齐数据库结构和索引
7. 可选更新管理员密码
8. 安装依赖
9. 生产构建
10. 执行生产依赖漏洞扫描;`high/critical` 级别漏洞会阻断上线
11. 通过 PM2 重载前端、后端、管理后台
12. 检查 `/api/health``/console`
### 7.3 升级参数说明
升级时管理员密码可以留空:
- 留空:不修改管理员密码
- 输入新密码:更新管理员密码
升级不会删除或覆盖以下数据:
- 用户账号
- 用户资料
- 管理员账号
- 作品记录
- 积分记录
- 订单记录
- 网站配置
- API 供应商配置
- 系统 API 配置
- 用户自定义 API 密钥
- 支付配置
- 公告
- 邮件配置
- 本地存储文件
## 8. 安全与漏洞扫描
一键脚本在构建后会执行:
```bash
pnpm audit --prod --audit-level=high
```
发现 `high``critical` 级别生产依赖漏洞时,脚本会直接失败,必须先升级依赖链并重新构建。`moderate` 级别漏洞会记录在部署日志中,正式上线前仍建议处理。
可手动执行完整审计:
```bash
cd /opt/miaojingAI
pnpm audit --prod --registry=https://registry.npmjs.org
```
项目内置的生产安全措施:
- `/api/admin/clear-users` 默认禁用,只有临时设置 `ENABLE_DANGER_ADMIN_CLEAR_USERS=true` 才能使用。
- `/console` 管理后台登录要求管理员角色,普通用户不能登录。
- 登录、注册、邮箱验证码、生成、下载、管理 API 均有应用层基础限流。
- Nginx 模板提供边缘限流,建议正式生产同时启用应用层和 Nginx 层限流。
- 全局安全响应头包括 `Content-Security-Policy``HSTS``X-Content-Type-Options``X-Frame-Options``Referrer-Policy``Permissions-Policy``Origin-Agent-Cluster`
- 生产构建关闭 `X-Powered-By` 技术指纹,并设置 Node HTTP 请求、请求头、Keep-Alive 超时。
- 下载代理和远程文件持久化会阻断内网、回环和本地地址,降低 SSRF 风险。
- API Key、SMTP 密码等敏感配置采用服务端加密存储,生产环境必须设置 `DATA_ENCRYPTION_KEY``JWT_SECRET`
上线前必须确认:
- `ENABLE_DANGER_ADMIN_CLEAR_USERS=false`
- `.env.local` 权限为 `600`
- 管理员密码不是默认值,也不是弱口令
- 用户注册密码规则至少 8 位,并同时包含字母和数字
- 数据库密码不是默认值
- SSH 禁止公开弱密码,建议使用密钥登录并限制来源 IP
- 云安全组和系统防火墙均未开放 `5000/5100/5200`
- 备份文件目录权限为 `700`,备份文件权限为 `600`
## 9. 回滚操作
### 9.1 使用备份回滚
进入生产部署目录:
```bash
cd /opt/miaojingAI
pnpm backup:restore /var/lib/miaojingAI/backups/miaojing-backup-YYYYMMDD-HHMMSS.tar.gz
```
或直接执行:
```bash
cd /opt/miaojingAI
bash scripts/backup-restore.sh /var/lib/miaojingAI/backups/miaojing-backup-YYYYMMDD-HHMMSS.tar.gz
```
回滚会恢复:
- 数据库
- `.env.local`
- 本地存储文件
### 9.2 回滚后重启服务
```bash
cd /opt/miaojingAI
pm2 startOrReload ecosystem.config.cjs --update-env
pm2 save
```
### 9.3 回滚后验证
```bash
curl -fsS http://127.0.0.1:5000/api/health
curl -I http://127.0.0.1:5000/console
pm2 list
```
## 10. 数据持久化说明
生产部署中,代码目录和数据目录分离。
代码目录示例:
```text
/opt/miaojingAI
```
数据目录示例:
```text
/var/lib/miaojingAI
```
关键持久化路径:
| 数据 | 路径 |
| --- | --- |
| 本地上传和生成文件 | `/var/lib/miaojingAI/storage` |
| 备份文件 | `/var/lib/miaojingAI/backups` |
| 部署日志 | `/var/lib/miaojingAI/logs` |
| 生产环境变量 | `/opt/miaojingAI/.env.local` |
| 部署标记 | `/opt/miaojingAI/.miaojing-deployment` |
升级同步代码时会排除:
- `.env.local`
- `node_modules`
- `.next`
- `dist`
- `backups`
- `local-storage`
- `.git`
- `.codex_tmp`
因此正常升级不会覆盖用户数据和持久化文件。
## 11. 常用运维命令
### 11.1 查看服务状态
```bash
pm2 list
```
### 11.2 查看服务日志
```bash
pm2 logs miaojing-web --lines 100
pm2 logs miaojing-api --lines 100
pm2 logs miaojing-console --lines 100
```
### 11.3 重启服务
```bash
cd /opt/miaojingAI
pm2 startOrReload ecosystem.config.cjs --update-env
pm2 save
```
### 11.4 停止服务
```bash
pm2 stop miaojing-web miaojing-api miaojing-console
```
### 11.5 启动服务
```bash
cd /opt/miaojingAI
pm2 start ecosystem.config.cjs --update-env
pm2 save
```
### 11.6 查看部署日志
```bash
ls -lh /var/lib/miaojingAI/logs
tail -n 200 /var/lib/miaojingAI/logs/deploy-*.log
```
## 12. 常见问题处理
### 12.1 Node.js 自动安装失败
脚本默认会自动安装或切换到 Node.js `24.x LTS`。如果服务器无法访问所有 Node.js 镜像源,会提示安装失败。
```bash
node -v
curl -I https://npmmirror.com/mirrors/node/index.json
```
可改用 Node.js `22.x LTS`
```bash
DEPLOY_NODE_MAJOR=22 bash scripts/deploy-or-upgrade.sh
```
### 12.2 依赖安装失败
脚本会依次尝试以下源:
- `https://registry.npmmirror.com`
- `https://registry.npmjs.org`
- `https://mirrors.cloud.tencent.com/npm/`
- `https://mirrors.huaweicloud.com/repository/npm/`
如果全部失败,检查服务器网络和 DNS。
### 12.3 数据库连接失败
检查连接地址:
```bash
psql "postgresql://postgres:postgres@localhost:5432/miaojing" -c "SELECT 1;"
```
检查 PostgreSQL 服务:
```bash
systemctl status postgresql
```
### 12.4 健康检查失败
查看 PM2 日志:
```bash
pm2 logs miaojing-web --lines 120
pm2 logs miaojing-api --lines 120
pm2 logs miaojing-console --lines 120
```
检查端口是否被占用:
```bash
ss -lntp | grep -E ':5000|:5100|:5200'
```
### 12.5 管理后台无法登录
确认访问路径:
```text
http://服务器IP:5000/console
```
确认账号为管理员角色:
```sql
SELECT id, email, nickname, role, is_active FROM profiles WHERE role = 'admin';
```
升级时如果需要重置管理员密码,重新执行:
```bash
bash scripts/deploy-or-upgrade.sh
```
在提示“管理员密码(升级时可留空表示不修改)”时输入新密码。
### 12.6 作品图片或视频无法访问
检查 `.env.local` 中的本地存储目录:
```bash
grep LOCAL_STORAGE_DIR /opt/miaojingAI/.env.local
```
检查文件目录:
```bash
ls -lh /var/lib/miaojingAI/storage
```
升级脚本会自动将旧部署目录中的 `local-storage` 迁移到持久化目录。
## 13. 上线检查清单
上线前逐项确认:
- 数据库连接正常
- 一键部署脚本执行成功
- `pm2 list` 三个服务均为 `online`
- 首页可访问
- `/api/health` 返回正常
- `/console` 可访问
- 管理员可登录后台
- 网站设置可读取和保存
- API 管理配置可读取和保存
- 用户注册、登录正常
- 创作作品可以生成、保存和访问
- 本地存储目录存在且可写
- 手动备份可以成功生成
- 升级前备份路径已记录
- Nginx 已配置域名和 HTTPS
- 系统防火墙和云安全组只开放 `80/443/SSH`
- `5000/5100/5200` 未对公网开放
- `pnpm audit --prod``high/critical` 漏洞
- `.env.local``JWT_SECRET``DATA_ENCRYPTION_KEY``GENERATION_INTERNAL_SECRET` 均已设置
- `ENABLE_DANGER_ADMIN_CLEAR_USERS=false`
- `/console` 响应头不包含 `X-Powered-By`,并包含 `Content-Security-Policy`
- 管理后台“系统日志”可正常筛选查看
- 管理后台仪表盘中数据库、存储、日志状态正常
## 14. 关键文件
| 文件 | 用途 |
| --- | --- |
| `scripts/deploy-or-upgrade.sh` | 一键部署和升级 |
| `scripts/backup-create.sh` | 创建备份 |
| `scripts/backup-restore.sh` | 恢复备份 |
| `scripts/init-database.sql` | 初始化和补齐数据库结构 |
| `scripts/database-optimization-patch.sql` | 数据库优化补丁 |
| `scripts/start.sh` | PM2 调用的启动脚本 |
| `deploy/nginx/miaojing-production.conf` | Nginx 生产反向代理模板 |
| `.env.local` | 生产环境变量 |
| `ecosystem.config.cjs` | PM2 进程配置 |

613
README.md
View File

@@ -1,2 +1,613 @@
# miaojingAI # 妙境 AI 创作平台
妙境是一个面向个人创作者、内容团队和私有化部署场景的 AI 多模态创作平台。平台围绕“文生图、图生图、文生视频、图生视频、图片反推提示词”构建完整创作链路,提供用户体系、积分/会员、订单支付、作品历史、公开画廊、模型供应商管理、系统配置、数据备份和在线升级能力。
项目基于 Next.js App Router、React、PostgreSQL、本地文件存储和 PM2 运行,支持本地 PostgreSQL 部署,也兼容 Supabase 作为数据库/认证底座。AI 生成能力既支持系统默认供应商,也支持用户自定义 OpenAI/New API 兼容接口。
## 项目截图
以下截图来自开发服务器的真实页面,用于快速了解平台界面和核心工作流。
### 首页
![妙境首页](docs/images/home.png)
### 创作中心
![创作中心](docs/images/create.png)
### 作品画廊
![作品画廊](docs/images/gallery.png)
## 核心能力
### 创作能力
- 文生图:根据文本提示词生成图片,支持尺寸、比例、模型和提示词参数。
- 图生图:上传参考图后进行风格迁移、场景变换、细节重绘和创意延展。
- 文生视频:根据文字描述生成动态视频内容。
- 图生视频:基于静态图片生成动态视频。
- 图片反推提示词:从图片中提取提示词,支持普通提示词、复刻级像素提示词、像素级图生图、像素级文生图等模式。
- 生成任务队列:生成任务写入 `generation_jobs`,前端可轮询任务状态并从历史记录中查看结果。
- 作品管理:保存创作历史、生成参数、结果链接、尺寸、时长、消耗积分等信息。
- 画廊发布:用户可将作品公开到画廊,支持点赞、复制提示词、全屏预览和下载。
### 管理后台
管理后台入口为 `/console`,管理员登录后可进入仪表盘和各类管理模块。
- 仪表盘:统计用户、作品、任务、订单、供应商、公告、日志和系统健康状态。
- API 管理:配置系统供应商、模型推荐、系统 API、New API/OpenAI 兼容站点。
- 用户管理:管理用户资料、角色、会员、积分、账号状态。
- 价格设置:维护会员套餐、积分规则和付费能力。
- 订单管理:查看订单、支付状态和收入统计。
- 支付配置:配置微信、支付宝、Stripe 等支付方式的展示和密钥。
- 公告管理:创建站点公告、弹窗公告、有效期和展示策略。
- 数据管理:导出/导入业务数据,适合迁移和人工备份。
- 系统升级:支持热更新和冷更新,自动备份、失败回滚、中文日志和历史记录。
- 系统日志:查看登录、安全、运行、管理操作等平台日志。
- 系统设置:维护站点名称、Logo、页脚、邮箱、通知和站点政策内容。
### 运维能力
- 一键部署/升级脚本:`scripts/deploy-or-upgrade.sh`
- 构建脚本:`scripts/build.sh`
- 数据备份:`scripts/backup-create.sh`
- 数据恢复:`scripts/backup-restore.sh`
- 备份列表:`scripts/backup-list.sh`
- 数据库初始化:`scripts/init-database.sql`
- 数据库补丁:`scripts/apply-database-patch.sh`
- 管理后台在线升级 runner`scripts/admin-upgrade-runner.mjs`
- PM2 运行配置:`ecosystem.config.cjs`
## 技术栈
| 层级 | 技术 |
| --- | --- |
| 前端框架 | Next.js 16 App Router、React 19、TypeScript |
| UI 组件 | Radix UI、Tailwind CSS、lucide-react、sonner |
| 服务端 | Next.js Route Handlers、自定义 Node HTTP server、tsup |
| 数据库 | PostgreSQL 14+,可接 Supabase |
| 存储 | 本地文件存储,生产推荐 `LOCAL_STORAGE_DIR=/var/lib/miaojingAI/storage` |
| 认证 | 本地 auth schema + session/JWT兼容 Supabase 风格表结构 |
| AI 调用 | coze-coding-dev-sdk、用户自定义 API、系统 API、New API/OpenAI compatible |
| 进程管理 | PM2 |
| 构建工具 | pnpm、Turbopack、tsup、TypeScript |
## 系统架构
```text
┌─────────────────────────────────────────────────────────────┐
│ Browser │
│ 首页 / 创作中心 / 画廊 / 个人中心 / 管理后台 Console │
└──────────────────────────────┬──────────────────────────────┘
│ HTTP
┌──────────────────────────────▼──────────────────────────────┐
│ Next.js App Router │
│ app pages + route handlers + middleware + server components │
└───────────────┬──────────────────────┬──────────────────────┘
│ │
┌───────────────▼──────────────┐ │
│ API Route 层 │ │
│ /api/generate/image │ │
│ /api/generate/video │ │
│ /api/generation-jobs │ │
│ /api/admin/* │ │
│ /api/local-storage/* │ │
└───────────────┬──────────────┘ │
│ │
┌───────────────▼──────────────┐ │
│ 业务服务层 │ │
│ auth / model config / jobs │ │
│ credits / orders / storage │ │
│ platform logs / upgrade │ │
└───────┬──────────────┬───────┘ │
│ │ │
┌───────▼──────┐ ┌─────▼────────┐ ┌────▼──────────────────┐
│ PostgreSQL │ │ 本地文件存储 │ │ 上游 AI / New API 站点 │
│ profiles │ │ images/videos│ │ OpenAI compatible │
│ works │ │ avatars │ │ image/video providers │
│ jobs/orders │ │ backups │ │ │
└──────────────┘ └──────────────┘ └───────────────────────┘
```
## 目录结构
```text
.
├── assets/ # 项目内置图片资源
├── docs/images/ # README 使用的项目截图
├── public/ # favicon、logo、公开静态文件
├── scripts/
│ ├── admin-upgrade-runner.mjs # 管理后台热/冷更新执行器
│ ├── apply-database-patch.sh # 执行数据库补丁
│ ├── backup-create.sh # 创建数据库/存储/.env 备份
│ ├── backup-list.sh # 查看备份列表
│ ├── backup-restore.sh # 恢复备份
│ ├── build.sh # Next.js + server 构建
│ ├── deploy-or-upgrade.sh # 一键部署/升级
│ ├── dev.sh # 本地开发启动脚本
│ ├── init-database.sql # PostgreSQL 初始化脚本
│ └── start.sh # 生产启动脚本
├── src/
│ ├── app/ # Next.js App Router 页面与 API
│ ├── components/ # 页面组件、创作组件、管理后台组件、UI 组件
│ ├── hooks/ # 前端 hooks
│ ├── lib/ # 业务逻辑、认证、模型、存储、日志、支付等
│ ├── modules/ # api / console / web 模块入口
│ ├── server.ts # 自定义 Node HTTP server 入口
│ └── storage/ # 数据库客户端和存储适配
├── .env.example # 环境变量模板
├── ecosystem.config.cjs # PM2 配置
├── package.json
└── README.md
```
## 环境要求
### 基础环境
- Linux 服务器,推荐 Ubuntu 22.04+ / Debian 12+
- Node.js 22 或 24部署脚本默认安装/使用 Node.js 24 LTS
- pnpm 9+
- PostgreSQL 14+
- PM2
- curl、tar、rsync
- PostgreSQL 客户端工具:`psql``pg_dump``pg_restore`
### 推荐生产目录
```text
/opt/miaojingAI # 项目代码目录
/var/lib/miaojingAI/storage # 上传文件、生成结果、本地存储
/var/lib/miaojingAI/backups # 数据备份
/var/lib/miaojingAI/upgrade # 管理后台升级状态和升级包
/var/lib/miaojingAI/logs # 部署日志
```
## 环境变量
复制模板:
```bash
cp .env.example .env.local
```
常用变量:
| 变量 | 说明 |
| --- | --- |
| `LOCAL_DB_URL` | PostgreSQL 连接地址,例如 `postgresql://postgres:postgres@localhost:5432/miaojing` |
| `LOCAL_DB_ANON_KEY` | 本地模式 anon key可自定义 |
| `LOCAL_DB_SERVICE_ROLE_KEY` | 本地模式 service role key可自定义 |
| `DATA_ENCRYPTION_KEY` | 生产环境必填,用于加密 API Key 等敏感数据 |
| `JWT_SECRET` | 生产环境必填,用于会话/JWT 签名 |
| `GENERATION_INTERNAL_SECRET` | 生成任务内部密钥 |
| `LOCAL_STORAGE_DIR` | 本地文件存储路径 |
| `BACKUP_DIR` | 备份目录 |
| `DEPLOY_RUN_PORT` | 当前运行角色监听端口 |
| `MIAOJING_API_PORT` | 后端 API 内部端口 |
| `MIAOJING_CONSOLE_PORT` | 管理后台内部端口 |
| `NEXT_PUBLIC_APP_URL` | 前端公开访问地址 |
| `APP_BASE_URL` | 服务端使用的站点地址 |
| `ADMIN_INVITE_CODE` | 管理员邀请码 |
| `ENABLE_DANGER_ADMIN_CLEAR_USERS` | 危险清理功能开关,生产环境应保持 `false` |
| `UPGRADE_STATE_DIR` | 管理后台升级状态目录,默认基于存储目录推导 |
| `UPGRADE_HEALTH_URL` | 冷更新重启后的健康检查 URL |
| `UPGRADE_RESTART_COMMAND` | 冷更新使用的重启命令 |
生成生产密钥示例:
```bash
openssl rand -hex 32
```
## 本地开发
### 1. 安装依赖
```bash
corepack enable
corepack pnpm install --frozen-lockfile
```
### 2. 初始化数据库
创建数据库:
```bash
createdb miaojing
```
执行初始化脚本:
```bash
psql "postgresql://postgres:postgres@localhost:5432/miaojing" -f scripts/init-database.sql
```
### 3. 配置环境变量
```bash
cp .env.example .env.local
```
至少填写:
```env
LOCAL_DB_URL=postgresql://postgres:postgres@localhost:5432/miaojing
LOCAL_DB_ANON_KEY=local-anon-key
LOCAL_DB_SERVICE_ROLE_KEY=local-service-role-key
LOCAL_STORAGE_DIR=./local-storage
DATA_ENCRYPTION_KEY=替换为 openssl rand -hex 32 的结果
JWT_SECRET=替换为 openssl rand -hex 32 的结果
GENERATION_INTERNAL_SECRET=替换为 openssl rand -hex 32 的结果
```
### 4. 启动开发服务
```bash
corepack pnpm run dev
```
默认开发端口由 `scripts/dev.sh` 控制,可传入端口:
```bash
corepack pnpm run dev -- 5000
```
### 5. 常用开发命令
```bash
# TypeScript 检查
corepack pnpm run ts-check
# ESLint
corepack pnpm run lint
# 完整构建
corepack pnpm run build
# 边界检查
corepack pnpm run check:boundaries
```
## 生产部署
### 方式一:一键部署/升级脚本
推荐使用:
```bash
bash scripts/deploy-or-upgrade.sh
```
脚本会提示填写:
- 项目部署目录
- 数据存储目录
- 前端访问端口
- 后端 API 内部端口
- 管理后台内部端口
- 管理员账号/邮箱/密码
- PostgreSQL 连接地址
- 正式访问地址
首次部署时,脚本会:
1. 检查 tar、rsync、curl、psql、pg_dump 等依赖。
2. 自动安装或切换 Node.js 22/24。
3. 安装 pnpm 和 PM2。
4. 准备 `.env.local`
5. 初始化或检查数据库。
6. 构建 Next.js 和 Node server。
7. 写入 PM2 配置并启动服务。
8. 执行健康检查。
升级已有部署时,脚本会:
1. 检测已有部署目录。
2. 创建升级前备份。
3. 同步新代码。
4. 构建并重启服务。
5. 执行健康检查。
6. 失败时输出备份路径,方便手动恢复。
### 方式二:手动部署
安装依赖:
```bash
corepack enable
corepack pnpm install --frozen-lockfile --prod=false
```
构建:
```bash
corepack pnpm run build
```
启动 PM2
```bash
pm2 startOrReload ecosystem.config.cjs --update-env
pm2 save
```
健康检查:
```bash
curl http://127.0.0.1:5100/api/health
```
### PM2 服务说明
`ecosystem.config.cjs` 默认包含三个角色:
| PM2 名称 | 角色 | 默认端口 |
| --- | --- | --- |
| `miaojing-api` | 后端 API | `5100` |
| `miaojing-web` | 前端站点 | `5000` |
| `miaojing-console` | 管理后台 | `5200` |
在单进程开发部署中,也可以使用类似 `miaojing-dev` 的 PM2 进程直接运行完整服务。
## 数据库说明
数据库初始化脚本为:
```bash
scripts/init-database.sql
```
核心表包括:
| 表 | 说明 |
| --- | --- |
| `auth.users` | 本地认证用户 |
| `profiles` | 用户资料、角色、会员、积分 |
| `works` | 创作作品、提示词、结果 URL、尺寸、状态 |
| `generation_jobs` | 生成任务队列和进度 |
| `credit_transactions` | 积分流水 |
| `orders` | 订单和支付状态 |
| `user_api_keys` | 用户自定义 API 密钥 |
| `system_api_configs` | 系统默认 API 配置 |
| `api_providers` | 模型供应商 |
| `model_recommendations` | 模型推荐配置 |
| `announcements` | 公告 |
| `site_config` | 站点配置 |
| `platform_logs` | 平台日志 |
执行数据库补丁:
```bash
corepack pnpm run db:patch
```
## 备份与恢复
### 创建备份
```bash
corepack pnpm run backup:create
```
备份包包含:
- PostgreSQL dump`database.dump`
- 本地存储目录:`local-storage`
- 环境变量:`.env.local`
- `package.json`
- 备份 manifest
默认只保留最近 10 个备份。
### 查看备份
```bash
corepack pnpm run backup:list
```
### 恢复备份
```bash
corepack pnpm run backup:restore /path/to/miaojing-backup-YYYYMMDD-HHMMSS.tar.gz
```
恢复动作会:
1. 使用 `pg_restore --clean --if-exists --no-owner` 恢复数据库。
2. 替换本地存储目录。
3. 恢复 `.env.local`
生产环境恢复前建议先停服务,并额外复制当前数据目录。
## 管理后台在线升级
管理后台“系统升级”提供两类升级能力:
### 热更新
用于不影响运行时代码的小补丁,例如 `public/` 下的静态资源。
特点:
- 不重启平台。
- 升级前自动创建数据库/存储/环境备份。
- 升级前自动创建源码快照。
- 失败自动回滚。
- 升级界面实时显示中文日志。
### 冷更新
用于涉及源码、脚本、依赖、配置的大变更。
冷更新流程:
1. 上传升级包。
2. 校验升级包路径,拒绝 `.env``.git``node_modules``backups``local-storage` 等危险路径。
3. 创建数据库、存储、环境配置备份。
4. 创建源码快照。
5. 应用升级包文件。
6. 如依赖变化,执行 `pnpm install --frozen-lockfile --prod=false`
7. 执行 `pnpm run ts-check`
8. 执行 `pnpm run build`
9. 重启平台。
10. 调用 `/api/health` 做健康检查。
11. 失败时自动恢复源码和数据。
升级日志会写入磁盘状态文件。即使冷更新过程中平台重启,管理后台恢复后也能续上日志和状态。升级完成后,当前页面刷新或切换页面会默认收起实时日志,但历史升级记录中可随时查看完整升级内容和日志。
升级包格式:
```text
.tar
.tar.gz
.tgz
```
推荐升级包结构:
```text
upgrade-package/
├── manifest.json
├── src/...
├── public/...
├── scripts/...
├── package.json
└── pnpm-lock.yaml
```
手动运行升级 runner 示例:
```bash
UPGRADE_STATE_DIR=/var/lib/miaojingAI/upgrade \
corepack pnpm run upgrade:run -- \
--job-id manual-001 \
--mode cold \
--package /tmp/upgrade.tgz \
--package-name upgrade.tgz \
--project /opt/miaojingAI
```
## AI 模型与供应商
妙境支持多来源模型配置:
- 系统默认供应商。
- 用户自定义 API Key。
- New API / OpenAI-compatible API 站点。
- 图片模型、视频模型、文本模型分类型管理。
管理员可在管理后台配置供应商、默认模型、API 地址、模型推荐和启用状态。用户可在前端绑定自己的 API Key平台会使用加密存储和尾号预览保护密钥。
## 存储与下载
生成结果会持久化到本地存储目录或配置的存储服务。推荐生产环境把存储放在项目目录之外:
```env
LOCAL_STORAGE_DIR=/var/lib/miaojingAI/storage
```
下载链路通过 `/api/download` 或本地存储路由读取原始文件,不应对图片进行二次压缩。作品历史、生成结果、画廊、全屏预览和下载应始终使用原始结果链接或持久化文件路径。
## 健康检查与运维命令
健康检查:
```bash
curl http://127.0.0.1:5100/api/health
```
查看 PM2
```bash
pm2 list
pm2 logs miaojing-api --lines 100
pm2 logs miaojing-web --lines 100
pm2 logs miaojing-console --lines 100
```
重启服务:
```bash
corepack pnpm run pm2:restart
```
保存 PM2 开机配置:
```bash
corepack pnpm run pm2:save
```
## 安全建议
- 生产环境必须设置高强度 `DATA_ENCRYPTION_KEY``JWT_SECRET``GENERATION_INTERNAL_SECRET`
- `.env.local` 不应提交到 git。
- 生产环境不要开启 `ENABLE_DANGER_ADMIN_CLEAR_USERS`
- `LOCAL_STORAGE_DIR``BACKUP_DIR``UPGRADE_STATE_DIR` 建议放在项目目录外。
- 升级包不要包含 `.env`、数据库 dump、用户上传文件或备份目录。
- 对外暴露时建议在 Nginx/Caddy 后面启用 HTTPS。
- 管理后台账号应使用强密码,并限制管理员数量。
- 定期执行 `backup:create` 并把备份复制到异地。
## 常见问题
### 1. 构建时提示缺少 devDependencies
构建需要 devDependencies。部署或 CI 中不要只安装生产依赖,推荐:
```bash
corepack pnpm install --frozen-lockfile --prod=false
```
### 2. 管理后台修改后刷新丢失
优先检查对应接口是否成功写入数据库,再检查 `.env.local` 是否连接到了正确数据库。不要只看前端本地状态。
### 3. 图片下载尺寸不符合预期
排查顺序:
1. 检查上游请求参数是否为目标分辨率。
2. 检查生成任务结果中保存的原始文件尺寸。
3. 检查 `/api/download` 是否直接返回原始文件。
4. 检查前端是否使用缩略图地址下载。
### 4. 冷更新后页面仍是旧版本
检查:
```bash
pm2 list
pm2 describe miaojing-api
pm2 describe miaojing-web
pm2 describe miaojing-console
```
确认 PM2 的 `cwd` 是当前部署目录,并确认构建产物来自同一目录。
### 5. 数据恢复后作品图片丢失
确认备份包内是否包含 `local-storage`,并确认 `.env.local` 中的 `LOCAL_STORAGE_DIR` 指向恢复后的存储目录。
## 版本管理
推荐使用 `main` 作为稳定分支:
```bash
git clone http://172.16.10.127:3000/fenglee/miaojingAI.git
git checkout main
```
提交前建议执行:
```bash
corepack pnpm run ts-check
corepack pnpm run build
```
## License
当前项目为私有项目,未声明开源许可证。未经授权请勿公开分发、复制或商业使用。

View File

@@ -0,0 +1 @@
ALTER TABLE auth.users ADD COLUMN IF NOT EXISTS password_hash TEXT;

BIN
assets/image.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 83 KiB

BIN
assets/miaojinglogo.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 MiB

View File

@@ -0,0 +1,204 @@
// Data-integrity audit script: connects to the miaojing PostgreSQL database
// and cross-checks business tables against files stored on disk.
const { Client } = require('pg');
const fs = require('fs');
const path = require('path');
// Load production environment variables from the deployed .env.local.
// NOTE(review): the deployment path is hard-coded — confirm it matches the
// actual install directory before running this script on another host.
require('dotenv').config({ path: '/root/miaojingAI/.env.local' });
const root = '/root/miaojingAI';
// Single shared PG client; LOCAL_DB_URL comes from the env file loaded above.
const client = new Client({ connectionString: process.env.LOCAL_DB_URL });
// Extract the storage key embedded in a local-storage URL.
// Returns the percent-decoded path segment that follows "/api/local-storage/"
// (query string stripped), or null when the input is missing, not a string,
// lacks the marker, or contains malformed percent-encoding.
function storageKeyFromUrl(url) {
  if (typeof url !== 'string' || url.length === 0) return null;
  const MARKER = '/api/local-storage/';
  const at = url.indexOf(MARKER);
  if (at === -1) return null;
  const tail = url.slice(at + MARKER.length);
  const [keyPart] = tail.split('?');
  try {
    return decodeURIComponent(keyPart);
  } catch {
    // decodeURIComponent throws URIError on malformed sequences.
    return null;
  }
}
// Recursively count non-directory entries under `dir`.
// Returns 0 when `dir` does not exist.
function countFiles(dir) {
  if (!fs.existsSync(dir)) return 0;
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  return entries.reduce((total, entry) => {
    if (entry.isDirectory()) {
      return total + countFiles(path.join(dir, entry.name));
    }
    return total + 1;
  }, 0);
}
// Run `sql` on the shared pg client and label the resulting rows with `name`,
// so callers can collect named result sets into a report object.
async function q(name, sql) {
  const { rows } = await client.query(sql);
  return { name, rows };
}
// Entry point: connect once, run every audit query, verify that every
// referenced local-storage file exists on disk, then dump the combined
// report as pretty-printed JSON on stdout.
(async () => {
  await client.connect();
  const checks = {};
  // Row counts across the core tables.
  checks.tableCounts = await q('table_counts', `
    select 'auth.users' as table_name, count(*)::int as count from auth.users
    union all select 'profiles', count(*)::int from profiles
    union all select 'user_api_keys', count(*)::int from user_api_keys
    union all select 'works', count(*)::int from works
    union all select 'generation_jobs', count(*)::int from generation_jobs
    union all select 'credit_transactions', count(*)::int from credit_transactions
    union all select 'orders', count(*)::int from orders
    union all select 'api_providers', count(*)::int from api_providers
    union all select 'model_recommendations', count(*)::int from model_recommendations
  `);
  // Full profile listing (avatar presence only, not the URL itself).
  checks.profiles = await q('profiles', `
    select id, email, nickname, role, membership_tier, credits_balance, is_active,
    avatar_url is not null as has_avatar, created_at, updated_at
    from profiles
    order by created_at desc
  `);
  // Referential integrity between profiles and auth.users (both directions).
  checks.profileAuthIntegrity = await q('profile_auth_integrity', `
    select 'profile_without_auth' as issue, count(*)::int as count
    from profiles p left join auth.users u on u.id = p.id where u.id is null
    union all
    select 'auth_without_profile', count(*)::int
    from auth.users u left join profiles p on p.id = u.id where p.id is null
  `);
  // Admin accounts and whether they can actually log in (password hash set).
  checks.admin = await q('admin', `
    select p.id, p.email, p.nickname, p.role, p.membership_tier, p.is_active,
    u.password_hash is not null as has_password_hash
    from profiles p
    left join auth.users u on u.id = p.id
    where p.role = 'admin' or p.email = 'admin@example.com'
    order by p.created_at
  `);
  // API keys: check encryption prefix and that the stored ciphertext is not
  // accidentally the plaintext preview.
  checks.apiKeys = await q('api_keys', `
    select id, user_id, provider, supplier_name, type, model_name, note,
    api_key_preview, length(api_key_encrypted) as encrypted_len,
    api_key_encrypted like 'mjenc:v1:%' as has_enc_prefix,
    api_key_encrypted = api_key_preview as encrypted_equals_preview,
    is_active, created_at
    from user_api_keys
    order by created_at desc
  `);
  checks.apiKeyIntegrity = await q('api_key_integrity', `
    select count(*)::int as orphan_api_keys
    from user_api_keys k left join profiles p on p.id = k.user_id
    where p.id is null
  `);
  // Works broken down by type/status/visibility plus URL-shape sanity buckets.
  checks.worksDistribution = await q('works_distribution', `
    select type, status, is_public, count(*)::int as count,
    count(*) filter (where result_url like '/api/local-storage/%')::int as local_urls,
    count(*) filter (where result_url like 'data:%')::int as data_urls,
    count(*) filter (where result_url like '[%')::int as placeholder_urls
    from works
    group by type, status, is_public
    order by type, status, is_public
  `);
  checks.worksByUser = await q('works_by_user', `
    select p.email, p.nickname, w.user_id, count(*)::int as works
    from works w
    left join profiles p on p.id = w.user_id
    group by p.email, p.nickname, w.user_id
    order by works desc nulls last
  `);
  // Known bad states for works: orphans, null owners, unusable URLs, and
  // public works whose media is not under the gallery folder.
  checks.workIntegrity = await q('work_integrity', `
    select 'works_missing_user' as issue, count(*)::int as count
    from works w left join profiles p on p.id = w.user_id
    where p.id is null
    union all
    select 'works_null_user', count(*)::int from works where user_id is null
    union all
    select 'works_bad_url', count(*)::int
    from works
    where result_url is null or result_url = '' or result_url like 'data:%'
    union all
    select 'public_not_gallery_url', count(*)::int
    from works
    where is_public = true and result_url not like '/api/local-storage/gallery/%'
  `);
  // Recent reverse-prompt works (URLs truncated for readability).
  checks.reversePromptWorks = await q('reverse_prompt_works', `
    select id, user_id, type,
    left(result_url, 100) as result_url,
    left(coalesce(params->>'referenceImage', ''), 100) as reference_image,
    prompt <> '' as has_prompt,
    negative_prompt is not null as has_negative_prompt,
    created_at
    from works
    where type = 'reverse-prompt'
    order by created_at desc
    limit 20
  `);
  checks.galleryWorks = await q('gallery_works', `
    select id, user_id, type, is_public, left(result_url, 100) as result_url, created_at
    from works
    where is_public = true
    order by created_at desc
  `);
  checks.generationJobs = await q('generation_jobs', `
    select status, count(*)::int as count,
    count(*) filter (where user_id is null)::int as null_user_count
    from generation_jobs
    group by status
    order by status
  `);
  checks.generationJobsByUser = await q('generation_jobs_by_user', `
    select p.email, p.nickname, j.user_id, count(*)::int as jobs
    from generation_jobs j
    left join profiles p on p.id = j.user_id
    group by p.email, p.nickname, j.user_id
    order by jobs desc nulls last
  `);
  // File-existence pass: every local-storage URL referenced by a work
  // (result, thumbnail, reference image(s)) must exist under local-storage/.
  const works = (await client.query(`
    select id, result_url, thumbnail_url, params
    from works
    order by created_at desc
  `)).rows;
  const missingFiles = [];
  let referencedLocalUrls = 0;
  for (const work of works) {
    const urls = [
      ['result_url', work.result_url],
      ['thumbnail_url', work.thumbnail_url],
      ['referenceImage', work.params && work.params.referenceImage],
    ];
    if (work.params && Array.isArray(work.params.referenceImages)) {
      work.params.referenceImages.forEach((url, index) => urls.push([`referenceImages[${index}]`, url]));
    }
    for (const [field, url] of urls) {
      const key = storageKeyFromUrl(url);
      if (!key) continue; // external or data: URL -- nothing to check on disk
      referencedLocalUrls++;
      const full = path.join(root, 'local-storage', key);
      if (!fs.existsSync(full)) missingFiles.push({ workId: work.id, field, key });
    }
  }
  // Disk-side summary: directory presence and per-folder file counts.
  checks.storage = {
    localStorageExists: fs.existsSync(path.join(root, 'local-storage')),
    backupsExists: fs.existsSync(path.join(root, 'backups')),
    localFiles: countFiles(path.join(root, 'local-storage')),
    generatedFiles: countFiles(path.join(root, 'local-storage', 'generated')),
    galleryFiles: countFiles(path.join(root, 'local-storage', 'gallery')),
    reversePromptFiles: countFiles(path.join(root, 'local-storage', 'reverse-prompt')),
    referencedLocalUrls,
    missingFiles,
  };
  console.log(JSON.stringify(checks, null, 2));
  await client.end();
})().catch((error) => {
  // NOTE(review): on failure the connection is left open; the process exits
  // immediately so this is harmless for a one-shot script.
  console.error(error);
  process.exit(1);
});

67
audit_db_storage.js Normal file
View File

@@ -0,0 +1,67 @@
// Storage/DB audit: prints table counts, cross-table integrity checks and
// file-existence checks for local-storage URLs as one JSON report.
const { Client } = require('pg');
const fs = require('fs');
const path = require('path');
require('dotenv').config({ path: '/root/miaojingAI/.env.local' });
// Shared connection used by query() below.
const c = new Client({ connectionString: process.env.LOCAL_DB_URL });
// Project root on the deployment host (hard-coded by design).
const root = '/root/miaojingAI';
/**
 * Pull the storage key out of a '/api/local-storage/<key>' URL.
 * Returns the decoded key without its query string, or null when the input
 * is not a string, lacks the marker, or cannot be percent-decoded.
 */
function storageKeyFromUrl(url) {
  if (typeof url !== 'string' || !url) return null;
  const marker = '/api/local-storage/';
  const start = url.indexOf(marker);
  if (start < 0) return null;
  const encoded = url.slice(start + marker.length).split('?')[0];
  try {
    return decodeURIComponent(encoded);
  } catch {
    return null;
  }
}
/**
 * Run a parameterized query on the shared client `c`.
 * Never throws: failures are reported as `{ name, error }` so one broken
 * check cannot abort the whole audit.
 */
async function query(name, sql, params = []) {
  try {
    const result = await c.query(sql, params);
    return { name, rows: result.rows };
  } catch (error) {
    return { name, error: error.message };
  }
}
// Entry point: enumerate tables, count rows, run orphan/integrity queries,
// and verify referenced local-storage files exist, then print JSON.
(async () => {
  await c.connect();
  const out = {};
  out.db = await query('db', `select current_database() as database, current_user as user, now() as checked_at`);
  out.tables = await query('tables', `select schemaname, tablename from pg_tables where schemaname in ('public','auth') order by schemaname, tablename`);
  const tableNames = out.tables.rows || [];
  out.counts = [];
  // Per-table row counts. The table name is interpolated into the SQL, which
  // is acceptable only because names come straight from pg_tables; names that
  // need quoting would fail and be recorded via query()'s error capture.
  for (const t of tableNames) {
    const full = t.schemaname === 'public' ? t.tablename : `${t.schemaname}.${t.tablename}`;
    const r = await query(full, `select count(*)::int as count from ${full}`);
    out.counts.push({ table: full, count: r.rows?.[0]?.count ?? null, error: r.error });
  }
  // Admin accounts on both sides of the auth/profile split.
  out.admins = await query('admins', `select id,email,nickname,role,membership_tier,is_active,created_at,updated_at from profiles where role='admin' or email in ('admin@example.com','admin@miaojing.ai') order by created_at`);
  out.authAdmins = await query('auth_admins', `select id,email,created_at, password_hash is not null as has_password from auth.users where email in ('admin@example.com','admin@miaojing.ai') order by created_at`);
  // Orphan detection across all user-owned tables (limited samples).
  out.profileWithoutAuth = await query('profile_without_auth', `select p.id,p.email,p.role from profiles p left join auth.users u on u.id=p.id where u.id is null order by p.created_at limit 50`);
  out.authWithoutProfile = await query('auth_without_profile', `select u.id,u.email from auth.users u left join profiles p on p.id=u.id where p.id is null order by u.created_at limit 50`);
  out.worksMissingUser = await query('works_missing_user', `select w.id,w.user_id,w.title,w.type,w.status,w.created_at from works w left join profiles p on p.id=w.user_id where w.user_id is not null and p.id is null order by w.created_at desc limit 50`);
  out.worksNullUser = await query('works_null_user', `select id,title,type,status,created_at from works where user_id is null order by created_at desc limit 50`);
  out.worksByStatus = await query('works_by_status', `select status, count(*)::int from works group by status order by status`);
  out.worksByUser = await query('works_by_user', `select p.email,p.nickname,w.user_id,count(*)::int as works from works w left join profiles p on p.id=w.user_id group by p.email,p.nickname,w.user_id order by works desc nulls last limit 20`);
  out.workLikesMissing = await query('work_likes_missing_refs', `select wl.id,wl.user_id,wl.work_id from work_likes wl left join profiles p on p.id=wl.user_id left join works w on w.id=wl.work_id where p.id is null or w.id is null limit 50`);
  out.creditMissingUser = await query('credit_missing_user', `select ct.id,ct.user_id,ct.amount,ct.type,ct.created_at from credit_transactions ct left join profiles p on p.id=ct.user_id where p.id is null limit 50`);
  out.ordersMissingUser = await query('orders_missing_user', `select o.id,o.user_id,o.order_no,o.status,o.created_at from orders o left join profiles p on p.id=o.user_id where o.user_id is not null and p.id is null limit 50`);
  out.apiKeysMissingUser = await query('api_keys_missing_user', `select k.id,k.user_id,k.provider,k.type from user_api_keys k left join profiles p on p.id=k.user_id where p.id is null limit 50`);
  out.jobsMissingUser = await query('jobs_missing_user', `select j.id,j.user_id,j.type,j.status,j.created_at from generation_jobs j left join profiles p on p.id=j.user_id where j.user_id is not null and p.id is null order by j.created_at desc limit 50`);
  out.jobsNullUser = await query('jobs_null_user', `select id,type,status,created_at from generation_jobs where user_id is null order by created_at desc limit 50`);
  out.jobsByUser = await query('jobs_by_user', `select p.email,p.nickname,j.user_id,count(*)::int as jobs from generation_jobs j left join profiles p on p.id=j.user_id group by p.email,p.nickname,j.user_id order by jobs desc nulls last limit 20`);
  // File-existence pass over work result/thumbnail URLs.
  out.worksFileCheck = { totalLocalUrls: 0, missing: [] };
  const works = await query('works_urls', `select id,result_url,thumbnail_url from works order by created_at desc`);
  for (const w of works.rows || []) {
    for (const field of ['result_url','thumbnail_url']) {
      const key = storageKeyFromUrl(w[field]);
      if (!key) continue;
      out.worksFileCheck.totalLocalUrls++;
      const abs = path.join(root, 'local-storage', key);
      if (!fs.existsSync(abs)) out.worksFileCheck.missing.push({ workId: w.id, field, key });
    }
  }
  out.storage = {
    localStorageExists: fs.existsSync(path.join(root,'local-storage')),
    backupsExists: fs.existsSync(path.join(root,'backups')),
    localFiles: 0,
    galleryFiles: 0,
  };
  // Recursive file counter (non-directories only); 0 for a missing dir.
  function walk(dir) { if (!fs.existsSync(dir)) return 0; let n=0; for (const e of fs.readdirSync(dir,{withFileTypes:true})) { const p=path.join(dir,e.name); n += e.isDirectory()?walk(p):1; } return n; }
  out.storage.localFiles = walk(path.join(root,'local-storage'));
  out.storage.galleryFiles = walk(path.join(root,'local-storage','gallery'));
  console.log(JSON.stringify(out, null, 2));
  await c.end();
})().catch(e => { console.error(e); process.exit(1); });

206
audit_recovered_data.js Normal file
View File

@@ -0,0 +1,206 @@
// Audit for recovered/imported data: summarizes users and works, samples
// orphaned rows, and hunts for the likely real owners of ownerless public works.
const { Pool } = require('pg');
// Relative path: expects to be run from the project root.
require('dotenv').config({ path: '.env.local' });
// Sentinel UUID used as the owner of works imported without a real user.
const SYSTEM_USER_ID = '00000000-0000-0000-0000-000000000000';
/**
 * Truncate a value for display. Non-strings are JSON-stringified first.
 * null/undefined pass through unchanged; longer text is cut to `length`
 * characters with a '...' suffix.
 */
function short(value, length = 160) {
  if (value == null) return value;
  let text;
  if (typeof value === 'string') {
    text = value;
  } else {
    text = JSON.stringify(value);
  }
  if (text.length <= length) return text;
  return `${text.slice(0, length)}...`;
}
// Runs all audit queries in sequence and prints a single JSON document.
// Read-only: no data is modified.
async function main() {
  const pool = new Pool({ connectionString: process.env.LOCAL_DB_URL });
  const client = await pool.connect();
  try {
    // Schema snapshot of the tables this audit reasons about.
    const tableColumns = await client.query(`
      SELECT table_schema, table_name, column_name, data_type
      FROM information_schema.columns
      WHERE (table_schema = 'public' AND table_name IN ('profiles', 'works', 'user_api_keys', 'orders', 'credit_transactions'))
      OR (table_schema = 'auth' AND table_name = 'users')
      ORDER BY table_schema, table_name, ordinal_position
    `);
    // One-row rollup of user counts and auth/profile mismatches.
    const userSummary = await client.query(`
      SELECT
      (SELECT COUNT(*)::int FROM auth.users) AS auth_users,
      (SELECT COUNT(*)::int FROM profiles) AS profiles,
      (SELECT COUNT(*)::int FROM profiles WHERE role = 'admin') AS admins,
      (SELECT COUNT(*)::int FROM profiles WHERE role <> 'admin') AS non_admin_profiles,
      (SELECT COUNT(*)::int FROM auth.users u LEFT JOIN profiles p ON p.id = u.id WHERE p.id IS NULL) AS auth_without_profile,
      (SELECT COUNT(*)::int FROM profiles p LEFT JOIN auth.users u ON u.id = p.id WHERE u.id IS NULL) AS profile_without_auth,
      (SELECT COUNT(*)::int FROM auth.users WHERE password_hash IS NULL OR password_hash = '') AS auth_without_password_hash,
      (SELECT COUNT(*)::int FROM auth.users WHERE password_hash IS NOT NULL AND password_hash <> '') AS auth_with_password_hash
    `);
    const userSamples = await client.query(`
      SELECT p.id, p.email, p.nickname, p.role, p.membership_tier, p.is_active,
      u.id IS NOT NULL AS has_auth,
      (u.password_hash IS NOT NULL AND u.password_hash <> '') AS has_password_hash,
      p.created_at
      FROM profiles p
      LEFT JOIN auth.users u ON u.id = p.id
      ORDER BY p.created_at DESC NULLS LAST
      LIMIT 80
    `);
    // Works rollup, including how many belong to the system sentinel user.
    const workSummary = await client.query(`
      SELECT
      COUNT(*)::int AS total,
      COUNT(*) FILTER (WHERE status = 'completed')::int AS completed,
      COUNT(*) FILTER (WHERE is_public = true AND status = 'completed')::int AS public_completed,
      COUNT(*) FILTER (WHERE is_public = false AND status = 'completed')::int AS private_completed,
      COUNT(*) FILTER (WHERE user_id IS NULL)::int AS null_user_id,
      COUNT(*) FILTER (WHERE user_id = $1)::int AS system_user_id,
      COUNT(*) FILTER (WHERE p.id IS NULL)::int AS missing_profile,
      COUNT(*) FILTER (WHERE p.id IS NOT NULL)::int AS linked_profile
      FROM works w
      LEFT JOIN profiles p ON p.id = w.user_id
    `, [SYSTEM_USER_ID]);
    // Same rollup restricted to publicly listed, completed works.
    const publicWorkSummary = await client.query(`
      SELECT
      COUNT(*)::int AS public_total,
      COUNT(*) FILTER (WHERE w.user_id IS NULL)::int AS null_user_id,
      COUNT(*) FILTER (WHERE w.user_id = $1)::int AS system_user_id,
      COUNT(*) FILTER (WHERE p.id IS NULL)::int AS missing_profile,
      COUNT(*) FILTER (WHERE p.id IS NOT NULL)::int AS linked_profile
      FROM works w
      LEFT JOIN profiles p ON p.id = w.user_id
      WHERE w.is_public = true AND w.status = 'completed'
    `, [SYSTEM_USER_ID]);
    const workByUser = await client.query(`
      SELECT
      COALESCE(p.email, '[missing-profile]') AS email,
      COALESCE(p.nickname, '') AS nickname,
      COALESCE(p.role, '') AS role,
      w.user_id,
      COUNT(*)::int AS total_works,
      COUNT(*) FILTER (WHERE w.status = 'completed')::int AS completed_works,
      COUNT(*) FILTER (WHERE w.is_public = true AND w.status = 'completed')::int AS public_works,
      COUNT(*) FILTER (WHERE w.is_public = false AND w.status = 'completed')::int AS history_works
      FROM works w
      LEFT JOIN profiles p ON p.id = w.user_id
      GROUP BY w.user_id, p.email, p.nickname, p.role
      ORDER BY total_works DESC
      LIMIT 120
    `);
    // Samples of works that have no valid owner (missing/null/system user).
    const orphanSamples = await client.query(`
      SELECT w.id, w.user_id, w.type, w.status, w.is_public, w.result_url,
      LEFT(COALESCE(w.prompt, ''), 140) AS prompt,
      w.params,
      w.created_at
      FROM works w
      LEFT JOIN profiles p ON p.id = w.user_id
      WHERE p.id IS NULL OR w.user_id = $1 OR w.user_id IS NULL
      ORDER BY w.created_at DESC NULLS LAST
      LIMIT 80
    `, [SYSTEM_USER_ID]);
    // Which params keys exist on ownerless public works -- used to discover
    // any field that might encode the original owner.
    const paramKeys = await client.query(`
      SELECT key, COUNT(*)::int AS count
      FROM works w
      CROSS JOIN LATERAL jsonb_object_keys(COALESCE(w.params, '{}'::jsonb)) AS key
      LEFT JOIN profiles p ON p.id = w.user_id
      WHERE w.is_public = true
      AND w.status = 'completed'
      AND (p.id IS NULL OR w.user_id = $1 OR w.user_id IS NULL)
      GROUP BY key
      ORDER BY count DESC, key
      LIMIT 80
    `, [SYSTEM_USER_ID]);
    // Probe common owner-ish params fields (snake_case and camelCase).
    const possibleOwnerFields = await client.query(`
      SELECT
      id,
      user_id,
      params->>'user_id' AS params_user_id,
      params->>'userId' AS params_user_id_camel,
      params->>'publisher_id' AS publisher_id,
      params->>'publisherId' AS publisher_id_camel,
      params->>'owner_id' AS owner_id,
      params->>'ownerId' AS owner_id_camel,
      params->>'created_by' AS created_by,
      params->>'createdBy' AS created_by_camel,
      params->>'email' AS params_email,
      params->>'userEmail' AS params_user_email,
      params->>'publisherEmail' AS params_publisher_email,
      params->>'nickname' AS params_nickname,
      params->>'userName' AS params_user_name,
      LEFT(COALESCE(prompt, ''), 120) AS prompt
      FROM works
      WHERE is_public = true
      AND status = 'completed'
      AND (user_id IS NULL OR user_id = $1 OR NOT EXISTS (SELECT 1 FROM profiles p WHERE p.id = works.user_id))
      ORDER BY created_at DESC NULLS LAST
      LIMIT 80
    `, [SYSTEM_USER_ID]);
    // Match ownerless public works to privately-owned duplicates by shared
    // result_url, thumbnail_url, or same prompt within a 30-minute window.
    // ("public"/"private" are row aliases here, not SQL keywords in this position.)
    const duplicateCandidates = await client.query(`
      SELECT
      public.id AS orphan_id,
      public.user_id AS orphan_user_id,
      private.id AS owned_id,
      private.user_id AS owner_user_id,
      p.email,
      p.nickname,
      CASE
      WHEN private.result_url = public.result_url THEN 'result_url'
      WHEN COALESCE(private.thumbnail_url, '') <> '' AND private.thumbnail_url = public.thumbnail_url THEN 'thumbnail_url'
      WHEN COALESCE(private.prompt, '') <> '' AND private.prompt = public.prompt THEN 'prompt_time'
      ELSE 'unknown'
      END AS match_type,
      ABS(EXTRACT(EPOCH FROM (private.created_at - public.created_at)))::int AS seconds_apart,
      LEFT(COALESCE(public.prompt, ''), 120) AS prompt
      FROM works public
      JOIN works private
      ON private.id <> public.id
      AND private.user_id IS NOT NULL
      AND private.user_id <> $1
      AND (
      private.result_url = public.result_url
      OR (
      COALESCE(public.thumbnail_url, '') <> ''
      AND private.thumbnail_url = public.thumbnail_url
      )
      OR (
      COALESCE(private.prompt, '') <> ''
      AND private.prompt = public.prompt
      AND private.created_at BETWEEN public.created_at - INTERVAL '30 minutes' AND public.created_at + INTERVAL '30 minutes'
      )
      )
      JOIN profiles p ON p.id = private.user_id
      LEFT JOIN profiles public_profile ON public_profile.id = public.user_id
      WHERE public.is_public = true
      AND public.status = 'completed'
      AND (public_profile.id IS NULL OR public.user_id = $1 OR public.user_id IS NULL)
      ORDER BY public.created_at DESC NULLS LAST, match_type, seconds_apart
      LIMIT 100
    `, [SYSTEM_USER_ID]);
    // Assemble the report; long URL/params fields are truncated for readability.
    const output = {
      columns: tableColumns.rows,
      userSummary: userSummary.rows[0],
      userSamples: userSamples.rows,
      workSummary: workSummary.rows[0],
      publicWorkSummary: publicWorkSummary.rows[0],
      workByUser: workByUser.rows,
      orphanSamples: orphanSamples.rows.map(row => ({ ...row, result_url: short(row.result_url, 120), params: short(row.params, 300) })),
      anonymousParamKeys: paramKeys.rows,
      possibleOwnerFields: possibleOwnerFields.rows,
      duplicateCandidates: duplicateCandidates.rows,
    };
    console.log(JSON.stringify(output, null, 2));
  } finally {
    // Always return the connection and drain the pool, even on query failure.
    client.release();
    await pool.end();
  }
}
main().catch(error => {
  console.error(error);
  process.exit(1);
});

View File

@@ -0,0 +1,64 @@
// One-off data-repair script: re-attaches public completed works currently
// owned by the SYSTEM sentinel user to their most likely real owner, matched
// via an identical privately-owned work (same result_url, thumbnail_url, or
// same prompt within 10 minutes). Runs inside a single transaction.
const { Pool } = require('pg');
require('dotenv').config({ path: '.env.local' });
// Sentinel owner id for works imported without a real user.
const SYSTEM_USER_ID = '00000000-0000-0000-0000-000000000000';
(async () => {
  const pool = new Pool({ connectionString: process.env.LOCAL_DB_URL });
  const client = await pool.connect();
  try {
    await client.query('BEGIN');
    // DISTINCT ON picks exactly one owner per orphan, preferring the
    // strongest match (result_url > thumbnail_url > prompt proximity) and,
    // within a rank, the closest creation time.
    const result = await client.query(`
      WITH candidates AS (
      SELECT DISTINCT ON (public.id)
      public.id AS public_id,
      private.user_id AS owner_user_id,
      CASE
      WHEN private.result_url = public.result_url THEN 1
      WHEN private.thumbnail_url = public.thumbnail_url THEN 2
      ELSE 3
      END AS confidence_rank,
      ABS(EXTRACT(EPOCH FROM (private.created_at - public.created_at))) AS time_distance
      FROM works public
      JOIN works private
      ON private.id <> public.id
      AND private.user_id <> $1
      AND (
      private.result_url = public.result_url
      OR (
      public.thumbnail_url IS NOT NULL
      AND private.thumbnail_url = public.thumbnail_url
      )
      OR (
      COALESCE(private.prompt, '') = COALESCE(public.prompt, '')
      AND private.created_at BETWEEN public.created_at - INTERVAL '10 minutes' AND public.created_at + INTERVAL '10 minutes'
      )
      )
      JOIN profiles p ON p.id = private.user_id
      WHERE public.is_public = true
      AND public.status = 'completed'
      AND public.user_id = $1
      ORDER BY public.id, confidence_rank, time_distance
      ),
      updated AS (
      UPDATE works w
      SET user_id = candidates.owner_user_id
      FROM candidates
      WHERE w.id = candidates.public_id
      RETURNING w.id, w.user_id
      )
      SELECT COUNT(*)::int AS fixed_count FROM updated
    `, [SYSTEM_USER_ID]);
    await client.query('COMMIT');
    console.log(JSON.stringify(result.rows[0] || { fixed_count: 0 }, null, 2));
  } catch (error) {
    // Leave the data untouched on any failure.
    await client.query('ROLLBACK');
    throw error;
  } finally {
    client.release();
    await pool.end();
  }
})().catch((error) => {
  console.error(error);
  process.exit(1);
});

View File

@@ -0,0 +1,46 @@
// One-off repair: assigns every generation_jobs row with a NULL user_id to
// the oldest admin profile, then prints a per-status summary of the result.
const { Client } = require('pg');
require('dotenv').config({ path: '/root/miaojingAI/.env.local' });
(async () => {
  const client = new Client({ connectionString: process.env.LOCAL_DB_URL });
  await client.connect();
  try {
    await client.query('BEGIN');
    // Pick the earliest-created admin as the backfill owner.
    const admin = await client.query(
      `SELECT id FROM profiles WHERE role = 'admin' ORDER BY created_at ASC LIMIT 1`,
    );
    if (!admin.rows[0]?.id) {
      throw new Error('No admin profile found');
    }
    const result = await client.query(
      `UPDATE generation_jobs
      SET user_id = $1
      WHERE user_id IS NULL
      RETURNING id`,
      [admin.rows[0].id],
    );
    await client.query('COMMIT');
    // Post-commit snapshot: null_user_count should now be 0 for every status.
    const summary = await client.query(
      `SELECT status,
      count(*)::int AS count,
      count(*) FILTER (WHERE user_id IS NULL)::int AS null_user_count
      FROM generation_jobs
      GROUP BY status
      ORDER BY status`,
    );
    console.log(JSON.stringify({
      backfilled: result.rowCount,
      adminUserId: admin.rows[0].id,
      summary: summary.rows,
    }, null, 2));
  } catch (error) {
    await client.query('ROLLBACK');
    throw error;
  } finally {
    await client.end();
  }
})().catch((error) => {
  console.error(error);
  process.exit(1);
});

View File

@@ -0,0 +1,73 @@
const fs = require('fs');
const path = require('path');
const { Pool } = require('pg');
require('dotenv').config({ path: '.env.local' });
/**
 * Recursively walk `value` (string, array, or plain object) and collect every
 * local-storage key referenced via an '/api/local-storage/<key>' URL.
 *
 * @param {*} value - arbitrary JSON-ish value to scan
 * @param {Set<string>} keys - accumulator; also the return value
 * @returns {Set<string>} the same Set, with any discovered keys added
 */
function collectLocalStorageKeys(value, keys = new Set()) {
  if (typeof value === 'string') {
    const marker = '/api/local-storage/';
    const index = value.indexOf(marker);
    if (index >= 0) {
      // decodeURIComponent throws on malformed percent-encoding; skip such
      // URLs instead of aborting the whole cleanup run (this matches the
      // guarded storageKeyFromUrl helpers in the sibling audit scripts).
      try {
        keys.add(decodeURIComponent(value.slice(index + marker.length).split('?')[0]));
      } catch {
        // unparsable key: nothing to collect for this URL
      }
    }
    return keys;
  }
  if (Array.isArray(value)) {
    value.forEach(item => collectLocalStorageKeys(item, keys));
    return keys;
  }
  if (value && typeof value === 'object') {
    Object.values(value).forEach(item => collectLocalStorageKeys(item, keys));
  }
  return keys;
}
// Entry point: deletes the 'codex-import-edge-%' test fixtures (works and
// announcements) and removes any local-storage files they referenced.
(async () => {
  const pool = new Pool({ connectionString: process.env.LOCAL_DB_URL });
  const client = await pool.connect();
  try {
    // Read the rows first so their storage keys can be collected before the
    // DELETE removes them.
    const works = await client.query(
      `SELECT id, result_url, thumbnail_url, params
      FROM works
      WHERE title LIKE 'codex-import-edge-%' OR prompt LIKE 'codex-import-edge-%'`,
    );
    const keys = new Set();
    for (const row of works.rows) {
      collectLocalStorageKeys(row.result_url, keys);
      collectLocalStorageKeys(row.thumbnail_url, keys);
      collectLocalStorageKeys(row.params, keys);
    }
    const deletedWorks = await client.query(
      `DELETE FROM works
      WHERE title LIKE 'codex-import-edge-%' OR prompt LIKE 'codex-import-edge-%'`,
    );
    const deletedAnnouncements = await client.query(
      `DELETE FROM announcements
      WHERE title LIKE 'codex-import-edge-%'`,
    );
    let deletedFiles = 0;
    const base = path.join(process.cwd(), 'local-storage');
    for (const key of keys) {
      const filePath = path.normalize(path.join(base, key));
      // Containment check: require the path separator so a traversal key
      // (e.g. '../local-storage-evil/x') cannot escape into a sibling
      // directory whose name merely starts with `base`.
      if (!filePath.startsWith(base + path.sep)) continue;
      if (fs.existsSync(filePath)) {
        fs.unlinkSync(filePath);
        deletedFiles += 1;
      }
    }
    console.log(JSON.stringify({
      deletedWorks: deletedWorks.rowCount || 0,
      deletedAnnouncements: deletedAnnouncements.rowCount || 0,
      deletedFiles,
    }, null, 2));
  } finally {
    client.release();
    await pool.end();
  }
})().catch((error) => {
  console.error(error);
  process.exit(1);
});

21
components.json Normal file
View File

@@ -0,0 +1,21 @@
{
"$schema": "https://ui.shadcn.com/schema.json",
"style": "new-york",
"rsc": true,
"tsx": true,
"tailwind": {
"config": "",
"css": "src/app/globals.css",
"baseColor": "neutral",
"cssVariables": true,
"prefix": ""
},
"aliases": {
"components": "@/components",
"utils": "@/lib/utils",
"ui": "@/components/ui",
"lib": "@/lib",
"hooks": "@/hooks"
},
"iconLibrary": "lucide"
}

View File

@@ -0,0 +1,109 @@
# Miaojing AI creation platform -- production Nginx config template.
# Before use, replace:
# - example.com
# - /etc/letsencrypt/live/example.com/fullchain.pem
# - /etc/letsencrypt/live/example.com/privkey.pem
# - if the frontend port was changed in the scripts, update the 5000 in proxy_pass too
# Per-client rate-limit zones (10 MB shared memory each), keyed by remote address.
limit_req_zone $binary_remote_addr zone=miaojing_auth:10m rate=10r/m;
limit_req_zone $binary_remote_addr zone=miaojing_email:10m rate=6r/m;
limit_req_zone $binary_remote_addr zone=miaojing_generation:10m rate=20r/m;
limit_req_zone $binary_remote_addr zone=miaojing_download:10m rate=60r/m;
limit_req_zone $binary_remote_addr zone=miaojing_admin:10m rate=120r/m;
# Map for WebSocket upgrades: keep "Connection: upgrade" only when the client
# actually sent an Upgrade header.
map $http_upgrade $connection_upgrade {
default upgrade;
'' close;
}
# Plain-HTTP server: serve ACME challenges, redirect everything else to HTTPS.
server {
listen 80;
server_name example.com www.example.com;
location /.well-known/acme-challenge/ {
root /var/www/html;
}
location / {
return 301 https://$host$request_uri;
}
}
server {
# NOTE(review): `listen ... http2` is deprecated in nginx >= 1.25 in favor of
# a separate `http2 on;` directive -- confirm the target nginx version.
listen 443 ssl http2;
server_name example.com www.example.com;
ssl_certificate /etc/letsencrypt/live/example.com/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/example.com/privkey.pem;
ssl_session_timeout 1d;
ssl_session_cache shared:MozSSL:10m;
ssl_session_tickets off;
ssl_protocols TLSv1.2 TLSv1.3;
ssl_prefer_server_ciphers off;
# Large uploads (reference images) and slow generation backends need
# generous body-size and proxy timeouts.
client_max_body_size 80m;
keepalive_timeout 65;
proxy_connect_timeout 60s;
proxy_send_timeout 360s;
proxy_read_timeout 360s;
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains; preload" always;
add_header X-Content-Type-Options "nosniff" always;
add_header X-Frame-Options "SAMEORIGIN" always;
add_header Referrer-Policy "strict-origin-when-cross-origin" always;
add_header Permissions-Policy "camera=(), microphone=(), geolocation=(), payment=()" always;
access_log /var/log/nginx/miaojing-access.log;
error_log /var/log/nginx/miaojing-error.log warn;
# Route-specific rate limits; everything proxies to the Next.js app on :5000.
location = /api/auth/login {
limit_req zone=miaojing_auth burst=20 nodelay;
proxy_pass http://127.0.0.1:5000;
include proxy_params;
proxy_set_header X-Forwarded-Proto https;
}
location = /api/auth/register {
limit_req zone=miaojing_auth burst=20 nodelay;
proxy_pass http://127.0.0.1:5000;
include proxy_params;
proxy_set_header X-Forwarded-Proto https;
}
location ^~ /api/email/ {
limit_req zone=miaojing_email burst=10 nodelay;
proxy_pass http://127.0.0.1:5000;
include proxy_params;
proxy_set_header X-Forwarded-Proto https;
}
location ^~ /api/generate/ {
limit_req zone=miaojing_generation burst=30 nodelay;
proxy_pass http://127.0.0.1:5000;
include proxy_params;
proxy_set_header X-Forwarded-Proto https;
}
location = /api/download {
limit_req zone=miaojing_download burst=120 nodelay;
proxy_pass http://127.0.0.1:5000;
include proxy_params;
proxy_set_header X-Forwarded-Proto https;
}
location ^~ /api/admin/ {
limit_req zone=miaojing_admin burst=120 nodelay;
proxy_pass http://127.0.0.1:5000;
include proxy_params;
proxy_set_header X-Forwarded-Proto https;
}
# Catch-all: unthrottled, with WebSocket upgrade support for the app.
location / {
proxy_pass http://127.0.0.1:5000;
include proxy_params;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_set_header X-Forwarded-Proto https;
}
}

BIN
docs/images/create.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 156 KiB

BIN
docs/images/gallery.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 83 KiB

BIN
docs/images/home.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 154 KiB

54
ecosystem.config.cjs Normal file
View File

@@ -0,0 +1,54 @@
module.exports = {
apps: [
{
name: 'miaojing-api',
cwd: '/root/miaojingAI',
script: 'npm',
args: 'run start',
exec_mode: 'fork',
instances: 1,
max_memory_restart: '512M',
restart_delay: 3000,
env: {
NODE_ENV: 'production',
COZE_PROJECT_ENV: 'PROD',
APP_RUNTIME_ROLE: 'backend',
DEPLOY_RUN_PORT: '5100',
},
},
{
name: 'miaojing-web',
cwd: '/root/miaojingAI',
script: 'npm',
args: 'run start',
exec_mode: 'fork',
instances: 1,
max_memory_restart: '512M',
restart_delay: 3000,
env: {
NODE_ENV: 'production',
COZE_PROJECT_ENV: 'PROD',
APP_RUNTIME_ROLE: 'frontend',
BACKEND_INTERNAL_URL: 'http://127.0.0.1:5100',
CONSOLE_INTERNAL_URL: 'http://127.0.0.1:5200',
DEPLOY_RUN_PORT: '5000',
},
},
{
name: 'miaojing-console',
cwd: '/root/miaojingAI',
script: 'npm',
args: 'run start',
exec_mode: 'fork',
instances: 1,
max_memory_restart: '512M',
restart_delay: 3000,
env: {
NODE_ENV: 'production',
COZE_PROJECT_ENV: 'PROD',
APP_RUNTIME_ROLE: 'console',
DEPLOY_RUN_PORT: '5200',
},
},
],
};

52
eslint.config.mjs Normal file
View File

@@ -0,0 +1,52 @@
import nextTs from 'eslint-config-next/typescript';
import nextVitals from 'eslint-config-next/core-web-vitals';
import { defineConfig, globalIgnores } from 'eslint/config';
// Project-wide no-restricted-syntax rules. The selector bans literal <head>
// JSX elements; the (user-facing, Chinese) message explains the approved
// alternatives, so it is kept verbatim.
const syntaxRules = [
{
selector: 'JSXOpeningElement[name.name="head"]',
message:
'禁止使用 head 标签,优先使用 metadata。三方 CSS、字体等资源可以在 globals.css 中顶部通过 @import 引入或者使用 next/fontpreload, preconnect, dns-prefetch 通过 ReactDOM 的 preload、preconnect、dns-prefetch 方法引入json-ld 可阅读 https://nextjs.org/docs/app/guides/json-ld',
},
];
// Extra rule applied only to next.config.ts: forbids hard-coded absolute
// paths (a string literal starting with '/') for root/outputFileTracingRoot.
const nextConfigRestrictedSyntaxRules = [
{
selector:
'Property[key.name=/^(root|outputFileTracingRoot)$/] > Literal[value=/^\\//]',
message:
'禁止在 next.config 中写死绝对路径,请改用 path.resolve(__dirname, ...)、import.meta.dirname 或 process.cwd() 动态拼接。',
},
];
// Flat ESLint config: Next.js presets, the restrictions above, and explicit
// global ignores for build output and CommonJS scripts.
const eslintConfig = defineConfig([
...nextVitals,
...nextTs,
{
rules: {
'react-hooks/set-state-in-effect': 'off',
'no-restricted-syntax': ['error', ...syntaxRules],
},
},
{
files: ['next.config.ts'],
rules: {
'no-restricted-syntax': ['error', ...nextConfigRestrictedSyntaxRules],
},
},
// Override default ignores of eslint-config-next.
globalIgnores([
// Default ignores of eslint-config-next:
'.next/**',
'out/**',
'build/**',
'next-env.d.ts',
// Build artifacts:
'server.js',
'dist/**',
// Script files (CommonJS):
'scripts/**/*.js',
]),
]);
export default eslintConfig;

59
fix_persistence_data.js Normal file
View File

@@ -0,0 +1,59 @@
// One-off recovery script: resets the admin password hash, re-attaches orphan
// works to the admin account, deletes orphan credit transactions, and copies
// public works' media under local-storage/gallery so their URLs persist.
const { Client } = require('pg');
const fs = require('fs');
const path = require('path');
require('dotenv').config({ path: '/root/miaojingAI/.env.local' });
// Project root on the deployment host.
const root = '/root/miaojingAI';
const c = new Client({ connectionString: process.env.LOCAL_DB_URL });
/**
 * Extract the storage key from a local-storage URL.
 * Falsy inputs yield null; other values are coerced with String() before the
 * marker search. Returns null when the marker is absent or decoding fails.
 */
function keyFromUrl(url) {
  if (!url) return null;
  const text = String(url);
  const marker = '/api/local-storage/';
  const markerAt = text.indexOf(marker);
  if (markerAt < 0) return null;
  const encodedKey = text.slice(markerAt + marker.length).split('?')[0];
  try {
    return decodeURIComponent(encodedKey);
  } catch {
    return null;
  }
}
function publicUrl(key) { return `/api/local-storage/${key}`; }
// Copy the file behind a local-storage URL into the gallery tree and return
// the new public URL. Returns the input URL unchanged when it is not a
// local-storage URL, is already under gallery/, or its source file is missing.
function copyToGallery(url, type, id, field) {
  const key = keyFromUrl(url);
  if (!key) return url;
  if (key.startsWith('gallery/')) return url;
  const src = path.join(root, 'local-storage', key);
  if (!fs.existsSync(src)) return url;
  const ext = path.extname(key) || '.bin';
  // Destination subfolder: thumbnails, videos (for the video work types), or images.
  const folder = field === 'thumbnail_url' ? 'gallery/thumbnails' : (type === 'text2video' || type === 'img2video' || type === 'video' ? 'gallery/videos' : 'gallery/images');
  const destDir = path.join(root, 'local-storage', folder);
  fs.mkdirSync(destDir, { recursive: true });
  // Deterministic name (<workId>-<field><ext>) makes the copy idempotent.
  const destKey = `${folder}/${id}-${field}${ext}`;
  const dest = path.join(root, 'local-storage', destKey);
  if (!fs.existsSync(dest)) fs.copyFileSync(src, dest);
  return publicUrl(destKey);
}
// Entry point: runs all repairs inside one transaction, then prints a summary.
(async () => {
  await c.connect();
  await c.query('BEGIN');
  try {
    // Prefer admin@example.com if present, otherwise the oldest admin.
    const adminRes = await c.query(`select id,email from profiles where role='admin' order by case when email='admin@example.com' then 0 else 1 end, created_at asc limit 1`);
    if (!adminRes.rows.length) throw new Error('No admin profile found');
    const admin = adminRes.rows[0];
    // CAUTION: resets the admin login to ADMIN_DEFAULT_PASSWORD (or the
    // literal fallback 'admin123') using pgcrypto's bcrypt. Intended only for
    // recovery; change the password immediately afterwards.
    const password = process.env.ADMIN_DEFAULT_PASSWORD || 'admin123';
    await c.query(`update auth.users set password_hash = crypt($1, gen_salt('bf')) where id=$2`, [password, admin.id]);
    // Re-own works whose user is missing or no longer has a profile.
    const worksFixed = await c.query(`update works set user_id=$1 where user_id is null or user_id not in (select id from profiles) returning id`, [admin.id]);
    // Credit transactions cannot be re-owned meaningfully; drop the orphans.
    const creditDeleted = await c.query(`delete from credit_transactions where user_id not in (select id from profiles) returning id`);
    // Copy every public work's media into the gallery tree and point the
    // work's URLs at the copies.
    const publicWorks = await c.query(`select id,type,result_url,thumbnail_url from works where is_public=true order by created_at asc`);
    let copied = 0;
    for (const w of publicWorks.rows) {
      const nextResult = copyToGallery(w.result_url, w.type, w.id, 'result_url');
      const nextThumb = w.thumbnail_url ? copyToGallery(w.thumbnail_url, w.type, w.id, 'thumbnail_url') : null;
      if (nextResult !== w.result_url || nextThumb !== w.thumbnail_url) {
        await c.query(`update works set result_url=$1, thumbnail_url=$2, updated_at=now() where id=$3`, [nextResult, nextThumb, w.id]);
        copied++;
      }
    }
    await c.query('COMMIT');
    console.log(JSON.stringify({ admin: admin.email, passwordHashSet: true, worksFixed: worksFixed.rowCount, orphanCreditsDeleted: creditDeleted.rowCount, publicWorksCopiedToGallery: copied }, null, 2));
  } catch (e) {
    // Roll back the DB changes; note that files already copied to the gallery
    // are NOT removed on rollback (harmless duplicates).
    await c.query('ROLLBACK');
    throw e;
  } finally {
    await c.end();
  }
})().catch(e => { console.error(e); process.exit(1); });

12
inspect_before_fix.js Normal file
View File

@@ -0,0 +1,12 @@
// Pre-fix inspection script: dumps works visibility counts, all work URLs and
// orphaned credit transactions so the operator can review state before fixes.
const { Client } = require('pg');
require('dotenv').config({ path: '/root/miaojingAI/.env.local' });
const c = new Client({ connectionString: process.env.LOCAL_DB_URL });
(async () => {
  await c.connect();
  try {
    for (const [name, sql] of [
      ['works_public', `select is_public, count(*)::int from works group by is_public order by is_public`],
      ['works_urls', `select id,user_id,is_public,type,result_url,thumbnail_url from works order by created_at desc`],
      ['orphan_credit', `select id,user_id,amount,type,description from credit_transactions where user_id not in (select id from profiles)`]
    ]) {
      const r = await c.query(sql);
      console.log('--- ' + name);
      console.table(r.rows);
    }
  } finally {
    // Bug fix: always close the connection; previously c.end() was skipped
    // whenever a query threw, leaving the client open until process exit.
    await c.end();
  }
})().catch(e => { console.error(e); process.exit(1); });

19
inspect_db.js Normal file
View File

@@ -0,0 +1,19 @@
// DB inspection script: prints profile/job counts and recent rows (plus the
// job→profile join) for manual review with console.table.
const { Client } = require('pg');
require('dotenv').config({ path: '/root/miaojingAI/.env.local' });
const c = new Client({ connectionString: process.env.LOCAL_DB_URL });
(async () => {
  await c.connect();
  try {
    const queries = [
      ['profiles_count', 'select count(*)::int as n from profiles'],
      ['profiles', 'select id,email,nickname,role,membership_tier,is_active,created_at from profiles order by created_at desc limit 20'],
      ['jobs_count', 'select count(*)::int as n from generation_jobs'],
      ['jobs', 'select id,user_id,type,status,created_at from generation_jobs order by created_at desc limit 20'],
      ['jobs_join_profiles', `select j.id,j.user_id,p.email,p.nickname,p.role,j.type,j.status,j.created_at from generation_jobs j left join profiles p on p.id=j.user_id order by j.created_at desc limit 20`]
    ];
    for (const [name, sql] of queries) {
      const r = await c.query(sql);
      console.log('--- ' + name);
      console.table(r.rows);
    }
  } finally {
    // Bug fix: close the connection even when a query fails; the original
    // only called c.end() on the success path.
    await c.end();
  }
})().catch(e => { console.error(e); process.exit(1); });

View File

@@ -0,0 +1,61 @@
// Inspection script: correlates user API keys with anonymous (system-owned)
// public gallery works whose params.model references 'custom:<key-id>',
// to discover the likely real owners of those works.
const { Pool } = require('pg');
require('dotenv').config({ path: '.env.local' });
// Sentinel owner used for works without a real user account.
const SYSTEM_USER_ID = '00000000-0000-0000-0000-000000000000';
(async () => {
const pool = new Pool({ connectionString: process.env.LOCAL_DB_URL });
const client = await pool.connect();
try {
// All configured API keys (most recent first, capped at 200).
const apiKeys = await client.query(`
SELECT id, user_id, provider, supplier_name, model_name, note, type, created_at
FROM user_api_keys
ORDER BY created_at DESC
LIMIT 200
`);
// How many system-owned public works exist per model string.
const modelCounts = await client.query(`
SELECT params->>'model' AS model, COUNT(*)::int AS count
FROM works
WHERE is_public = true
AND status = 'completed'
AND user_id = $1
GROUP BY params->>'model'
ORDER BY count DESC
`, [SYSTEM_USER_ID]);
// System-owned works whose model string directly encodes an API key id
// ('custom:<uuid>'), joined to the key's owner profile.
const directMatches = await client.query(`
SELECT
w.params->>'model' AS work_model,
COUNT(*)::int AS work_count,
k.id AS api_key_id,
k.user_id,
p.email,
p.nickname,
k.provider,
k.supplier_name,
k.model_name,
k.note
FROM works w
JOIN user_api_keys k ON w.params->>'model' = CONCAT('custom:', k.id::text)
JOIN profiles p ON p.id = k.user_id
WHERE w.is_public = true
AND w.status = 'completed'
AND w.user_id = $1
GROUP BY w.params->>'model', k.id, k.user_id, p.email, p.nickname, k.provider, k.supplier_name, k.model_name, k.note
ORDER BY work_count DESC
`, [SYSTEM_USER_ID]);
console.log(JSON.stringify({
userApiKeys: apiKeys.rows,
anonymousGalleryModelCounts: modelCounts.rows,
directMatches: directMatches.rows,
}, null, 2));
} finally {
client.release();
await pool.end();
}
})().catch((error) => {
console.error(error);
process.exit(1);
});

View File

@@ -0,0 +1,53 @@
// Inspection script: summarizes public completed works by profile linkage
// (null owner / dangling owner / valid owner) and samples the ones whose
// owner profile is missing, plus recent profiles for cross-reference.
const { Pool } = require('pg');
require('dotenv').config({ path: '.env.local' });
(async () => {
const pool = new Pool({ connectionString: process.env.LOCAL_DB_URL });
const client = await pool.connect();
try {
// Aggregate counts of public completed works by owner-profile state.
const summary = await client.query(`
SELECT
COUNT(*)::int AS total,
COUNT(*) FILTER (WHERE w.user_id IS NULL)::int AS null_user_id,
COUNT(*) FILTER (WHERE p.id IS NULL)::int AS missing_profile,
COUNT(*) FILTER (WHERE p.id IS NOT NULL)::int AS linked_profile
FROM works w
LEFT JOIN profiles p ON p.id = w.user_id
WHERE w.is_public = true AND w.status = 'completed'
`);
// Up to 20 recent public works whose user_id has no matching profile.
const samples = await client.query(`
SELECT
w.id,
w.user_id,
w.title,
LEFT(COALESCE(w.prompt, ''), 80) AS prompt_preview,
w.params,
w.created_at
FROM works w
LEFT JOIN profiles p ON p.id = w.user_id
WHERE w.is_public = true AND w.status = 'completed' AND p.id IS NULL
ORDER BY w.created_at DESC
LIMIT 20
`);
// Recent profiles, for manually matching the orphan samples above.
const profileSamples = await client.query(`
SELECT id, email, nickname, role, created_at
FROM profiles
ORDER BY created_at DESC
LIMIT 20
`);
console.log(JSON.stringify({
summary: summary.rows[0],
missingSamples: samples.rows,
profileSamples: profileSamples.rows,
}, null, 2));
} finally {
client.release();
await pool.end();
}
})().catch((error) => {
console.error(error);
process.exit(1);
});

9
inspect_payload.js Normal file
View File

@@ -0,0 +1,9 @@
// Inspection script: prints the first 1200 characters of the payload of the
// three most recent generation jobs.
const { Client } = require('pg');
require('dotenv').config({ path: '/root/miaojingAI/.env.local' });
const c = new Client({ connectionString: process.env.LOCAL_DB_URL });
(async () => {
  await c.connect();
  try {
    const r = await c.query(`select id, payload from generation_jobs order by created_at desc limit 3`);
    for (const row of r.rows) {
      console.log('---', row.id);
      // Truncate to keep terminal output readable for large payloads.
      console.log(JSON.stringify(row.payload).slice(0, 1200));
    }
  } finally {
    // Bug fix: ensure the connection is closed when the query throws;
    // the original skipped c.end() on error.
    await c.end();
  }
})().catch(e => { console.error(e); process.exit(1); });

View File

@@ -0,0 +1,111 @@
// Investigation script for system-owned ("orphan") public works: tries to
// infer real owners via (a) model string, (b) identical prompt text, and
// (c) temporal proximity to works owned by real users. Read-only.
const { Pool } = require('pg');
require('dotenv').config({ path: '.env.local' });
// Sentinel owner used for works without a real user account.
const SYSTEM_USER_ID = '00000000-0000-0000-0000-000000000000';
(async () => {
const pool = new Pool({ connectionString: process.env.LOCAL_DB_URL });
const client = await pool.connect();
try {
// Per (model, owner) counts across all completed works, public and private.
const byModelOwner = await client.query(`
SELECT
COALESCE(w.params->>'model', '') AS model,
COALESCE(p.email, '[missing-profile]') AS email,
COALESCE(p.nickname, '') AS nickname,
w.user_id,
COUNT(*)::int AS count,
COUNT(*) FILTER (WHERE w.is_public = true)::int AS public_count,
COUNT(*) FILTER (WHERE w.is_public = false)::int AS private_count,
MIN(w.created_at) AS first_at,
MAX(w.created_at) AS last_at
FROM works w
LEFT JOIN profiles p ON p.id = w.user_id
WHERE w.status = 'completed'
GROUP BY model, w.user_id, p.email, p.nickname
ORDER BY model, count DESC
`);
// Model usage counts restricted to the system-owned public works.
const orphanModelTotals = await client.query(`
SELECT params->>'model' AS model, COUNT(*)::int AS count
FROM works
WHERE status = 'completed'
AND is_public = true
AND user_id = $1
GROUP BY params->>'model'
ORDER BY count DESC
`, [SYSTEM_USER_ID]);
// Candidate owners found by exact prompt match against user-owned works,
// ranked by match count and closest creation-time distance.
const ownersByPrompt = await client.query(`
WITH orphan AS (
SELECT id, prompt, created_at
FROM works
WHERE status = 'completed'
AND is_public = true
AND user_id = $1
AND COALESCE(prompt, '') <> ''
),
owned AS (
SELECT w.id, w.user_id, p.email, p.nickname, w.prompt, w.created_at
FROM works w
JOIN profiles p ON p.id = w.user_id
WHERE w.status = 'completed'
AND w.user_id <> $1
AND COALESCE(w.prompt, '') <> ''
)
SELECT
orphan.id AS orphan_id,
owned.user_id,
owned.email,
owned.nickname,
COUNT(*)::int AS matches,
MIN(ABS(EXTRACT(EPOCH FROM (owned.created_at - orphan.created_at))))::int AS best_seconds_apart
FROM orphan
JOIN owned ON owned.prompt = orphan.prompt
GROUP BY orphan.id, owned.user_id, owned.email, owned.nickname
ORDER BY matches DESC, best_seconds_apart
LIMIT 120
`, [SYSTEM_USER_ID]);
// For each orphan work, list user-owned works created within ±1 hour,
// as weaker circumstantial evidence of the real owner.
const maybeAdminByDates = await client.query(`
SELECT
w.id,
w.created_at,
w.params->>'model' AS model,
LEFT(w.prompt, 100) AS prompt,
(
SELECT json_agg(json_build_object(
'user_id', nearby.user_id,
'email', p.email,
'nickname', p.nickname,
'seconds_apart', ABS(EXTRACT(EPOCH FROM (nearby.created_at - w.created_at)))::int,
'prompt', LEFT(nearby.prompt, 80)
) ORDER BY ABS(EXTRACT(EPOCH FROM (nearby.created_at - w.created_at))))
FROM works nearby
JOIN profiles p ON p.id = nearby.user_id
WHERE nearby.user_id <> $1
AND nearby.status = 'completed'
AND nearby.created_at BETWEEN w.created_at - INTERVAL '1 hour' AND w.created_at + INTERVAL '1 hour'
LIMIT 8
) AS nearby_owned
FROM works w
WHERE w.status = 'completed'
AND w.is_public = true
AND w.user_id = $1
ORDER BY w.created_at DESC
LIMIT 80
`, [SYSTEM_USER_ID]);
console.log(JSON.stringify({
orphanModelTotals: orphanModelTotals.rows,
byModelOwner: byModelOwner.rows,
ownersByPrompt: ownersByPrompt.rows,
nearbyOwnedByTime: maybeAdminByDates.rows,
}, null, 2));
} finally {
client.release();
await pool.end();
}
})().catch(error => {
console.error(error);
process.exit(1);
});

View File

@@ -0,0 +1,59 @@
// Inspection script: looks for user-owned works that duplicate system-owned
// public works (same result/thumbnail URL, or same prompt within ±10 min),
// plus a sample of orphan works' params. Read-only.
const { Pool } = require('pg');
require('dotenv').config({ path: '.env.local' });
(async () => {
const pool = new Pool({ connectionString: process.env.LOCAL_DB_URL });
const client = await pool.connect();
try {
// Self-join works: each system-owned public work against any user-owned
// work sharing a URL or sharing a prompt created close in time.
const duplicates = await client.query(`
SELECT
public.id AS public_id,
public.user_id AS public_user_id,
private.id AS private_id,
private.user_id AS private_user_id,
p.email,
p.nickname,
public.result_url AS public_url,
private.result_url AS private_url,
LEFT(COALESCE(public.prompt, ''), 80) AS prompt_preview
FROM works public
JOIN works private
ON private.id <> public.id
AND private.user_id <> '00000000-0000-0000-0000-000000000000'
AND (
private.result_url = public.result_url
OR private.thumbnail_url = public.thumbnail_url
OR (
COALESCE(private.prompt, '') = COALESCE(public.prompt, '')
AND private.created_at BETWEEN public.created_at - INTERVAL '10 minutes' AND public.created_at + INTERVAL '10 minutes'
)
)
JOIN profiles p ON p.id = private.user_id
WHERE public.is_public = true
AND public.status = 'completed'
AND public.user_id = '00000000-0000-0000-0000-000000000000'
LIMIT 50
`);
// Small sample of orphan works' params to see which keys are stored.
const paramKeys = await client.query(`
SELECT id, params
FROM works
WHERE is_public = true
AND status = 'completed'
AND user_id = '00000000-0000-0000-0000-000000000000'
LIMIT 10
`);
console.log(JSON.stringify({
duplicateCandidateCount: duplicates.rowCount,
duplicateCandidates: duplicates.rows,
paramSamples: paramKeys.rows,
}, null, 2));
} finally {
client.release();
await pool.end();
}
})().catch((error) => {
console.error(error);
process.exit(1);
});

View File

@@ -0,0 +1,57 @@
-- Migration: provider catalog and recommended models for the key-management UI.
-- Idempotent: CREATE IF NOT EXISTS plus upsert seeding, safe to re-run.
CREATE TABLE IF NOT EXISTS api_providers (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
name VARCHAR(128) NOT NULL UNIQUE,
default_api_url TEXT,
default_model VARCHAR(255),
-- 'image' | 'video' | 'text' (see seed rows below).
type VARCHAR(16) NOT NULL DEFAULT 'image',
website TEXT,
is_active BOOLEAN NOT NULL DEFAULT true,
sort_order INTEGER NOT NULL DEFAULT 0,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ
);
-- Recommended models, optionally linked to a provider; link is severed
-- (SET NULL) rather than cascading when a provider is deleted.
CREATE TABLE IF NOT EXISTS model_recommendations (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
model_name VARCHAR(255) NOT NULL,
display_name VARCHAR(255),
type VARCHAR(16) NOT NULL DEFAULT 'image',
provider_id UUID REFERENCES api_providers(id) ON DELETE SET NULL,
is_active BOOLEAN NOT NULL DEFAULT true,
sort_order INTEGER NOT NULL DEFAULT 0,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ
);
CREATE INDEX IF NOT EXISTS api_providers_active_sort_idx ON api_providers (is_active, sort_order);
CREATE INDEX IF NOT EXISTS model_recommendations_active_type_sort_idx ON model_recommendations (is_active, type, sort_order);
CREATE INDEX IF NOT EXISTS model_recommendations_provider_idx ON model_recommendations (provider_id);
-- Seed/refresh the built-in providers; re-running updates existing rows.
INSERT INTO api_providers (name, default_api_url, default_model, type, website, is_active, sort_order)
VALUES
('硅基流动', 'https://api.siliconflow.cn/v1/images/generations', 'black-forest-labs/FLUX.1-schnell', 'image', 'https://cloud.siliconflow.cn', true, 10),
('mozheAPI', 'https://openai.mozhevip.top', '', 'image', 'https://openai.mozhevip.top', true, 20),
('OpenAI', 'https://api.openai.com/v1/images/generations', 'dall-e-3', 'image', NULL, true, 30),
('Stability AI', 'https://api.stability.ai/v1/generation/stable-diffusion-xl/text-to-image', 'stable-diffusion-xl', 'image', NULL, true, 40),
('Midjourney', '', 'midjourney-v6', 'image', NULL, true, 50),
('Runway', 'https://api.runwayml.com/v1/image_to_video', 'gen-3-alpha', 'video', NULL, true, 60),
('Pika', '', 'pika-1.0', 'video', NULL, true, 70),
('Kling', '', 'kling-v1', 'video', NULL, true, 80),
('DeepSeek', 'https://api.deepseek.com/v1/chat/completions', 'deepseek-chat', 'text', NULL, true, 90),
('OpenAI GPT', 'https://api.openai.com/v1/chat/completions', 'gpt-4o', 'text', NULL, true, 100),
('自定义', '', '', 'image', NULL, true, 999)
ON CONFLICT (name) DO UPDATE SET
default_api_url = EXCLUDED.default_api_url,
default_model = EXCLUDED.default_model,
type = EXCLUDED.type,
website = EXCLUDED.website,
is_active = EXCLUDED.is_active,
sort_order = EXCLUDED.sort_order,
updated_at = NOW();
-- Seed one default recommendation; guarded manually because the table has
-- no unique constraint to drive ON CONFLICT.
INSERT INTO model_recommendations (model_name, display_name, type, provider_id, is_active, sort_order)
SELECT 'gpt-image-2', 'gpt-image-2', 'image', NULL, true, 10
WHERE NOT EXISTS (
SELECT 1 FROM model_recommendations
WHERE model_name = 'gpt-image-2' AND type = 'image' AND provider_id IS NULL
);

6
next-env.d.ts vendored Normal file
View File

@@ -0,0 +1,6 @@
/// <reference types="next" />
/// <reference types="next/image-types/global" />
import "./.next/types/routes.d.ts";
// NOTE: This file should not be edited
// see https://nextjs.org/docs/app/api-reference/config/typescript for more information.

19
next.config.ts Normal file
View File

@@ -0,0 +1,19 @@
import type { NextConfig } from 'next';

// Global Next.js configuration for the platform.
const nextConfig: NextConfig = {
// outputFileTracingRoot: path.resolve(__dirname, '../../'), // Uncomment and add 'import path from "path"' if needed
/* config options here */
// Do not advertise the framework via the X-Powered-By header.
poweredByHeader: false,
allowedDevOrigins: ['*.dev.coze.site'],
images: {
// Allow next/image to load from any HTTPS host — generated assets can
// live on arbitrary provider CDNs.
remotePatterns: [
{
protocol: 'https',
hostname: '*',
pathname: '/**',
},
],
},
};

export default nextConfig;

116
package.json Normal file
View File

@@ -0,0 +1,116 @@
{
"name": "projects",
"version": "0.1.0",
"private": true,
"scripts": {
"build": "bash ./scripts/build.sh",
"build:with-install": "INSTALL_DEPS=1 bash ./scripts/build.sh",
"backup:create": "bash ./scripts/backup-create.sh",
"backup:list": "bash ./scripts/backup-list.sh",
"backup:restore": "bash ./scripts/backup-restore.sh",
"upgrade:run": "node ./scripts/admin-upgrade-runner.mjs",
"db:patch": "bash ./scripts/apply-database-patch.sh",
"dev": "bash ./scripts/dev.sh",
"preinstall": "npx only-allow pnpm",
"lint": "eslint",
"start": "bash ./scripts/start.sh",
"pm2:restart": "pm2 startOrReload ecosystem.config.cjs --update-env",
"pm2:save": "pm2 save",
"ts-check": "tsc -p tsconfig.json",
"check:boundaries": "bash ./scripts/check-boundaries.sh"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.958.0",
"@aws-sdk/lib-storage": "^3.958.0",
"@hookform/resolvers": "^5.2.2",
"@radix-ui/react-accordion": "^1.2.12",
"@radix-ui/react-alert-dialog": "^1.1.15",
"@radix-ui/react-aspect-ratio": "^1.1.8",
"@radix-ui/react-avatar": "^1.1.11",
"@radix-ui/react-checkbox": "^1.3.3",
"@radix-ui/react-collapsible": "^1.1.12",
"@radix-ui/react-context-menu": "^2.2.16",
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-dropdown-menu": "^2.1.16",
"@radix-ui/react-hover-card": "^1.1.15",
"@radix-ui/react-label": "^2.1.8",
"@radix-ui/react-menubar": "^1.1.16",
"@radix-ui/react-navigation-menu": "^1.2.14",
"@radix-ui/react-popover": "^1.1.15",
"@radix-ui/react-progress": "^1.1.8",
"@radix-ui/react-radio-group": "^1.3.8",
"@radix-ui/react-scroll-area": "^1.2.10",
"@radix-ui/react-select": "^2.2.6",
"@radix-ui/react-separator": "^1.1.8",
"@radix-ui/react-slider": "^1.3.6",
"@radix-ui/react-slot": "^1.2.4",
"@radix-ui/react-switch": "^1.2.6",
"@radix-ui/react-tabs": "^1.1.13",
"@radix-ui/react-toggle": "^1.1.10",
"@radix-ui/react-toggle-group": "^1.1.11",
"@radix-ui/react-tooltip": "^1.2.8",
"@supabase/supabase-js": "2.95.3",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"cmdk": "^1.1.1",
"coze-coding-dev-sdk": "^0.7.21",
"date-fns": "^4.1.0",
"dotenv": "^17.2.3",
"drizzle-orm": "^0.45.2",
"drizzle-zod": "^0.8.3",
"embla-carousel-react": "^8.6.0",
"input-otp": "^1.4.2",
"lucide-react": "^0.468.0",
"next": "16.2.4",
"next-themes": "^0.4.6",
"pg": "^8.17.2",
"react": "19.2.3",
"react-day-picker": "^9.13.0",
"react-dom": "19.2.3",
"react-hook-form": "^7.70.0",
"react-markdown": "^10.1.0",
"react-resizable-panels": "^4.2.0",
"remark-gfm": "^4.0.1",
"sharp": "^0.34.5",
"sonner": "^2.0.7",
"tailwind-merge": "^2.6.0",
"tw-animate-css": "^1.4.0",
"vaul": "^1.1.2",
"zod": "^4.3.5",
"@xyflow/react": "^12.10.2",
"ag-psd": "^30.1.1"
},
"devDependencies": {
"@react-dev-inspector/babel-plugin": "^2.0.1",
"@react-dev-inspector/middleware": "^2.0.1",
"@tailwindcss/postcss": "^4",
"@types/node": "^20",
"@types/pg": "^8.16.0",
"@types/react": "^19",
"@types/react-dom": "^19",
"drizzle-kit": "^0.31.8",
"eslint": "^9",
"eslint-config-next": "16.2.4",
"only-allow": "^1.2.2",
"react-dev-inspector": "^2.0.1",
"recharts": "2.15.4",
"shadcn": "latest",
"tailwindcss": "^4",
"tsup": "^8.3.5",
"tsx": "^4.19.2",
"typescript": "^5"
},
"packageManager": "pnpm@9.0.0",
"pnpm": {
"overrides": {
"@langchain/core": "1.1.44",
"langsmith": "0.6.0",
"fast-xml-parser": "5.7.0",
"postcss": "8.5.10",
"uuid": "14.0.0"
}
},
"engines": {
"pnpm": ">=9.0.0"
}
}

View File

@@ -0,0 +1,4 @@
-- Patch: add supplier metadata to user_api_keys and supporting indexes.
-- Idempotent (IF NOT EXISTS everywhere), safe to re-run.
ALTER TABLE user_api_keys ADD COLUMN IF NOT EXISTS supplier_name VARCHAR(128);
-- Key category; presumably 'image' | 'video' | 'text' as in api_providers — confirm.
ALTER TABLE user_api_keys ADD COLUMN IF NOT EXISTS type VARCHAR(16) NOT NULL DEFAULT 'image';
-- Speed up per-user active-key lookups and (user, result_url) matching in works.
CREATE INDEX IF NOT EXISTS user_api_keys_user_active_idx ON user_api_keys (user_id, is_active);
CREATE INDEX IF NOT EXISTS works_user_result_url_idx ON works (user_id, result_url);

13502
pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

7
postcss.config.mjs Normal file
View File

@@ -0,0 +1,7 @@
const config = {
plugins: {
'@tailwindcss/postcss': {},
},
};
export default config;

BIN
public/apple-touch-icon.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 46 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 898 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 936 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 902 KiB

BIN
public/favicon.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.8 KiB

1
public/file.svg Normal file
View File

@@ -0,0 +1 @@
<svg fill="none" viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg"><path d="M14.5 13.5V5.41a1 1 0 0 0-.3-.7L9.8.29A1 1 0 0 0 9.08 0H1.5v13.5A2.5 2.5 0 0 0 4 16h8a2.5 2.5 0 0 0 2.5-2.5m-1.5 0v-7H8v-5H3v12a1 1 0 0 0 1 1h8a1 1 0 0 0 1-1M9.5 5V2.12L12.38 5zM5.13 5h-.62v1.25h2.12V5zm-.62 3h7.12v1.25H4.5zm.62 3h-.62v1.25h7.12V11z" clip-rule="evenodd" fill="#666" fill-rule="evenodd"/></svg>

After

Width:  |  Height:  |  Size: 391 B

1
public/globe.svg Normal file
View File

@@ -0,0 +1 @@
<svg fill="none" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16"><g clip-path="url(#a)"><path fill-rule="evenodd" clip-rule="evenodd" d="M10.27 14.1a6.5 6.5 0 0 0 3.67-3.45q-1.24.21-2.7.34-.31 1.83-.97 3.1M8 16A8 8 0 1 0 8 0a8 8 0 0 0 0 16m.48-1.52a7 7 0 0 1-.96 0H7.5a4 4 0 0 1-.84-1.32q-.38-.89-.63-2.08a40 40 0 0 0 3.92 0q-.25 1.2-.63 2.08a4 4 0 0 1-.84 1.31zm2.94-4.76q1.66-.15 2.95-.43a7 7 0 0 0 0-2.58q-1.3-.27-2.95-.43a18 18 0 0 1 0 3.44m-1.27-3.54a17 17 0 0 1 0 3.64 39 39 0 0 1-4.3 0 17 17 0 0 1 0-3.64 39 39 0 0 1 4.3 0m1.1-1.17q1.45.13 2.69.34a6.5 6.5 0 0 0-3.67-3.44q.65 1.26.98 3.1M8.48 1.5l.01.02q.41.37.84 1.31.38.89.63 2.08a40 40 0 0 0-3.92 0q.25-1.2.63-2.08a4 4 0 0 1 .85-1.32 7 7 0 0 1 .96 0m-2.75.4a6.5 6.5 0 0 0-3.67 3.44 29 29 0 0 1 2.7-.34q.31-1.83.97-3.1M4.58 6.28q-1.66.16-2.95.43a7 7 0 0 0 0 2.58q1.3.27 2.95.43a18 18 0 0 1 0-3.44m.17 4.71q-1.45-.12-2.69-.34a6.5 6.5 0 0 0 3.67 3.44q-.65-1.27-.98-3.1" fill="#666"/></g><defs><clipPath id="a"><path fill="#fff" d="M0 0h16v16H0z"/></clipPath></defs></svg>

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 117 KiB

BIN
public/logo.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.5 KiB

View File

@@ -0,0 +1,6 @@
<svg width="40" height="40" viewBox="0 0 40 40" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M10 10L20 20L10 30" stroke="#3B82F6" stroke-width="4" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M20 10L30 20L20 30" stroke="#3B82F6" stroke-width="4" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M15 15L25 25L15 35" stroke="#3B82F6" stroke-width="4" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M25 15L35 25L25 35" stroke="#3B82F6" stroke-width="4" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

After

Width:  |  Height:  |  Size: 558 B

1
public/next.svg Normal file
View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 394 80"><path fill="#000" d="M262 0h68.5v12.7h-27.2v66.6h-13.6V12.7H262V0ZM149 0v12.7H94v20.4h44.3v12.6H94v21h55v12.6H80.5V0h68.7zm34.3 0h-17.8l63.8 79.4h17.9l-32-39.7 32-39.6h-17.9l-23 28.6-23-28.6zm18.3 56.7-9-11-27.1 33.7h17.8l18.3-22.7z"/><path fill="#000" d="M81 79.3 17 0H0v79.3h13.6V17l50.2 62.3H81Zm252.6-.4c-1 0-1.8-.4-2.5-1s-1.1-1.6-1.1-2.6.3-1.8 1-2.5 1.6-1 2.6-1 1.8.3 2.5 1a3.4 3.4 0 0 1 .6 4.3 3.7 3.7 0 0 1-3 1.8zm23.2-33.5h6v23.3c0 2.1-.4 4-1.3 5.5a9.1 9.1 0 0 1-3.8 3.5c-1.6.8-3.5 1.3-5.7 1.3-2 0-3.7-.4-5.3-1s-2.8-1.8-3.7-3.2c-.9-1.3-1.4-3-1.4-5h6c.1.8.3 1.6.7 2.2s1 1.2 1.6 1.5c.7.4 1.5.5 2.4.5 1 0 1.8-.2 2.4-.6a4 4 0 0 0 1.6-1.8c.3-.8.5-1.8.5-3V45.5zm30.9 9.1a4.4 4.4 0 0 0-2-3.3 7.5 7.5 0 0 0-4.3-1.1c-1.3 0-2.4.2-3.3.5-.9.4-1.6 1-2 1.6a3.5 3.5 0 0 0-.3 4c.3.5.7.9 1.3 1.2l1.8 1 2 .5 3.2.8c1.3.3 2.5.7 3.7 1.2a13 13 0 0 1 3.2 1.8 8.1 8.1 0 0 1 3 6.5c0 2-.5 3.7-1.5 5.1a10 10 0 0 1-4.4 3.5c-1.8.8-4.1 1.2-6.8 1.2-2.6 0-4.9-.4-6.8-1.2-2-.8-3.4-2-4.5-3.5a10 10 0 0 1-1.7-5.6h6a5 5 0 0 0 3.5 4.6c1 .4 2.2.6 3.4.6 1.3 0 2.5-.2 3.5-.6 1-.4 1.8-1 2.4-1.7a4 4 0 0 0 .8-2.4c0-.9-.2-1.6-.7-2.2a11 11 0 0 0-2.1-1.4l-3.2-1-3.8-1c-2.8-.7-5-1.7-6.6-3.2a7.2 7.2 0 0 1-2.4-5.7 8 8 0 0 1 1.7-5 10 10 0 0 1 4.3-3.5c2-.8 4-1.2 6.4-1.2 2.3 0 4.4.4 6.2 1.2 1.8.8 3.2 2 4.3 3.4 1 1.4 1.5 3 1.5 5h-5.8z"/></svg>

After

Width:  |  Height:  |  Size: 1.3 KiB

1
public/vercel.svg Normal file
View File

@@ -0,0 +1 @@
<svg fill="none" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1155 1000"><path d="m577.3 0 577.4 1000H0z" fill="#fff"/></svg>

After

Width:  |  Height:  |  Size: 128 B

1
public/window.svg Normal file
View File

@@ -0,0 +1 @@
<svg fill="none" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16"><path fill-rule="evenodd" clip-rule="evenodd" d="M1.5 2.5h13v10a1 1 0 0 1-1 1h-11a1 1 0 0 1-1-1zM0 1h16v11.5a2.5 2.5 0 0 1-2.5 2.5h-11A2.5 2.5 0 0 1 0 12.5zm3.75 4.5a.75.75 0 1 0 0-1.5.75.75 0 0 0 0 1.5M7 4.75a.75.75 0 1 1-1.5 0 .75.75 0 0 1 1.5 0m1.75.75a.75.75 0 1 0 0-1.5.75.75 0 0 0 0 1.5" fill="#666"/></svg>

After

Width:  |  Height:  |  Size: 385 B

View File

@@ -0,0 +1,98 @@
// Repair script (single transaction): reassigns system-owned public works to
// their inferred real owners using two conservative heuristics:
//   1. a 'custom:<key>' model string used by exactly one real user, and
//   2. an identical prompt by exactly one real user within ±30 minutes.
// Only unambiguous matches are applied; any error rolls everything back.
const { Pool } = require('pg');
require('dotenv').config({ path: '.env.local' });
// Sentinel owner used for works without a real user account.
const SYSTEM_USER_ID = '00000000-0000-0000-0000-000000000000';
(async () => {
const pool = new Pool({ connectionString: process.env.LOCAL_DB_URL });
const client = await pool.connect();
try {
await client.query('BEGIN');
// Heuristic 1: custom model strings with a single distinct real owner.
const byUniqueCustomModel = await client.query(`
WITH model_owners AS (
SELECT
params->>'model' AS model,
ARRAY_AGG(DISTINCT user_id) FILTER (WHERE user_id IS NOT NULL AND user_id <> $1) AS owner_ids
FROM works
WHERE status = 'completed'
AND params->>'model' LIKE 'custom:%'
AND EXISTS (SELECT 1 FROM profiles p WHERE p.id = works.user_id)
GROUP BY params->>'model'
),
unique_model_owners AS (
SELECT model, owner_ids[1] AS owner_id
FROM model_owners
WHERE CARDINALITY(owner_ids) = 1
),
updated AS (
UPDATE works w
SET user_id = umo.owner_id,
updated_at = NOW()
FROM unique_model_owners umo
WHERE w.user_id = $1
AND w.status = 'completed'
AND w.is_public = true
AND w.params->>'model' = umo.model
RETURNING w.id, w.user_id, w.params->>'model' AS model
)
SELECT COUNT(*)::int AS fixed_count, json_agg(updated) AS rows
FROM updated
`, [SYSTEM_USER_ID]);
// Heuristic 2: exact prompt matches within a 30-minute window, applied
// only when every candidate match points at the same single owner.
const byExactPromptTime = await client.query(`
WITH candidates AS (
SELECT DISTINCT ON (public.id)
public.id AS public_id,
private.user_id AS owner_user_id,
ABS(EXTRACT(EPOCH FROM (private.created_at - public.created_at))) AS time_distance
FROM works public
JOIN works private
ON private.id <> public.id
AND private.user_id IS NOT NULL
AND private.user_id <> $1
AND COALESCE(private.prompt, '') <> ''
AND private.prompt = public.prompt
AND private.created_at BETWEEN public.created_at - INTERVAL '30 minutes' AND public.created_at + INTERVAL '30 minutes'
JOIN profiles p ON p.id = private.user_id
WHERE public.user_id = $1
AND public.is_public = true
AND public.status = 'completed'
ORDER BY public.id, time_distance
),
unambiguous AS (
SELECT public_id, MIN(owner_user_id::text)::uuid AS owner_user_id
FROM candidates
GROUP BY public_id
HAVING COUNT(DISTINCT owner_user_id) = 1
),
updated AS (
UPDATE works w
SET user_id = unambiguous.owner_user_id,
updated_at = NOW()
FROM unambiguous
WHERE w.id = unambiguous.public_id
AND w.user_id = $1
RETURNING w.id, w.user_id, w.params->>'model' AS model
)
SELECT COUNT(*)::int AS fixed_count, json_agg(updated) AS rows
FROM updated
`, [SYSTEM_USER_ID]);
await client.query('COMMIT');
// Operator-facing summary of what each heuristic fixed.
console.log(JSON.stringify({
byUniqueCustomModel: byUniqueCustomModel.rows[0] || { fixed_count: 0, rows: [] },
byExactPromptTime: byExactPromptTime.rows[0] || { fixed_count: 0, rows: [] },
}, null, 2));
} catch (error) {
await client.query('ROLLBACK');
throw error;
} finally {
client.release();
await pool.end();
}
})().catch(error => {
console.error(error);
process.exit(1);
});

564
scripts/admin-upgrade-runner.mjs Executable file
View File

@@ -0,0 +1,564 @@
#!/usr/bin/env node
// Admin upgrade runner: applies a hot (file-only) or cold (rebuild + restart)
// upgrade package, persisting progress to a state.json that the admin UI polls.
// Helper functions (parseArgs, loadEnvFile, readState, updateState, logStep,
// rollbackAfterFailure, ...) are defined later in this file.
import { spawnSync } from 'node:child_process';
import { createHash } from 'node:crypto';
import fs from 'node:fs';
import path from 'node:path';
const args = parseArgs(process.argv.slice(2));
const projectRoot = path.resolve(args.project || process.cwd());
loadEnvFile(path.join(projectRoot, '.env.local'));
// Where job state, extraction dirs and backups live; prefers an explicit env
// override, then a sibling of LOCAL_STORAGE_DIR, then a folder in the project.
const stateRoot = path.resolve(
process.env.UPGRADE_STATE_DIR ||
(process.env.LOCAL_STORAGE_DIR ? path.join(path.dirname(process.env.LOCAL_STORAGE_DIR), 'upgrade') : path.join(projectRoot, 'upgrade-state')),
);
const jobId = requireArg(args, 'job-id');
const mode = requireArg(args, 'mode');
const dryRun = args['dry-run'] === 'true';
const packagePath = path.resolve(requireArg(args, 'package'));
const packageName = args['package-name'] || path.basename(packagePath);
const jobDir = path.join(stateRoot, 'jobs', jobId);
const stateFile = path.join(jobDir, 'state.json');
const extractDir = path.join(jobDir, 'extract');
const sourceBackupFile = path.join(jobDir, `source-before-${jobId}.tar.gz`);
// Hot updates may only touch static assets under public/ and a few root files.
const HOT_ALLOWED_PREFIXES = ['public/'];
const HOT_ALLOWED_FILES = new Set([
'manifest.json',
'robots.txt',
'sitemap.xml',
'favicon.ico',
'icon.png',
'apple-icon.png',
]);
// Cold updates may additionally replace source, scripts, database and docs,
// plus the build/config files listed below.
const COLD_ALLOWED_PREFIXES = ['src/', 'public/', 'scripts/', 'database/', 'docs/'];
const COLD_ALLOWED_FILES = new Set([
'manifest.json',
'package.json',
'pnpm-lock.yaml',
'next.config.js',
'next.config.mjs',
'next.config.ts',
'tsconfig.json',
'postcss.config.mjs',
'components.json',
'ecosystem.config.cjs',
]);
// Paths a package must never contain (either at the top level or anywhere).
const BLOCKED_TOP_LEVEL_NAMES = new Set(['.git', 'node_modules', '.next', 'dist', 'backups', 'local-storage', 'upgrade-state']);
const BLOCKED_ANYWHERE_NAMES = new Set(['.git', 'node_modules', '.next']);
// Resume existing job state if present, otherwise start a fresh record.
let state = readState() || {
id: jobId,
mode,
status: 'queued',
step: 'queued',
message: '升级任务已创建',
progress: 0,
packageName,
startedAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
logs: [],
};
// Top-level failure handler: dry runs just record a preflight failure; real
// runs attempt an automatic rollback and record if that also fails.
main().catch(error => {
log(`fatal: ${error instanceof Error ? error.stack || error.message : String(error)}`);
if (dryRun) {
updateState({
status: 'failed',
step: 'preflight_failed',
progress: 100,
message: '升级包预检失败,请按错误信息调整升级包',
error: error instanceof Error ? error.message : '升级包预检异常退出',
finishedAt: new Date().toISOString(),
});
return;
}
rollbackAfterFailure(error instanceof Error ? error.message : '升级任务异常退出').catch(rollbackError => {
updateState({
status: 'rollback_failed',
step: 'rollback_failed',
progress: 100,
message: '升级失败,自动回滚也失败,请立即人工检查',
error: `${error instanceof Error ? error.message : String(error)}; rollback: ${rollbackError instanceof Error ? rollbackError.message : String(rollbackError)}`,
finishedAt: new Date().toISOString(),
});
});
});
async function main() {
ensureDir(jobDir);
updateState({
status: 'running',
step: 'preflight',
progress: 5,
message: '正在检查升级包与运行环境',
startedAt: state.startedAt || new Date().toISOString(),
});
logStep('开始升级任务', `任务 ${jobId} 使用${mode === 'hot' ? '热更新' : '冷更新'}模式,升级包 ${packageName}${dryRun ? ',仅执行预检' : ''}`);
if (mode !== 'hot' && mode !== 'cold') {
throw new Error('升级方式无效');
}
if (!fs.existsSync(packagePath)) {
throw new Error(`升级包不存在: ${packagePath}`);
}
if (!isAllowedArchive(packageName) && !isAllowedArchive(packagePath)) {
throw new Error('仅支持 .tar、.tar.gz、.tgz 升级包');
}
logStep('校验升级包', '正在读取压缩包目录并检查格式');
run('tar', tarReadArgs('list', packagePath), { cwd: projectRoot, label: '检查升级包结构' });
resetDir(extractDir);
run('tar', [...tarReadArgs('extract', packagePath), '-C', extractDir], { cwd: projectRoot, label: '解压升级包' });
const payloadRoot = resolvePayloadRoot(extractDir);
const files = listFiles(payloadRoot);
if (files.length === 0) {
throw new Error('升级包为空');
}
const validation = validateFiles(files, mode);
logStep('升级包内容', `校验通过,共 ${files.length} 个文件:${files.slice(0, 20).join('、')}${files.length > 20 ? `${files.length} 个文件` : ''}`);
updateState({
step: 'validated',
progress: 14,
message: `升级包校验通过,共 ${files.length} 个文件`,
restartRequired: mode === 'cold' || validation.requiresRestart,
packageHash: sha256(packagePath),
changedFiles: files,
dryRun,
});
if (dryRun) {
logStep('预检完成', `升级包可用于${mode === 'hot' ? '热更新' : '冷更新'}${mode === 'cold' || validation.requiresRestart ? '需要重启平台' : '无需重启平台'}`);
updateState({
status: 'succeeded',
step: 'preflight_completed',
progress: 100,
message: `预检通过:共 ${files.length} 个文件,${mode === 'cold' || validation.requiresRestart ? '执行时需要重启平台' : '执行时无需重启平台'}`,
finishedAt: new Date().toISOString(),
restartRequired: mode === 'cold' || validation.requiresRestart,
dryRun: true,
});
return;
}
updateState({ step: 'backup_data', progress: 22, message: '正在创建数据库、存储与环境配置备份' });
logStep('创建数据备份', '开始备份数据库、存储目录和环境配置');
const backupFile = runCapture('bash', ['./scripts/backup-create.sh'], {
cwd: projectRoot,
label: '创建数据备份',
env: { BACKUP_DIR: path.join(stateRoot, 'data-backups'), COZE_WORKSPACE_PATH: projectRoot },
}).trim().split('\n').pop();
if (!backupFile || !fs.existsSync(backupFile)) {
throw new Error('数据备份创建失败');
}
updateState({ backupFile });
logStep('数据备份完成', `备份文件:${backupFile}`);
updateState({ step: 'backup_source', progress: 30, message: '正在创建源码快照' });
logStep('创建源码快照', '开始保存升级前源码状态');
createSourceBackup(sourceBackupFile);
updateState({ sourceBackupFile });
logStep('源码快照完成', `快照文件:${sourceBackupFile}`);
updateState({ step: 'apply', progress: 42, message: '正在应用升级包文件' });
logStep('应用升级文件', '开始覆盖升级包中的文件');
updateState({ preExistingFiles: files.filter(file => fs.existsSync(path.join(projectRoot, file))) });
applyFiles(payloadRoot, files);
logStep('升级文件应用完成', `已应用 ${files.filter(file => file !== 'manifest.json').length} 个文件`);
if (mode === 'hot') {
updateState({ step: 'verify_hot', progress: 70, message: '正在验证热更新文件' });
logStep('热更新验证', '正在执行 TypeScript 校验,确认补丁不会破坏现有代码');
run('pnpm', ['run', 'ts-check'], { cwd: projectRoot, label: 'TypeScript 校验' });
logStep('热更新完成', '升级成功,平台未重启,前端业务不中断');
updateState({
status: 'succeeded',
step: 'completed',
progress: 100,
message: '热更新成功,平台未重启',
finishedAt: new Date().toISOString(),
restartRequired: false,
});
return;
}
const dependencyChanged = files.some(file => file === 'package.json' || file === 'pnpm-lock.yaml');
if (dependencyChanged) {
updateState({ step: 'install', progress: 54, message: '依赖文件发生变化,正在安装依赖' });
logStep('安装依赖', '检测到 package.json 或 pnpm-lock.yaml 变化,开始安装依赖');
run('pnpm', ['install', '--frozen-lockfile', '--prod=false'], { cwd: projectRoot, label: '安装依赖' });
logStep('依赖安装完成', '依赖安装已完成');
}
updateState({ step: 'ts_check', progress: 64, message: '正在执行 TypeScript 校验' });
logStep('代码校验', '开始执行 TypeScript 校验');
run('pnpm', ['run', 'ts-check'], { cwd: projectRoot, label: 'TypeScript 校验' });
logStep('代码校验完成', 'TypeScript 校验已通过');
updateState({ step: 'build', progress: 75, message: '正在构建平台' });
logStep('平台构建', '开始构建生产版本');
run('pnpm', ['run', 'build'], { cwd: projectRoot, label: '构建平台' });
logStep('平台构建完成', '生产构建已完成');
updateState({ step: 'restart', progress: 94, message: '构建已完成,正在后台重启平台进程' });
logStep('冷更新完成', '升级文件已应用并完成构建,将在后台重启平台进程');
updateState({
status: 'succeeded',
step: 'completed',
progress: 100,
message: '冷更新成功,平台正在后台重启',
finishedAt: new Date().toISOString(),
restartRequired: true,
});
restartPlatform({ detached: true });
}
async function rollbackAfterFailure(message) {
const originalError = message;
logStep('升级失败', `失败原因:${originalError}`);
updateState({
status: 'rolling_back',
step: 'rolling_back',
progress: 96,
message: '升级失败,正在自动回滚到升级前状态',
error: originalError,
});
if (fs.existsSync(sourceBackupFile)) {
logStep('回滚源码', '正在恢复升级前源码快照,并移除升级中新建的文件');
restoreSourceBackup(sourceBackupFile);
logStep('源码回滚完成', '源码已恢复到升级开始前状态');
}
if (state.backupFile && fs.existsSync(state.backupFile)) {
logStep('回滚数据', '正在恢复数据库、存储目录和环境配置备份');
run('bash', ['./scripts/backup-restore.sh', state.backupFile], {
cwd: projectRoot,
label: '恢复数据备份',
env: { COZE_WORKSPACE_PATH: projectRoot },
});
logStep('数据回滚完成', '数据库、存储目录和环境配置已恢复');
}
if (mode === 'cold') {
try {
logStep('回滚后重建', '冷更新失败后正在重新构建回滚版本');
run('pnpm', ['run', 'build'], { cwd: projectRoot, label: '回滚后重新构建' });
logStep('回滚后重启', '将后台重启回滚后的平台版本');
} catch (error) {
throw new Error(`回滚后平台恢复检查失败: ${error instanceof Error ? error.message : String(error)}`);
}
}
logStep('自动回滚完成', '升级失败,但已自动恢复到升级开始前状态');
updateState({
status: 'rolled_back',
step: 'rolled_back',
progress: 100,
message: '升级失败,已自动回滚到升级开始前状态',
error: originalError,
finishedAt: new Date().toISOString(),
});
if (mode === 'cold') {
restartPlatform({ detached: true });
}
}
/**
 * Parse `--key value` / `--flag` style CLI arguments into a plain object.
 * An option with no following value (or followed by another `--option`)
 * is stored as the string 'true'. Non-option tokens are ignored.
 *
 * @param {string[]} argv - Raw argument list (without node/script entries).
 * @returns {Record<string, string>} Parsed option map.
 */
function parseArgs(argv) {
  const options = {};
  let cursor = 0;
  while (cursor < argv.length) {
    const token = argv[cursor];
    cursor += 1;
    if (!token.startsWith('--')) continue;
    const name = token.slice(2);
    const candidate = argv[cursor];
    if (candidate && !candidate.startsWith('--')) {
      options[name] = candidate;
      cursor += 1;
    } else {
      options[name] = 'true';
    }
  }
  return options;
}
/**
 * Fetch a required CLI option from a parsed argument map.
 *
 * @param {Record<string, string>} parsed - Output of parseArgs.
 * @param {string} key - Option name without the leading '--'.
 * @returns {string} The option's value.
 * @throws {Error} when the option is missing or empty.
 */
function requireArg(parsed, key) {
  const value = parsed[key];
  if (value) return value;
  throw new Error(`missing --${key}`);
}
/**
 * Load KEY=VALUE pairs from a dotenv-style file into process.env.
 *
 * Lines that are blank, comments (#) or lack an '=' are skipped. Values
 * wrapped in matching single or double quotes are unquoted. Variables that
 * already exist in the environment are never overwritten — including ones
 * explicitly set to an empty string (the previous truthiness check
 * `!process.env[key]` clobbered those).
 *
 * @param {string} file - Path to the env file; missing files are ignored.
 */
function loadEnvFile(file) {
  if (!fs.existsSync(file)) return;
  const lines = fs.readFileSync(file, 'utf8').split(/\r?\n/);
  for (const line of lines) {
    const trimmed = line.trim();
    if (!trimmed || trimmed.startsWith('#') || !trimmed.includes('=')) continue;
    const index = trimmed.indexOf('=');
    const key = trimmed.slice(0, index).trim();
    let value = trimmed.slice(index + 1).trim();
    if ((value.startsWith('"') && value.endsWith('"')) || (value.startsWith("'") && value.endsWith("'"))) {
      value = value.slice(1, -1);
    }
    // Only fill in variables that are truly absent; never clobber values
    // the caller already exported (even intentional empty strings).
    if (process.env[key] === undefined) process.env[key] = value;
  }
}
/**
 * Read the persisted upgrade state JSON from disk.
 * @returns {object|null} Parsed state, or null when the file is missing
 *   or contains invalid JSON.
 */
function readState() {
  let parsed = null;
  try {
    parsed = JSON.parse(fs.readFileSync(stateFile, 'utf8'));
  } catch {
    // Missing or corrupt state file: treat as "no previous state".
  }
  return parsed;
}
/**
 * Merge a patch into the in-memory upgrade state and persist it atomically.
 * The JSON is written to a temp file and renamed into place so concurrent
 * readers never observe a partially written document.
 *
 * @param {object} patch - Fields to overwrite on the current state.
 */
function updateState(patch) {
  const nextState = {
    ...state,
    ...patch,
    updatedAt: new Date().toISOString(),
    logs: patch.logs || state.logs || [],
  };
  state = nextState;
  ensureDir(path.dirname(stateFile));
  const stagingFile = `${stateFile}.tmp`;
  fs.writeFileSync(stagingFile, `${JSON.stringify(nextState, null, 2)}\n`);
  fs.renameSync(stagingFile, stateFile);
}
/**
 * Append a timestamped line to the persisted log buffer (capped at the
 * most recent 1000 entries) and flush via updateState.
 * @param {string} line - Message to record.
 */
function log(line) {
  const entry = `[${new Date().toISOString()}] ${line}`;
  const history = (state.logs || []).concat(entry);
  updateState({ logs: history.slice(-1000) });
}
/**
 * Log a named step, optionally followed by a detail suffix joined by '：'.
 * @param {string} title - Step name.
 * @param {string} [detail] - Optional elaboration.
 */
function logStep(title, detail = '') {
  if (detail) {
    log(`${title}：${detail}`);
  } else {
    log(title);
  }
}
/**
 * Create a directory (and any missing parents) with owner-only permissions
 * (0700). Existing directories are left untouched.
 * @param {string} dir - Directory path to guarantee.
 */
function ensureDir(dir) {
  const options = { recursive: true, mode: 0o700 };
  fs.mkdirSync(dir, options);
}
/**
 * Remove a directory tree if present, then recreate it empty (0700).
 * @param {string} dir - Directory path to reset.
 */
function resetDir(dir) {
  const removal = { recursive: true, force: true };
  fs.rmSync(dir, removal);
  ensureDir(dir);
}
/**
 * Execute a command via runCapture, discarding its captured stdout.
 * Throws (through runCapture) when the command exits non-zero.
 */
function run(command, commandArgs, options = {}) {
  void runCapture(command, commandArgs, options); // output already logged by runCapture
}
/**
 * Run a command synchronously, mirror its output into the upgrade log and
 * return the captured stdout.
 *
 * @param {string} command - Executable name (no shell interpretation).
 * @param {string[]} commandArgs - Arguments passed verbatim.
 * @param {{cwd?: string, label?: string, env?: object}} [options] - Working
 *   directory, log label and extra environment overrides.
 * @returns {string} Captured stdout ('' when the command produced none).
 * @throws {Error} when the process cannot be spawned or exits non-zero.
 */
function runCapture(command, commandArgs, options = {}) {
  const label = options.label || command;
  logStep(label, `执行命令 ${command} ${commandArgs.join(' ')}`);
  const result = spawnSync(command, commandArgs, {
    cwd: options.cwd || projectRoot,
    env: { ...process.env, COREPACK_HOME: process.env.COREPACK_HOME || '/tmp/corepack', ...(options.env || {}) },
    encoding: 'utf8',
    maxBuffer: 20 * 1024 * 1024,
  });
  const output = `${result.stdout || ''}${result.stderr || ''}`.trim();
  if (output) {
    // Keep only the tail so a chatty build cannot flood the state file.
    for (const line of output.split(/\r?\n/).slice(-180)) log(`${label}输出：${line}`);
  }
  if (result.error) {
    // The spawn itself failed (e.g. ENOENT): status is null in that case,
    // so surface the underlying cause instead of "退出码 unknown".
    throw new Error(`${label}失败，无法启动命令: ${result.error.message}`);
  }
  if (result.status !== 0) {
    throw new Error(`${label}失败，退出码 ${result.status ?? 'unknown'}`);
  }
  return result.stdout || '';
}
/**
 * Whether the uploaded package file has a supported archive extension.
 * @param {string} file - File name or path.
 * @returns {boolean}
 */
function isAllowedArchive(file) {
  const allowedSuffixes = ['.tar', '.tar.gz', '.tgz'];
  return allowedSuffixes.some(suffix => file.endsWith(suffix));
}
/**
 * Determine the effective root of an extracted upgrade package. Archives
 * wrapped in a single top-level folder (ignoring macOS __MACOSX residue)
 * resolve to that folder; otherwise the extraction root itself is used.
 * @param {string} root - Extraction directory.
 * @returns {string} Directory containing the package files.
 */
function resolvePayloadRoot(root) {
  const entries = fs
    .readdirSync(root, { withFileTypes: true })
    .filter(entry => entry.name !== '__MACOSX');
  const [only] = entries;
  if (entries.length === 1 && only.isDirectory()) {
    return path.join(root, only.name);
  }
  return root;
}
/**
 * Recursively list all regular files under root as sorted POSIX-style
 * relative paths. `.DS_Store` entries are ignored; anything that is not a
 * plain file or directory (symlink, socket, ...) aborts the upgrade.
 *
 * @param {string} root - Directory to scan.
 * @returns {string[]} Sorted relative file paths.
 * @throws {Error} on unsupported entry types inside the package.
 */
function listFiles(root) {
  const files = [];
  const pending = [{ absolute: root, relative: '' }];
  while (pending.length > 0) {
    const { absolute, relative } = pending.pop();
    for (const entry of fs.readdirSync(absolute, { withFileTypes: true })) {
      if (entry.name === '.DS_Store') continue;
      const childRelative = toPosix(path.join(relative, entry.name));
      const childAbsolute = path.join(absolute, entry.name);
      if (entry.isDirectory()) {
        pending.push({ absolute: childAbsolute, relative: childRelative });
      } else if (entry.isFile()) {
        files.push(childRelative);
      } else {
        throw new Error(`升级包包含不支持的文件类型: ${childRelative}`);
      }
    }
  }
  return files.sort();
}
/**
 * Validate every file path in the upgrade package against path-safety
 * rules and the mode-specific allowlists.
 *
 * @param {string[]} files - Relative paths from the package.
 * @param {'hot'|'cold'} updateMode - Requested update mode.
 * @returns {{requiresRestart: boolean}} true when any file falls outside
 *   the hot-update allowlist (so applying it needs a platform restart).
 * @throws {Error} on unsafe, blocked or unauthorized paths.
 */
function validateFiles(files, updateMode) {
  for (const file of files) {
    assertSafeRelativePath(file);
    if (isBlockedPackagePath(file)) {
      throw new Error(`升级包包含禁止覆盖的路径: ${file}`);
    }
    if (updateMode === 'hot' && !isHotAllowed(file)) {
      throw new Error(`热更新只能包含 public 等无需重启的静态资源；${file} 需要使用冷更新`);
    }
    if (updateMode === 'cold' && !isColdAllowed(file)) {
      throw new Error(`冷更新包包含未授权路径: ${file}`);
    }
  }
  const requiresRestart = !files.every(file => isHotAllowed(file));
  return { requiresRestart };
}
/**
 * Whether a package path may never be overwritten by an upgrade: any
 * `.env*` segment, a blocked top-level directory, or a blocked name
 * anywhere in the path.
 * @param {string} file - POSIX-style relative path.
 * @returns {boolean}
 */
function isBlockedPackagePath(file) {
  const segments = file.split('/');
  if (segments.some(segment => segment.startsWith('.env'))) return true;
  if (BLOCKED_TOP_LEVEL_NAMES.has(segments[0])) return true;
  return segments.some(segment => BLOCKED_ANYWHERE_NAMES.has(segment));
}
/**
 * Assert that an archive entry is a safe, already-normalized relative path.
 * Rejects absolute paths, backslashes, '.'/'..' forms, and any path whose
 * normalization differs from the original (hidden traversal like 'a/../b').
 *
 * @param {string} file - Relative path from the upgrade package.
 * @throws {Error} when the path is empty, absolute, or escapes the root.
 */
function assertSafeRelativePath(file) {
  if (!file || file.startsWith('/') || file.startsWith('\\') || file.includes('\\')) {
    throw new Error(`升级包包含非法路径: ${file}`);
  }
  const normalized = path.posix.normalize(file);
  // `normalized === '..'` closes a gap in the original check: a bare '..'
  // entry normalizes to itself and previously slipped through every test.
  if (normalized !== file || normalized === '.' || normalized === '..' || normalized.startsWith('../') || normalized.includes('/../')) {
    throw new Error(`升级包包含目录穿越路径: ${file}`);
  }
}
/**
 * Whether a file may be applied by a hot update (no platform restart).
 * @param {string} file - POSIX-style relative path.
 * @returns {boolean}
 */
function isHotAllowed(file) {
  if (HOT_ALLOWED_FILES.has(file)) return true;
  return HOT_ALLOWED_PREFIXES.some(prefix => file.startsWith(prefix));
}
/**
 * Whether a file may be applied by a cold update (restart permitted).
 * @param {string} file - POSIX-style relative path.
 * @returns {boolean}
 */
function isColdAllowed(file) {
  if (COLD_ALLOWED_FILES.has(file)) return true;
  return COLD_ALLOWED_PREFIXES.some(prefix => file.startsWith(prefix));
}
/**
 * Copy every file from the extracted payload into the project tree,
 * creating target directories as needed. The manifest itself is metadata
 * and is never copied.
 * @param {string} root - Extracted payload root.
 * @param {string[]} files - Validated relative paths to apply.
 */
function applyFiles(root, files) {
  for (const file of files) {
    if (file === 'manifest.json') continue;
    const from = path.join(root, file);
    const to = path.join(projectRoot, file);
    ensureDir(path.dirname(to));
    fs.copyFileSync(from, to);
  }
}
/**
 * Snapshot the current source tree into a gzipped tarball, excluding
 * build artifacts, dependencies, runtime data and previous backups.
 * @param {string} target - Destination .tar.gz path.
 */
function createSourceBackup(target) {
  ensureDir(path.dirname(target));
  const excluded = [
    '.git',
    'node_modules',
    '.next',
    'dist',
    'backups',
    'local-storage',
    'upgrade-state',
    'tsconfig.tsbuildinfo',
  ];
  const tarArgs = [
    '-czf',
    target,
    ...excluded.map(name => `--exclude=${name}`),
    '-C',
    projectRoot,
    '.',
  ];
  run('tar', tarArgs, { cwd: projectRoot, label: '创建源码快照' });
}
/**
 * Restore the pre-upgrade source snapshot. Files the upgrade newly created
 * (changed files that did not exist beforehand) are removed first so the
 * tree matches the snapshot exactly; build artifacts are left alone.
 *
 * @param {string} source - Path to the snapshot tarball.
 */
function restoreSourceBackup(source) {
  log(`恢复源码快照: ${source}`);
  const preExistingFiles = new Set(Array.isArray(state.preExistingFiles) ? state.preExistingFiles : []);
  const changedFiles = Array.isArray(state.changedFiles) ? state.changedFiles : [];
  for (const file of changedFiles) {
    if (file === 'manifest.json' || preExistingFiles.has(file)) continue;
    const target = path.join(projectRoot, file);
    // Containment check via path.relative: the previous `startsWith(projectRoot)`
    // test would also accept sibling directories sharing the prefix
    // (e.g. '/srv/app2' when projectRoot is '/srv/app').
    const relative = path.relative(projectRoot, target);
    if (relative && !relative.startsWith('..') && !path.isAbsolute(relative)) {
      fs.rmSync(target, { force: true });
    }
  }
  run('tar', [
    '-xzf',
    source,
    '--exclude=.git',
    '--exclude=node_modules',
    '--exclude=.next',
    '--exclude=dist',
    '-C',
    projectRoot,
  ], { cwd: projectRoot, label: '恢复源码快照' });
}
function restartPlatform(options = {}) {
const restartCommand = process.env.UPGRADE_RESTART_COMMAND || detectRestartCommand();
if (options.detached) {
const logFile = path.join(jobDir, 'restart.log');
const detachedCommand = `nohup bash -lc ${JSON.stringify(restartCommand)} >> ${JSON.stringify(logFile)} 2>&1 &`;
spawnSync('bash', ['-lc', detachedCommand], {
cwd: projectRoot,
env: { ...process.env, COREPACK_HOME: process.env.COREPACK_HOME || '/tmp/corepack' },
encoding: 'utf8',
});
logStep('后台重启平台', `已触发后台重启命令,日志:${logFile}`);
return;
}
run('bash', ['-lc', restartCommand], { cwd: projectRoot, label: '重启平台' });
}
/**
 * Work out how to restart the platform process. Prefers an existing PM2
 * process named `miaojing-dev`, falls back to the ecosystem config when
 * present, and finally to a plain pm2 restart.
 *
 * The pm2 presence probe is best-effort: `|| true` keeps the shell command
 * from failing when pm2 is not installed, so an empty listing is returned.
 *
 * @returns {string} Shell command to run via `bash -lc`.
 */
function detectRestartCommand() {
  const probe = 'command -v pm2 >/dev/null 2>&1 && pm2 jlist || true';
  const pm2Listing = runCapture('bash', ['-lc', probe], {
    cwd: projectRoot,
    label: '检测 PM2 进程',
  });
  if (pm2Listing.includes('"name":"miaojing-dev"')) {
    return 'pm2 restart miaojing-dev --update-env';
  }
  const ecosystemConfig = path.join(projectRoot, 'ecosystem.config.cjs');
  if (fs.existsSync(ecosystemConfig)) {
    return 'pm2 startOrReload ecosystem.config.cjs --update-env && pm2 save';
  }
  return 'pm2 restart miaojing-dev --update-env';
}
/**
 * Build tar CLI arguments for listing or extracting an archive, picking
 * the gzip variant for anything that is not a plain .tar file.
 *
 * @param {'list'|string} action - 'list' lists contents; anything else extracts.
 * @param {string} archivePath - Archive to operate on.
 * @returns {string[]} Arguments suitable for spawning `tar`.
 */
function tarReadArgs(action, archivePath) {
  const plain = archivePath.endsWith('.tar');
  if (action === 'list') {
    return [plain ? '-tf' : '-tzf', archivePath];
  }
  return [plain ? '-xf' : '-xzf', archivePath];
}
/**
 * Poll the app health endpoint with curl until it answers or the timeout
 * elapses. Intentionally synchronous/blocking: the upgrade runner is a
 * dedicated CLI process with nothing else to do.
 *
 * Configured via env: UPGRADE_HEALTH_URL / APP_HEALTH_URL (default
 * http://127.0.0.1:5100/api/health) and UPGRADE_HEALTH_TIMEOUT_MS (90s).
 *
 * @throws {Error} when the endpoint stays unhealthy past the timeout; the
 *   last curl error is included for diagnosis.
 */
function waitForHealth() {
  const healthUrl = process.env.UPGRADE_HEALTH_URL || process.env.APP_HEALTH_URL || 'http://127.0.0.1:5100/api/health';
  const timeoutMs = Number(process.env.UPGRADE_HEALTH_TIMEOUT_MS || 90000);
  const startedAt = Date.now();
  let lastError = '';
  while (Date.now() - startedAt < timeoutMs) {
    // -f makes curl exit non-zero on HTTP errors; -sS keeps stderr informative.
    const result = spawnSync('curl', ['-fsS', healthUrl], { encoding: 'utf8', timeout: 8000 });
    if (result.status === 0) {
      log(`健康检查通过: ${healthUrl}`);
      return;
    }
    lastError = `${result.stderr || result.stdout || `exit ${result.status}`}`.trim();
    sleep(3000);
  }
  throw new Error(`健康检查超时: ${healthUrl}; ${lastError}`);
}
/**
 * Block the current thread for roughly `ms` milliseconds without spinning.
 * Atomics.wait parks the thread until the timeout elapses (the watched
 * value never changes, so the wait always times out).
 * @param {number} ms - Milliseconds to block.
 */
function sleep(ms) {
  const cell = new Int32Array(new SharedArrayBuffer(4));
  Atomics.wait(cell, 0, 0, ms);
}
/**
 * Compute the hex-encoded SHA-256 digest of a file's contents.
 * Reads the whole file into memory, so intended for modest package sizes.
 * @param {string} file - Path to hash.
 * @returns {string} Lowercase hex digest.
 */
function sha256(file) {
  const digest = createHash('sha256');
  digest.update(fs.readFileSync(file));
  return digest.digest('hex');
}
/**
 * Convert a platform-specific path into forward-slash (POSIX) form.
 * @param {string} file - Path using the host separator.
 * @returns {string} Path with '/' separators.
 */
function toPosix(file) {
  return file.replaceAll(path.sep, '/');
}

View File

@@ -0,0 +1,20 @@
#!/bin/bash
# Apply the idempotent database optimization patch to the PostgreSQL
# instance referenced by LOCAL_DB_URL (from the environment or .env.local).
set -Eeuo pipefail
COZE_WORKSPACE_PATH="${COZE_WORKSPACE_PATH:-$(pwd)}"
# Load .env.local with auto-export; relax nounset while sourcing because
# the file may reference variables that are not set yet.
if [ -f "${COZE_WORKSPACE_PATH}/.env.local" ]; then
set +u
set -a
# shellcheck disable=SC1091
source "${COZE_WORKSPACE_PATH}/.env.local"
set +a
set -u
fi
if [ -z "${LOCAL_DB_URL:-}" ]; then
echo "LOCAL_DB_URL is not set" >&2
exit 1
fi
# ON_ERROR_STOP makes psql exit non-zero on the first failing statement.
psql "${LOCAL_DB_URL}" -v ON_ERROR_STOP=1 -f "${COZE_WORKSPACE_PATH}/scripts/database-optimization-patch.sql"

72
scripts/backup-create.sh Normal file
View File

@@ -0,0 +1,72 @@
#!/bin/bash
# Create a full data backup (database dump, local-storage dir, .env.local,
# package.json) as a single tar.gz, pruning to the 10 most recent backups.
# Prints the backup file path on stdout — callers parse the last line.
set -Eeuo pipefail
COZE_WORKSPACE_PATH="${COZE_WORKSPACE_PATH:-$(pwd)}"
# Remember caller-supplied values so sourcing .env.local cannot override them.
REQUESTED_BACKUP_DIR="${BACKUP_DIR:-}"
REQUESTED_LOCAL_DB_URL="${LOCAL_DB_URL:-}"
TIMESTAMP="$(date +%Y%m%d-%H%M%S)"
TMP_DIR="$(mktemp -d)"
cleanup() {
rm -rf "${TMP_DIR}"
}
trap cleanup EXIT
cd "${COZE_WORKSPACE_PATH}"
if [ -f ".env.local" ]; then
set +u
set -a
# shellcheck disable=SC1091
source ".env.local"
set +a
set -u
fi
# Caller-supplied values take precedence over anything from .env.local.
[ -n "${REQUESTED_LOCAL_DB_URL}" ] && LOCAL_DB_URL="${REQUESTED_LOCAL_DB_URL}"
BACKUP_DIR="${REQUESTED_BACKUP_DIR:-${BACKUP_DIR:-${COZE_WORKSPACE_PATH}/backups}}"
BACKUP_FILE="${BACKUP_DIR}/miaojing-backup-${TIMESTAMP}.tar.gz"
mkdir -p "${BACKUP_DIR}"
chmod 700 "${BACKUP_DIR}"
if [ -z "${LOCAL_DB_URL:-}" ]; then
echo "LOCAL_DB_URL is required in .env.local or environment." >&2
exit 1
fi
command -v pg_dump >/dev/null 2>&1 || {
echo "pg_dump is required to create backups." >&2
exit 1
}
# Custom-format dump so pg_restore can do selective/clean restores.
pg_dump "${LOCAL_DB_URL}" --format=custom --file "${TMP_DIR}/database.dump"
STORAGE_SOURCE="${LOCAL_STORAGE_DIR:-${COZE_WORKSPACE_PATH}/local-storage}"
if [ -d "${STORAGE_SOURCE}" ]; then
cp -a "${STORAGE_SOURCE}" "${TMP_DIR}/local-storage"
fi
if [ -f ".env.local" ]; then
cp ".env.local" "${TMP_DIR}/.env.local"
fi
if [ -f "package.json" ]; then
cp "package.json" "${TMP_DIR}/package.json"
fi
cat > "${TMP_DIR}/manifest.json" <<EOF
{
"app": "miaojingAI",
"createdAt": "$(date -Iseconds)",
"hostname": "$(hostname)",
"includes": ["database.dump", "local-storage", ".env.local", "package.json"]
}
EOF
tar -czf "${BACKUP_FILE}" -C "${TMP_DIR}" .
chmod 600 "${BACKUP_FILE}"
# Retention: keep the newest 10 backups, delete the rest.
find "${BACKUP_DIR}" -maxdepth 1 -name 'miaojing-backup-*.tar.gz' -type f \
-printf '%T@ %p\n' | sort -rn | awk 'NR>10 {print $2}' | xargs -r rm -f
echo "${BACKUP_FILE}"

32
scripts/backup-list.sh Normal file
View File

@@ -0,0 +1,32 @@
#!/bin/bash
# List available backups in BACKUP_DIR (newest first) as a simple table:
# file name, size in bytes, and modification timestamp.
set -Eeuo pipefail
COZE_WORKSPACE_PATH="${COZE_WORKSPACE_PATH:-$(pwd)}"
# Remember the caller-supplied dir so sourcing .env.local cannot override it.
REQUESTED_BACKUP_DIR="${BACKUP_DIR:-}"
cd "${COZE_WORKSPACE_PATH}"
if [ -f ".env.local" ]; then
set +u
set -a
# shellcheck disable=SC1091
source ".env.local"
set +a
set -u
fi
BACKUP_DIR="${REQUESTED_BACKUP_DIR:-${BACKUP_DIR:-${COZE_WORKSPACE_PATH}/backups}}"
mkdir -p "${BACKUP_DIR}"
chmod 700 "${BACKUP_DIR}"
if ! compgen -G "${BACKUP_DIR}/miaojing-backup-*.tar.gz" >/dev/null; then
echo "No backups found in ${BACKUP_DIR}"
exit 0
fi
printf '%-40s %-12s %s\n' "FILE" "SIZE" "MODIFIED"
# Sort by mtime (epoch, field 1) descending, then print name/size/date.
find "${BACKUP_DIR}" -maxdepth 1 -name 'miaojing-backup-*.tar.gz' -type f \
-printf '%T@ %f %s %TY-%Tm-%Td %TH:%TM\n' \
| sort -rn \
| awk '{printf "%-40s %-12s %s %s\n", $2, $3, $4, $5}'

65
scripts/backup-restore.sh Normal file
View File

@@ -0,0 +1,65 @@
#!/bin/bash
# Restore a backup created by backup-create.sh: database (pg_restore with
# --clean), local-storage directory, and .env.local. Destructive for the
# current database contents and storage dir.
set -Eeuo pipefail
COZE_WORKSPACE_PATH="${COZE_WORKSPACE_PATH:-$(pwd)}"
BACKUP_FILE="${1:-}"
TMP_DIR="$(mktemp -d)"
cleanup() {
rm -rf "${TMP_DIR}"
}
trap cleanup EXIT
if [ -z "${BACKUP_FILE}" ]; then
echo "Usage: pnpm backup:restore <backup-file.tar.gz>" >&2
exit 2
fi
if [ ! -f "${BACKUP_FILE}" ]; then
echo "Backup file not found: ${BACKUP_FILE}" >&2
exit 2
fi
cd "${COZE_WORKSPACE_PATH}"
if [ -f ".env.local" ]; then
set +u
set -a
# shellcheck disable=SC1091
source ".env.local"
set +a
set -u
fi
if [ -z "${LOCAL_DB_URL:-}" ]; then
echo "LOCAL_DB_URL is required in .env.local or environment." >&2
exit 1
fi
command -v pg_restore >/dev/null 2>&1 || {
echo "pg_restore is required to restore backups." >&2
exit 1
}
tar -xzf "${BACKUP_FILE}" -C "${TMP_DIR}"
if [ ! -f "${TMP_DIR}/database.dump" ]; then
echo "Invalid backup: missing database.dump." >&2
exit 2
fi
# --clean --if-exists drops existing objects before recreating them.
pg_restore --clean --if-exists --no-owner --dbname "${LOCAL_DB_URL}" "${TMP_DIR}/database.dump"
if [ -d "${TMP_DIR}/local-storage" ]; then
STORAGE_TARGET="${LOCAL_STORAGE_DIR:-${COZE_WORKSPACE_PATH}/local-storage}"
# Replace the storage dir wholesale so deleted files do not linger.
rm -rf "${STORAGE_TARGET}"
mkdir -p "$(dirname "${STORAGE_TARGET}")"
cp -a "${TMP_DIR}/local-storage" "${STORAGE_TARGET}"
fi
if [ -f "${TMP_DIR}/.env.local" ]; then
cp "${TMP_DIR}/.env.local" ".env.local"
chmod 600 ".env.local"
fi
echo "Restore completed from ${BACKUP_FILE}"

21
scripts/build.sh Normal file
View File

@@ -0,0 +1,21 @@
#!/bin/bash
# Production build: optionally install deps, build the Next.js app, then
# bundle the custom server entry with tsup into dist/.
set -Eeuo pipefail
COZE_WORKSPACE_PATH="${COZE_WORKSPACE_PATH:-$(pwd)}"
cd "${COZE_WORKSPACE_PATH}"
# Install only when forced (INSTALL_DEPS=1) or node_modules is absent.
if [ "${INSTALL_DEPS:-0}" = "1" ] || [ ! -d node_modules ]; then
echo "Installing dependencies..."
pnpm install --prefer-frozen-lockfile --prefer-offline --loglevel debug --reporter=append-only
else
echo "Skipping dependency install. Set INSTALL_DEPS=1 to force it."
fi
echo "Building the Next.js project..."
pnpm next build
echo "Bundling server with tsup..."
pnpm tsup src/server.ts --format cjs --platform node --target node20 --outDir dist --no-splitting --no-minify
echo "Build completed successfully!"

View File

@@ -0,0 +1,50 @@
#!/bin/bash
# Module-boundary lint: greps each src/modules subtree for imports it is
# not allowed to use and exits non-zero if any violation is found.
set -Eeuo pipefail
fail=0
# Search for a regex in the given paths using ripgrep when available,
# falling back to grep. `|| true` keeps "no match" from failing the script.
search_pattern() {
local pattern="$1"
shift
if command -v rg >/dev/null 2>&1; then
rg -n "$pattern" "$@" || true
else
grep -RInE "$pattern" "$@" || true
fi
}
# Record a violation (and print the offending lines) if the pattern matches.
check_no_match() {
local label="$1"
local pattern="$2"
shift 2
local output
output="$(search_pattern "$pattern" "$@")"
if [ -n "$output" ]; then
echo "Boundary violation: ${label}" >&2
echo "$output" >&2
fail=1
fi
}
check_no_match \
"web module must not import server database/storage internals" \
"@/storage|@/lib/local-storage|@/lib/session-auth|@/lib/admin-auth|@/lib/runtime-env|@/lib/server-crypto" \
src/modules/web
check_no_match \
"console module must not import server database/storage internals directly" \
"@/storage|@/lib/local-storage|@/lib/runtime-env|@/lib/server-crypto" \
src/modules/console
check_no_match \
"shared module must not depend on app-specific modules" \
"@/modules/(web|console|api)|@/app/|@/components/admin" \
src/modules/shared
# All checks are run before exiting so every violation is reported at once.
if [ "$fail" -ne 0 ]; then
exit 1
fi
echo "Module boundaries OK"

View File

@@ -0,0 +1,169 @@
-- Idempotent local PostgreSQL patch for production maintenance.
-- It creates missing application tables and adds indexes used by hot paths.
-- Safe to re-run: every statement uses IF [NOT] EXISTS / ON CONFLICT, and
-- policies are dropped before being recreated.
CREATE EXTENSION IF NOT EXISTS "pgcrypto";
CREATE EXTENSION IF NOT EXISTS "pg_trgm";
-- Generated works (images/videos) owned by users.
CREATE TABLE IF NOT EXISTS works (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
user_id UUID,
title VARCHAR(255),
type VARCHAR(32) NOT NULL,
prompt TEXT,
negative_prompt TEXT,
params JSONB DEFAULT '{}'::jsonb,
result_url TEXT,
thumbnail_url TEXT,
width INTEGER,
height INTEGER,
duration NUMERIC(6, 2),
is_public BOOLEAN NOT NULL DEFAULT false,
likes_count INTEGER NOT NULL DEFAULT 0,
credits_cost INTEGER NOT NULL DEFAULT 0,
status VARCHAR(32) NOT NULL DEFAULT 'completed',
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ
);
-- Purchase orders (memberships, credits, API access).
CREATE TABLE IF NOT EXISTS orders (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
user_id UUID,
order_no VARCHAR(64) NOT NULL UNIQUE,
product_type VARCHAR(32) NOT NULL,
product_name VARCHAR(255) NOT NULL,
amount NUMERIC(10, 2) NOT NULL,
credits_amount INTEGER,
status VARCHAR(32) NOT NULL DEFAULT 'pending',
payment_method VARCHAR(32),
paid_at TIMESTAMPTZ,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ
);
-- User-supplied generation API credentials (key stored encrypted).
CREATE TABLE IF NOT EXISTS user_api_keys (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
user_id UUID,
provider VARCHAR(64) NOT NULL,
api_url TEXT,
model_name VARCHAR(128),
api_key_encrypted TEXT NOT NULL,
api_key_preview VARCHAR(20),
is_active BOOLEAN NOT NULL DEFAULT true,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ
);
-- One like per user per work (enforced by a unique index below).
CREATE TABLE IF NOT EXISTS work_likes (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
user_id UUID,
work_id UUID NOT NULL REFERENCES works(id) ON DELETE CASCADE,
created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
-- Async generation job queue.
CREATE TABLE IF NOT EXISTS generation_jobs (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
type VARCHAR(16) NOT NULL,
status VARCHAR(16) NOT NULL DEFAULT 'queued',
payload JSONB NOT NULL DEFAULT '{}'::jsonb,
result JSONB,
error TEXT,
user_id UUID,
provider VARCHAR(128),
model_name VARCHAR(255),
api_url TEXT,
progress JSONB NOT NULL DEFAULT '{}'::jsonb,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
started_at TIMESTAMPTZ,
finished_at TIMESTAMPTZ,
updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
-- Backfill columns on pre-existing generation_jobs installations.
ALTER TABLE generation_jobs ADD COLUMN IF NOT EXISTS user_id UUID;
ALTER TABLE generation_jobs ADD COLUMN IF NOT EXISTS provider VARCHAR(128);
ALTER TABLE generation_jobs ADD COLUMN IF NOT EXISTS model_name VARCHAR(255);
ALTER TABLE generation_jobs ADD COLUMN IF NOT EXISTS api_url TEXT;
ALTER TABLE generation_jobs ADD COLUMN IF NOT EXISTS progress JSONB NOT NULL DEFAULT '{}'::jsonb;
-- Hot-path composite indexes (listing feeds, per-user history, admin views).
CREATE INDEX IF NOT EXISTS works_user_created_idx ON works (user_id, created_at DESC);
CREATE INDEX IF NOT EXISTS works_public_status_created_idx ON works (is_public, status, created_at DESC);
CREATE INDEX IF NOT EXISTS works_public_status_likes_idx ON works (is_public, status, likes_count DESC);
CREATE INDEX IF NOT EXISTS works_type_created_idx ON works (type, created_at DESC);
CREATE INDEX IF NOT EXISTS works_status_created_idx ON works (status, created_at DESC);
CREATE INDEX IF NOT EXISTS credit_transactions_user_created_idx ON credit_transactions (user_id, created_at DESC);
CREATE INDEX IF NOT EXISTS credit_transactions_type_created_idx ON credit_transactions (type, created_at DESC);
CREATE INDEX IF NOT EXISTS orders_user_created_idx ON orders (user_id, created_at DESC);
CREATE INDEX IF NOT EXISTS orders_status_created_idx ON orders (status, created_at DESC);
CREATE INDEX IF NOT EXISTS orders_order_no_idx ON orders (order_no);
CREATE INDEX IF NOT EXISTS user_api_keys_user_active_idx ON user_api_keys (user_id, is_active);
CREATE INDEX IF NOT EXISTS user_api_keys_provider_idx ON user_api_keys (provider);
CREATE INDEX IF NOT EXISTS work_likes_user_id_idx ON work_likes (user_id);
CREATE INDEX IF NOT EXISTS work_likes_work_id_idx ON work_likes (work_id);
CREATE UNIQUE INDEX IF NOT EXISTS work_likes_user_work_uniq ON work_likes (user_id, work_id);
CREATE INDEX IF NOT EXISTS announcements_active_window_idx ON announcements (is_active, starts_at, expires_at);
-- Trigram GIN indexes back case-insensitive substring search in admin UIs.
CREATE INDEX IF NOT EXISTS profiles_email_trgm_idx ON profiles USING GIN (LOWER(email) gin_trgm_ops);
CREATE INDEX IF NOT EXISTS profiles_nickname_trgm_idx ON profiles USING GIN (LOWER(COALESCE(nickname, '')) gin_trgm_ops);
CREATE INDEX IF NOT EXISTS profiles_phone_trgm_idx ON profiles USING GIN (LOWER(COALESCE(phone, '')) gin_trgm_ops);
CREATE INDEX IF NOT EXISTS generation_jobs_status_created_idx ON generation_jobs (status, created_at DESC);
CREATE INDEX IF NOT EXISTS generation_jobs_status_updated_idx ON generation_jobs (status, updated_at DESC);
-- Partial index: the stale-running-job sweep only scans status = 'running'.
CREATE INDEX IF NOT EXISTS generation_jobs_running_timeout_idx ON generation_jobs (updated_at) WHERE status = 'running';
CREATE INDEX IF NOT EXISTS generation_jobs_created_idx ON generation_jobs (created_at DESC);
CREATE INDEX IF NOT EXISTS generation_jobs_user_created_idx ON generation_jobs (user_id, created_at DESC);
CREATE INDEX IF NOT EXISTS generation_jobs_provider_model_created_idx ON generation_jobs (type, provider, model_name, created_at DESC);
-- Singleton settings row for platform log retention.
CREATE TABLE IF NOT EXISTS platform_log_settings (
id INTEGER PRIMARY KEY DEFAULT 1,
retention_days INTEGER NOT NULL DEFAULT 30,
updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
INSERT INTO platform_log_settings (id, retention_days)
VALUES (1, 30)
ON CONFLICT (id) DO NOTHING;
-- Structured audit/operations log.
CREATE TABLE IF NOT EXISTS platform_logs (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
type VARCHAR(32) NOT NULL,
level VARCHAR(16) NOT NULL DEFAULT 'info',
action VARCHAR(128) NOT NULL,
message TEXT NOT NULL,
user_id UUID,
user_name VARCHAR(255),
user_email VARCHAR(255),
target_type VARCHAR(64),
target_id VARCHAR(255),
ip_address VARCHAR(64),
user_agent TEXT,
metadata JSONB NOT NULL DEFAULT '{}'::jsonb,
created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
CREATE INDEX IF NOT EXISTS platform_logs_type_created_idx ON platform_logs (type, created_at DESC);
CREATE INDEX IF NOT EXISTS platform_logs_level_created_idx ON platform_logs (level, created_at DESC);
CREATE INDEX IF NOT EXISTS platform_logs_user_created_idx ON platform_logs (user_id, created_at DESC);
CREATE INDEX IF NOT EXISTS platform_logs_created_idx ON platform_logs (created_at DESC);
CREATE INDEX IF NOT EXISTS platform_logs_user_name_idx ON platform_logs (LOWER(COALESCE(user_name, '')));
CREATE INDEX IF NOT EXISTS platform_logs_user_email_idx ON platform_logs (LOWER(COALESCE(user_email, '')));
-- Tighten RLS: replace any authenticated-write policies with admin-only
-- write policies (drops are unconditional so the patch stays idempotent).
DROP POLICY IF EXISTS "site_config_write_auth" ON site_config;
DROP POLICY IF EXISTS "announcements_write_auth" ON announcements;
DROP POLICY IF EXISTS "site_stats_write_auth" ON site_stats;
DROP POLICY IF EXISTS "site_config_admin_write" ON site_config;
DROP POLICY IF EXISTS "announcements_admin_write" ON announcements;
CREATE POLICY "site_config_admin_write" ON site_config FOR ALL USING (
EXISTS (SELECT 1 FROM profiles WHERE id = auth.uid() AND role = 'admin')
) WITH CHECK (
EXISTS (SELECT 1 FROM profiles WHERE id = auth.uid() AND role = 'admin')
);
CREATE POLICY "announcements_admin_write" ON announcements FOR ALL USING (
EXISTS (SELECT 1 FROM profiles WHERE id = auth.uid() AND role = 'admin')
) WITH CHECK (
EXISTS (SELECT 1 FROM profiles WHERE id = auth.uid() AND role = 'admin')
);
-- Refresh planner statistics after the schema changes.
ANALYZE;

1156
scripts/deploy-or-upgrade.sh Normal file

File diff suppressed because it is too large Load Diff

34
scripts/dev.sh Normal file
View File

@@ -0,0 +1,34 @@
#!/bin/bash
# Dev entrypoint: free the target port (SIGKILL any listeners), then run
# the server via tsx in watch mode. Port defaults to 5000, overridable as $1.
set -Eeuo pipefail
PORT=${1:-5000}
COZE_WORKSPACE_PATH="${COZE_WORKSPACE_PATH:-$(pwd)}"
DEPLOY_RUN_PORT=$PORT
cd "${COZE_WORKSPACE_PATH}"
# Find PIDs listening on the port via `ss` and SIGKILL them; re-checks once
# afterwards and only warns (does not fail) if the port is still busy.
kill_port_if_listening() {
local pids
pids=$(ss -H -lntp 2>/dev/null | awk -v port="${DEPLOY_RUN_PORT}" '$4 ~ ":"port"$"' | grep -o 'pid=[0-9]*' | cut -d= -f2 | paste -sd' ' - || true)
if [[ -z "${pids}" ]]; then
echo "Port ${DEPLOY_RUN_PORT} is free."
return
fi
echo "Port ${DEPLOY_RUN_PORT} in use by PIDs: ${pids} (SIGKILL)"
echo "${pids}" | xargs -I {} kill -9 {}
sleep 1
pids=$(ss -H -lntp 2>/dev/null | awk -v port="${DEPLOY_RUN_PORT}" '$4 ~ ":"port"$"' | grep -o 'pid=[0-9]*' | cut -d= -f2 | paste -sd' ' - || true)
if [[ -n "${pids}" ]]; then
echo "Warning: port ${DEPLOY_RUN_PORT} still busy after SIGKILL, PIDs: ${pids}"
else
echo "Port ${DEPLOY_RUN_PORT} cleared."
fi
}
echo "Clearing port ${PORT} before start."
kill_port_if_listening
echo "Starting HTTP service on port ${PORT} for dev..."
PORT=$PORT pnpm tsx watch src/server.ts

663
scripts/init-database.sql Normal file
View File

@@ -0,0 +1,663 @@
-- ============================================================
-- 妙境 AI 创作平台 — 数据库初始化脚本
-- 适用于: PostgreSQL 14+ (Supabase / 自托管)
-- 执行方式: 在 Supabase SQL Editor 或 psql 中运行
-- ============================================================
-- 0. 启用必要扩展
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
CREATE EXTENSION IF NOT EXISTS "pgcrypto";
-- 1. 创建 auth 模式和 users 表
CREATE SCHEMA IF NOT EXISTS auth;
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_proc p
JOIN pg_namespace n ON n.oid = p.pronamespace
WHERE n.nspname = 'auth' AND p.proname = 'uid'
) THEN
EXECUTE 'CREATE FUNCTION auth.uid() RETURNS UUID AS $fn$ SELECT NULLIF(current_setting(''request.jwt.claim.sub'', true), '''')::UUID; $fn$ LANGUAGE SQL STABLE';
END IF;
IF NOT EXISTS (
SELECT 1 FROM pg_proc p
JOIN pg_namespace n ON n.oid = p.pronamespace
WHERE n.nspname = 'auth' AND p.proname = 'role'
) THEN
EXECUTE 'CREATE FUNCTION auth.role() RETURNS TEXT AS $fn$ SELECT COALESCE(NULLIF(current_setting(''request.jwt.claim.role'', true), ''''), ''anon''); $fn$ LANGUAGE SQL STABLE';
END IF;
END $$;
CREATE TABLE IF NOT EXISTS auth.users (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
email VARCHAR(255) UNIQUE,
password_hash TEXT,
raw_user_meta_data JSONB,
created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
CREATE INDEX IF NOT EXISTS auth_users_email_idx ON auth.users (email);
-- ============================================================
-- 1. 用户资料表 (profiles)
-- 与 Supabase Auth 的 auth.users 表关联
-- ============================================================
CREATE TABLE IF NOT EXISTS profiles (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
email VARCHAR(255) NOT NULL UNIQUE,
nickname VARCHAR(128),
avatar_url TEXT,
phone VARCHAR(20),
role VARCHAR(32) NOT NULL DEFAULT 'user', -- guest, user, vip, enterprise_admin, enterprise_member, admin
membership_tier VARCHAR(32) NOT NULL DEFAULT 'free', -- free, basic, pro, enterprise
membership_expires_at TIMESTAMPTZ,
credits_balance INTEGER NOT NULL DEFAULT 0,
daily_quota_used INTEGER NOT NULL DEFAULT 0,
daily_quota_limit INTEGER NOT NULL DEFAULT 5,
is_active BOOLEAN NOT NULL DEFAULT true,
email_verified BOOLEAN NOT NULL DEFAULT false,
email_verified_at TIMESTAMPTZ,
email_bound_at TIMESTAMPTZ,
email_sender_domain VARCHAR(255),
preferred_theme VARCHAR(16) NOT NULL DEFAULT 'dark',
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ
);
CREATE INDEX IF NOT EXISTS profiles_email_idx ON profiles (email);
CREATE INDEX IF NOT EXISTS profiles_role_idx ON profiles (role);
-- ============================================================
-- 2. 创作作品表 (works)
-- ============================================================
CREATE TABLE IF NOT EXISTS works (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
user_id UUID DEFAULT auth.uid(),
title VARCHAR(255),
type VARCHAR(32) NOT NULL, -- text2img, img2img, text2video, img2video
prompt TEXT,
negative_prompt TEXT,
params JSONB, -- 生成参数 (画面比例、分辨率、模型等)
result_url TEXT, -- 生成文件的 URL
thumbnail_url TEXT,
width INTEGER,
height INTEGER,
duration NUMERIC(6, 2), -- 视频时长 (秒)
is_public BOOLEAN NOT NULL DEFAULT false,
likes_count INTEGER NOT NULL DEFAULT 0,
views_count INTEGER NOT NULL DEFAULT 0,
credits_cost INTEGER NOT NULL DEFAULT 0,
status VARCHAR(32) NOT NULL DEFAULT 'completed', -- pending, processing, completed, failed
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ
);
CREATE INDEX IF NOT EXISTS works_user_id_idx ON works (user_id);
CREATE INDEX IF NOT EXISTS works_type_idx ON works (type);
CREATE INDEX IF NOT EXISTS works_is_public_idx ON works (is_public);
CREATE INDEX IF NOT EXISTS works_created_at_idx ON works (created_at);
CREATE INDEX IF NOT EXISTS works_status_idx ON works (status);
-- ============================================================
-- 3. 积分记录表 (credit_transactions)
-- ============================================================
CREATE TABLE IF NOT EXISTS credit_transactions (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
user_id UUID DEFAULT auth.uid(),
amount INTEGER NOT NULL, -- 正数=入账, 负数=消费
balance_after INTEGER NOT NULL,
type VARCHAR(32) NOT NULL, -- purchase, consume, gift, reward, refund
description VARCHAR(500),
related_work_id UUID,
created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
CREATE INDEX IF NOT EXISTS credit_transactions_user_id_idx ON credit_transactions (user_id);
CREATE INDEX IF NOT EXISTS credit_transactions_type_idx ON credit_transactions (type);
CREATE INDEX IF NOT EXISTS credit_transactions_created_at_idx ON credit_transactions (created_at);
-- ============================================================
-- 4. 订单表 (orders)
-- ============================================================
CREATE TABLE IF NOT EXISTS orders (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
user_id UUID DEFAULT auth.uid(),
order_no VARCHAR(64) NOT NULL UNIQUE,
product_type VARCHAR(32) NOT NULL, -- membership, credits, api
product_name VARCHAR(255) NOT NULL,
amount NUMERIC(10, 2) NOT NULL,
credits_amount INTEGER, -- 购买的积分数
status VARCHAR(32) NOT NULL DEFAULT 'pending', -- pending, paid, cancelled, refunded
payment_method VARCHAR(32), -- wechat, alipay, stripe
paid_at TIMESTAMPTZ,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ
);
CREATE INDEX IF NOT EXISTS orders_user_id_idx ON orders (user_id);
CREATE INDEX IF NOT EXISTS orders_order_no_idx ON orders (order_no);
-- Indexes supporting order filtering by status and newest-first listings.
CREATE INDEX IF NOT EXISTS orders_status_idx ON orders (status);
CREATE INDEX IF NOT EXISTS orders_created_at_idx ON orders (created_at);
-- ============================================================
-- 5. Generation job queue table (generation_jobs)
-- One row per AI generation job. `payload` holds the submitted request,
-- `result`/`error` the outcome; `status` appears to move through
-- queued -> running -> succeeded/failed (see partial index below).
-- ============================================================
CREATE TABLE IF NOT EXISTS generation_jobs (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
type VARCHAR(16) NOT NULL,
status VARCHAR(16) NOT NULL DEFAULT 'queued',
payload JSONB NOT NULL DEFAULT '{}'::jsonb,
result JSONB,
error TEXT,
user_id UUID,
provider VARCHAR(128),
model_name VARCHAR(255),
api_url TEXT,
progress JSONB NOT NULL DEFAULT '{}'::jsonb,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
started_at TIMESTAMPTZ,
finished_at TIMESTAMPTZ,
updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
-- Queue polling and admin listing indexes. The partial index on
-- (updated_at) WHERE status = 'running' supports stale-job timeout sweeps.
CREATE INDEX IF NOT EXISTS generation_jobs_status_created_idx ON generation_jobs (status, created_at DESC);
CREATE INDEX IF NOT EXISTS generation_jobs_status_updated_idx ON generation_jobs (status, updated_at DESC);
CREATE INDEX IF NOT EXISTS generation_jobs_running_timeout_idx ON generation_jobs (updated_at) WHERE status = 'running';
CREATE INDEX IF NOT EXISTS generation_jobs_created_idx ON generation_jobs (created_at DESC);
CREATE INDEX IF NOT EXISTS generation_jobs_user_created_idx ON generation_jobs (user_id, created_at DESC);
CREATE INDEX IF NOT EXISTS generation_jobs_provider_model_created_idx ON generation_jobs (type, provider, model_name, created_at DESC);
-- ============================================================
-- 6. User-supplied API key table (user_api_keys)
-- Stores each user's own provider credentials; the key itself is stored
-- encrypted and only a short preview (key tail) is kept readable.
-- ============================================================
CREATE TABLE IF NOT EXISTS user_api_keys (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
user_id UUID DEFAULT auth.uid(),
provider VARCHAR(64) NOT NULL, -- openai, stabilityai, runway, etc.
api_url TEXT, -- full API endpoint URL
model_name VARCHAR(128), -- concrete model name
api_key_encrypted TEXT NOT NULL, -- API key, stored encrypted
api_key_preview VARCHAR(20), -- key tail for display (e.g. sk-...4f3e)
supplier_name VARCHAR(128),
note TEXT NOT NULL DEFAULT '',
type VARCHAR(16) NOT NULL DEFAULT 'image',
is_active BOOLEAN NOT NULL DEFAULT true,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ
);
CREATE INDEX IF NOT EXISTS user_api_keys_user_id_idx ON user_api_keys (user_id);
CREATE INDEX IF NOT EXISTS user_api_keys_provider_idx ON user_api_keys (provider);
-- ============================================================
-- 7. Work likes table (work_likes)
-- Join table between users and the works they have liked.
-- ============================================================
CREATE TABLE IF NOT EXISTS work_likes (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
user_id UUID DEFAULT auth.uid(),
work_id UUID NOT NULL REFERENCES works(id) ON DELETE CASCADE,
created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
CREATE INDEX IF NOT EXISTS work_likes_user_id_idx ON work_likes (user_id);
CREATE INDEX IF NOT EXISTS work_likes_work_id_idx ON work_likes (work_id);
-- Unique constraint: each user may like a given work at most once.
CREATE UNIQUE INDEX IF NOT EXISTS work_likes_user_work_uniq ON work_likes (user_id, work_id);
-- ============================================================
-- 8. Site configuration table (site_config)
-- Singleton row (id = 1) with site-wide settings; seeded with
-- defaults below so the application always finds a row.
-- ============================================================
CREATE TABLE IF NOT EXISTS site_config (
id INTEGER PRIMARY KEY DEFAULT 1,
site_name VARCHAR(128) NOT NULL DEFAULT '妙境',
site_tab_title VARCHAR(255) NOT NULL DEFAULT '妙境 - AI创作平台',
site_description TEXT NOT NULL DEFAULT '',
site_keywords TEXT NOT NULL DEFAULT '',
logo_url TEXT,
favicon_url TEXT,
announcement TEXT NOT NULL DEFAULT '',
membership_enabled BOOLEAN NOT NULL DEFAULT TRUE,
terms_of_service TEXT NOT NULL DEFAULT '',
privacy_policy TEXT NOT NULL DEFAULT '',
about_us TEXT NOT NULL DEFAULT '',
help_center TEXT NOT NULL DEFAULT '',
filing_info TEXT NOT NULL DEFAULT '',
filing_url TEXT NOT NULL DEFAULT '',
public_security_filing_info TEXT NOT NULL DEFAULT '',
public_security_filing_url TEXT NOT NULL DEFAULT '',
log_retention_days INTEGER NOT NULL DEFAULT 30,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ
);
-- Insert the default configuration (no-op when the row already exists).
INSERT INTO site_config (id, site_name, site_tab_title)
VALUES (1, '妙境', '妙境 - AI创作平台')
ON CONFLICT (id) DO NOTHING;
-- ============================================================
-- 9. Announcements table (announcements)
-- Optional starts_at/expires_at bound the visibility window.
-- ============================================================
CREATE TABLE IF NOT EXISTS announcements (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
title VARCHAR(255) NOT NULL,
content TEXT NOT NULL, -- Markdown supported
type VARCHAR(32) NOT NULL DEFAULT 'site',
is_active BOOLEAN NOT NULL DEFAULT true,
starts_at TIMESTAMPTZ,
expires_at TIMESTAMPTZ,
created_by UUID,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ
);
CREATE INDEX IF NOT EXISTS announcements_is_active_idx ON announcements (is_active);
CREATE INDEX IF NOT EXISTS announcements_expires_at_idx ON announcements (expires_at);
-- ============================================================
-- 10. Site statistics table (site_stats)
-- Singleton row (id = 1); total_visits is bumped atomically via the
-- increment_visits() function defined near the end of this script.
-- ============================================================
CREATE TABLE IF NOT EXISTS site_stats (
id INTEGER PRIMARY KEY DEFAULT 1,
total_visits BIGINT NOT NULL DEFAULT 0,
updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
INSERT INTO site_stats (id, total_visits) VALUES (1, 0) ON CONFLICT (id) DO NOTHING;
-- ============================================================
-- 11. Platform logs
-- ============================================================
-- Singleton row (id = 1) controlling how long platform logs are retained.
CREATE TABLE IF NOT EXISTS platform_log_settings (
id INTEGER PRIMARY KEY DEFAULT 1,
retention_days INTEGER NOT NULL DEFAULT 30,
updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
INSERT INTO platform_log_settings (id, retention_days)
VALUES (1, 30)
ON CONFLICT (id) DO NOTHING;
-- Append-only audit/operations log; `metadata` carries structured,
-- action-specific details as JSON.
CREATE TABLE IF NOT EXISTS platform_logs (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
type VARCHAR(32) NOT NULL,
level VARCHAR(16) NOT NULL DEFAULT 'info',
action VARCHAR(128) NOT NULL,
message TEXT NOT NULL,
user_id UUID,
user_name VARCHAR(255),
user_email VARCHAR(255),
target_type VARCHAR(64),
target_id VARCHAR(255),
ip_address VARCHAR(64),
user_agent TEXT,
metadata JSONB NOT NULL DEFAULT '{}'::jsonb,
created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
-- Log-browsing indexes: filter by type / level / user, newest first.
CREATE INDEX IF NOT EXISTS platform_logs_type_created_idx ON platform_logs (type, created_at DESC);
CREATE INDEX IF NOT EXISTS platform_logs_level_created_idx ON platform_logs (level, created_at DESC);
CREATE INDEX IF NOT EXISTS platform_logs_user_created_idx ON platform_logs (user_id, created_at DESC);
CREATE INDEX IF NOT EXISTS platform_logs_created_idx ON platform_logs (created_at DESC);
-- ============================================================
-- 12. API provider and recommended-model configuration
-- ============================================================
CREATE TABLE IF NOT EXISTS api_providers (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
name VARCHAR(128) NOT NULL UNIQUE,
default_api_url TEXT,
default_model VARCHAR(255),
type VARCHAR(16) NOT NULL DEFAULT 'image',
website TEXT,
is_active BOOLEAN NOT NULL DEFAULT true,
sort_order INTEGER NOT NULL DEFAULT 0,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ
);
-- Models suggested to users in the UI; optionally tied to a provider.
CREATE TABLE IF NOT EXISTS model_recommendations (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
model_name VARCHAR(255) NOT NULL,
display_name VARCHAR(255),
type VARCHAR(16) NOT NULL DEFAULT 'image',
provider_id UUID REFERENCES api_providers(id) ON DELETE SET NULL,
is_active BOOLEAN NOT NULL DEFAULT true,
sort_order INTEGER NOT NULL DEFAULT 0,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ
);
CREATE INDEX IF NOT EXISTS api_providers_active_sort_idx ON api_providers (is_active, sort_order);
CREATE INDEX IF NOT EXISTS model_recommendations_active_type_sort_idx ON model_recommendations (is_active, type, sort_order);
CREATE INDEX IF NOT EXISTS model_recommendations_provider_idx ON model_recommendations (provider_id);
-- Seed the built-in provider list (idempotent: existing names are kept).
INSERT INTO api_providers (name, default_api_url, default_model, type, website, is_active, sort_order)
VALUES
('硅基流动', 'https://api.siliconflow.cn/v1/images/generations', 'black-forest-labs/FLUX.1-schnell', 'image', 'https://cloud.siliconflow.cn', true, 10),
('mozheAPI', 'https://openai.mozhevip.top', '', 'image', 'https://openai.mozhevip.top', true, 20),
('OpenAI', 'https://api.openai.com/v1/images/generations', 'dall-e-3', 'image', NULL, true, 30),
('Stability AI', 'https://api.stability.ai/v1/generation/stable-diffusion-xl/text-to-image', 'stable-diffusion-xl', 'image', NULL, true, 40),
('Midjourney', '', 'midjourney-v6', 'image', NULL, true, 50),
('Runway', 'https://api.runwayml.com/v1/image_to_video', 'gen-3-alpha', 'video', NULL, true, 60),
('Pika', '', 'pika-1.0', 'video', NULL, true, 70),
('Kling', '', 'kling-v1', 'video', NULL, true, 80),
('DeepSeek', 'https://api.deepseek.com/v1/chat/completions', 'deepseek-chat', 'text', NULL, true, 90),
('OpenAI GPT', 'https://api.openai.com/v1/chat/completions', 'gpt-4o', 'text', NULL, true, 100),
('自定义', '', '', 'image', NULL, true, 999)
ON CONFLICT (name) DO NOTHING;
-- Seed one default recommended image model; guarded with NOT EXISTS to
-- stay idempotent (model_recommendations has no unique key for ON CONFLICT).
INSERT INTO model_recommendations (model_name, display_name, type, provider_id, is_active, sort_order)
SELECT 'gpt-image-2', 'gpt-image-2', 'image', NULL, true, 10
WHERE NOT EXISTS (
SELECT 1 FROM model_recommendations
WHERE model_name = 'gpt-image-2' AND type = 'image' AND provider_id IS NULL
);
-- ============================================================
-- Idempotent patches for upgrading older database schemas
-- (safe to re-run: every statement uses IF NOT EXISTS / ON CONFLICT).
-- ============================================================
ALTER TABLE profiles
ADD COLUMN IF NOT EXISTS email_verified BOOLEAN NOT NULL DEFAULT false,
ADD COLUMN IF NOT EXISTS email_verified_at TIMESTAMPTZ,
ADD COLUMN IF NOT EXISTS email_bound_at TIMESTAMPTZ,
ADD COLUMN IF NOT EXISTS email_sender_domain VARCHAR(255),
ADD COLUMN IF NOT EXISTS preferred_theme VARCHAR(16) NOT NULL DEFAULT 'dark';
-- Normalize any legacy/unknown theme values to the 'dark' default.
UPDATE profiles
SET preferred_theme = 'dark'
WHERE preferred_theme IS NULL
OR preferred_theme NOT IN ('dark', 'light');
ALTER TABLE works
ADD COLUMN IF NOT EXISTS views_count INTEGER NOT NULL DEFAULT 0,
ADD COLUMN IF NOT EXISTS updated_at TIMESTAMPTZ;
ALTER TABLE user_api_keys
ADD COLUMN IF NOT EXISTS supplier_name VARCHAR(128),
ADD COLUMN IF NOT EXISTS note TEXT NOT NULL DEFAULT '',
ADD COLUMN IF NOT EXISTS type VARCHAR(16) NOT NULL DEFAULT 'image';
ALTER TABLE site_config
ADD COLUMN IF NOT EXISTS site_description TEXT NOT NULL DEFAULT '',
ADD COLUMN IF NOT EXISTS site_keywords TEXT NOT NULL DEFAULT '',
ADD COLUMN IF NOT EXISTS announcement TEXT NOT NULL DEFAULT '',
ADD COLUMN IF NOT EXISTS membership_enabled BOOLEAN NOT NULL DEFAULT TRUE,
ADD COLUMN IF NOT EXISTS terms_of_service TEXT NOT NULL DEFAULT '',
ADD COLUMN IF NOT EXISTS privacy_policy TEXT NOT NULL DEFAULT '',
ADD COLUMN IF NOT EXISTS about_us TEXT NOT NULL DEFAULT '',
ADD COLUMN IF NOT EXISTS help_center TEXT NOT NULL DEFAULT '',
ADD COLUMN IF NOT EXISTS filing_info TEXT NOT NULL DEFAULT '',
ADD COLUMN IF NOT EXISTS filing_url TEXT NOT NULL DEFAULT '',
ADD COLUMN IF NOT EXISTS public_security_filing_info TEXT NOT NULL DEFAULT '',
ADD COLUMN IF NOT EXISTS public_security_filing_url TEXT NOT NULL DEFAULT '',
ADD COLUMN IF NOT EXISTS log_retention_days INTEGER NOT NULL DEFAULT 30;
ALTER TABLE generation_jobs
ADD COLUMN IF NOT EXISTS user_id UUID,
ADD COLUMN IF NOT EXISTS provider VARCHAR(128),
ADD COLUMN IF NOT EXISTS model_name VARCHAR(255),
ADD COLUMN IF NOT EXISTS api_url TEXT,
ADD COLUMN IF NOT EXISTS progress JSONB NOT NULL DEFAULT '{}'::jsonb;
-- Re-create these indexes after the column patches above; duplicates of
-- the earlier definitions are harmless thanks to IF NOT EXISTS.
CREATE INDEX IF NOT EXISTS generation_jobs_user_created_idx ON generation_jobs (user_id, created_at DESC);
CREATE INDEX IF NOT EXISTS generation_jobs_provider_model_created_idx ON generation_jobs (type, provider, model_name, created_at DESC);
-- Platform-managed API configurations offered to all users;
-- credits_per_use is the credit price charged per generation.
CREATE TABLE IF NOT EXISTS system_api_configs (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
provider VARCHAR(128),
name VARCHAR(255) NOT NULL,
api_url TEXT NOT NULL DEFAULT '',
model_name VARCHAR(255) NOT NULL,
note TEXT NOT NULL DEFAULT '',
api_key_encrypted TEXT NOT NULL DEFAULT '',
api_key_preview VARCHAR(64) NOT NULL DEFAULT '',
type VARCHAR(16) NOT NULL DEFAULT 'image',
credits_per_use INTEGER NOT NULL DEFAULT 10,
is_active BOOLEAN NOT NULL DEFAULT true,
sort_order INTEGER NOT NULL DEFAULT 0,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ
);
CREATE INDEX IF NOT EXISTS system_api_configs_active_type_sort_idx ON system_api_configs (is_active, type, sort_order);
-- Payment channels; secrets live encrypted next to a redacted preview.
CREATE TABLE IF NOT EXISTS payment_methods (
id VARCHAR(64) PRIMARY KEY,
type VARCHAR(32) NOT NULL,
name VARCHAR(128) NOT NULL,
is_active BOOLEAN NOT NULL DEFAULT FALSE,
public_config JSONB NOT NULL DEFAULT '{}'::jsonb,
secret_config_encrypted JSONB NOT NULL DEFAULT '{}'::jsonb,
secret_config_preview JSONB NOT NULL DEFAULT '{}'::jsonb,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ
);
-- Seed the default payment channels (idempotent).
INSERT INTO payment_methods (id, type, name, is_active) VALUES
('pm-alipay', 'alipay', '支付宝', true),
('pm-wechat', 'wechat', '微信支付', false),
('pm-manual', 'manual', '手动转账', false),
('pm-stripe', 'stripe', 'Stripe', false)
ON CONFLICT (id) DO NOTHING;
-- Case-insensitive log lookups by user name / email.
CREATE INDEX IF NOT EXISTS platform_logs_user_name_idx ON platform_logs (LOWER(COALESCE(user_name, '')));
CREATE INDEX IF NOT EXISTS platform_logs_user_email_idx ON platform_logs (LOWER(COALESCE(user_email, '')));
ALTER TABLE announcements
ADD COLUMN IF NOT EXISTS type VARCHAR(32) NOT NULL DEFAULT 'site';
-- Admin-only tables: RLS is enabled but no permissive policies are
-- created, so regular clients get no access; presumably the admin code
-- path uses the service-role connection (which bypasses RLS) — see the
-- site_config comment further below. TODO confirm against the app code.
ALTER TABLE platform_log_settings ENABLE ROW LEVEL SECURITY;
ALTER TABLE platform_logs ENABLE ROW LEVEL SECURITY;
ALTER TABLE api_providers ENABLE ROW LEVEL SECURITY;
ALTER TABLE model_recommendations ENABLE ROW LEVEL SECURITY;
ALTER TABLE system_api_configs ENABLE ROW LEVEL SECURITY;
ALTER TABLE payment_methods ENABLE ROW LEVEL SECURITY;
-- ============================================================
-- Row Level Security (RLS) policies
-- ============================================================
-- Enable RLS on all user-facing tables.
ALTER TABLE profiles ENABLE ROW LEVEL SECURITY;
ALTER TABLE works ENABLE ROW LEVEL SECURITY;
ALTER TABLE credit_transactions ENABLE ROW LEVEL SECURITY;
ALTER TABLE orders ENABLE ROW LEVEL SECURITY;
ALTER TABLE user_api_keys ENABLE ROW LEVEL SECURITY;
ALTER TABLE work_likes ENABLE ROW LEVEL SECURITY;
ALTER TABLE site_config ENABLE ROW LEVEL SECURITY;
ALTER TABLE announcements ENABLE ROW LEVEL SECURITY;
ALTER TABLE site_stats ENABLE ROW LEVEL SECURITY;
-- Drop any pre-existing policies so the CREATE POLICY statements below can
-- be re-run safely (CREATE POLICY has no IF NOT EXISTS form here). Note that
-- the *_write_auth policies are legacy names that are dropped and
-- intentionally not recreated.
DROP POLICY IF EXISTS "profiles_read_own" ON profiles;
DROP POLICY IF EXISTS "profiles_update_own" ON profiles;
DROP POLICY IF EXISTS "profiles_admin_all" ON profiles;
DROP POLICY IF EXISTS "works_read_public" ON works;
DROP POLICY IF EXISTS "works_insert_own" ON works;
DROP POLICY IF EXISTS "works_update_own" ON works;
DROP POLICY IF EXISTS "works_delete_own" ON works;
DROP POLICY IF EXISTS "works_admin_all" ON works;
DROP POLICY IF EXISTS "credit_transactions_read_own" ON credit_transactions;
DROP POLICY IF EXISTS "credit_transactions_admin_all" ON credit_transactions;
DROP POLICY IF EXISTS "orders_read_own" ON orders;
DROP POLICY IF EXISTS "orders_insert_own" ON orders;
DROP POLICY IF EXISTS "orders_admin_all" ON orders;
DROP POLICY IF EXISTS "user_api_keys_read_own" ON user_api_keys;
DROP POLICY IF EXISTS "user_api_keys_insert_own" ON user_api_keys;
DROP POLICY IF EXISTS "user_api_keys_update_own" ON user_api_keys;
DROP POLICY IF EXISTS "user_api_keys_delete_own" ON user_api_keys;
DROP POLICY IF EXISTS "work_likes_read_all" ON work_likes;
DROP POLICY IF EXISTS "work_likes_insert_own" ON work_likes;
DROP POLICY IF EXISTS "work_likes_delete_own" ON work_likes;
DROP POLICY IF EXISTS "site_config_read_all" ON site_config;
DROP POLICY IF EXISTS "site_config_write_auth" ON site_config;
DROP POLICY IF EXISTS "site_config_admin_write" ON site_config;
DROP POLICY IF EXISTS "announcements_read_all" ON announcements;
DROP POLICY IF EXISTS "announcements_write_auth" ON announcements;
DROP POLICY IF EXISTS "announcements_admin_write" ON announcements;
DROP POLICY IF EXISTS "site_stats_read_all" ON site_stats;
DROP POLICY IF EXISTS "site_stats_write_auth" ON site_stats;
-- profiles: users may read/update their own row; admins may do anything.
-- NOTE: the admin check must NOT subquery `profiles` directly inside a
-- policy that is itself ON `profiles` — PostgreSQL re-enters the same
-- policy while evaluating the subquery and aborts with
-- "infinite recursion detected in policy for relation profiles".
-- A SECURITY DEFINER helper runs as the function owner and therefore
-- bypasses RLS, breaking the cycle. search_path is pinned so the
-- definer function cannot be hijacked via a malicious schema.
CREATE OR REPLACE FUNCTION is_admin()
RETURNS BOOLEAN AS $$
SELECT EXISTS (SELECT 1 FROM profiles WHERE id = auth.uid() AND role = 'admin');
$$ LANGUAGE sql STABLE SECURITY DEFINER SET search_path = public;
CREATE POLICY "profiles_read_own" ON profiles FOR SELECT USING (auth.uid() = id);
CREATE POLICY "profiles_update_own" ON profiles FOR UPDATE USING (auth.uid() = id);
CREATE POLICY "profiles_admin_all" ON profiles FOR ALL USING (
is_admin()
) WITH CHECK (
is_admin()
);
-- works: owners manage their own works; public works are readable by all.
CREATE POLICY "works_read_public" ON works FOR SELECT USING (is_public = true OR auth.uid() = user_id);
CREATE POLICY "works_insert_own" ON works FOR INSERT WITH CHECK (auth.uid() = user_id);
CREATE POLICY "works_update_own" ON works FOR UPDATE USING (auth.uid() = user_id);
CREATE POLICY "works_delete_own" ON works FOR DELETE USING (auth.uid() = user_id);
CREATE POLICY "works_admin_all" ON works FOR ALL USING (
EXISTS (SELECT 1 FROM profiles WHERE id = auth.uid() AND role = 'admin')
) WITH CHECK (
EXISTS (SELECT 1 FROM profiles WHERE id = auth.uid() AND role = 'admin')
);
-- credit_transactions: users may read their own entries; admins see all.
CREATE POLICY "credit_transactions_read_own" ON credit_transactions FOR SELECT USING (auth.uid() = user_id);
CREATE POLICY "credit_transactions_admin_all" ON credit_transactions FOR ALL USING (
EXISTS (SELECT 1 FROM profiles WHERE id = auth.uid() AND role = 'admin')
) WITH CHECK (
EXISTS (SELECT 1 FROM profiles WHERE id = auth.uid() AND role = 'admin')
);
-- orders: users may read and create their own orders; no user-level
-- UPDATE/DELETE policy — status changes are admin-only.
CREATE POLICY "orders_read_own" ON orders FOR SELECT USING (auth.uid() = user_id);
CREATE POLICY "orders_insert_own" ON orders FOR INSERT WITH CHECK (auth.uid() = user_id);
CREATE POLICY "orders_admin_all" ON orders FOR ALL USING (
EXISTS (SELECT 1 FROM profiles WHERE id = auth.uid() AND role = 'admin')
) WITH CHECK (
EXISTS (SELECT 1 FROM profiles WHERE id = auth.uid() AND role = 'admin')
);
-- user_api_keys: users fully manage their own keys; no admin policy.
CREATE POLICY "user_api_keys_read_own" ON user_api_keys FOR SELECT USING (auth.uid() = user_id);
CREATE POLICY "user_api_keys_insert_own" ON user_api_keys FOR INSERT WITH CHECK (auth.uid() = user_id);
CREATE POLICY "user_api_keys_update_own" ON user_api_keys FOR UPDATE USING (auth.uid() = user_id);
CREATE POLICY "user_api_keys_delete_own" ON user_api_keys FOR DELETE USING (auth.uid() = user_id);
-- work_likes: anyone may read; authenticated users may like/unlike as
-- themselves only.
CREATE POLICY "work_likes_read_all" ON work_likes FOR SELECT USING (true);
CREATE POLICY "work_likes_insert_own" ON work_likes FOR INSERT WITH CHECK (auth.uid() = user_id);
CREATE POLICY "work_likes_delete_own" ON work_likes FOR DELETE USING (auth.uid() = user_id);
-- site_config: world-readable; writes restricted to admins
-- (admin operations go through the service-role key).
CREATE POLICY "site_config_read_all" ON site_config FOR SELECT USING (true);
CREATE POLICY "site_config_admin_write" ON site_config FOR ALL USING (
EXISTS (SELECT 1 FROM profiles WHERE id = auth.uid() AND role = 'admin')
) WITH CHECK (
EXISTS (SELECT 1 FROM profiles WHERE id = auth.uid() AND role = 'admin')
);
-- announcements: world-readable; writes restricted to admins.
CREATE POLICY "announcements_read_all" ON announcements FOR SELECT USING (true);
CREATE POLICY "announcements_admin_write" ON announcements FOR ALL USING (
EXISTS (SELECT 1 FROM profiles WHERE id = auth.uid() AND role = 'admin')
) WITH CHECK (
EXISTS (SELECT 1 FROM profiles WHERE id = auth.uid() AND role = 'admin')
);
-- site_stats: world-readable; visit-count increments happen only through
-- the increment_visits() SECURITY DEFINER function (no UPDATE policy).
CREATE POLICY "site_stats_read_all" ON site_stats FOR SELECT USING (true);
-- ============================================================
-- Supabase Storage buckets (create via the Supabase Dashboard or API)
-- ============================================================
-- The following Storage buckets must be created manually in the dashboard:
-- 1. site-assets (public read) — site logo and favicon
-- 2. works (private) — user-generated image/video files
--
-- Or via SQL (requires service_role privileges):
-- INSERT INTO storage.buckets (id, name, public) VALUES ('site-assets', 'site-assets', true) ON CONFLICT DO NOTHING;
-- INSERT INTO storage.buckets (id, name, public) VALUES ('works', 'works', false) ON CONFLICT DO NOTHING;
-- ============================================================
-- Trigger: keep the updated_at column current on every row update
-- ============================================================
CREATE OR REPLACE FUNCTION update_updated_at()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = now();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Drop and recreate the triggers so this script stays re-runnable.
DROP TRIGGER IF EXISTS profiles_updated_at ON profiles;
DROP TRIGGER IF EXISTS works_updated_at ON works;
DROP TRIGGER IF EXISTS orders_updated_at ON orders;
DROP TRIGGER IF EXISTS user_api_keys_updated_at ON user_api_keys;
DROP TRIGGER IF EXISTS site_config_updated_at ON site_config;
DROP TRIGGER IF EXISTS announcements_updated_at ON announcements;
CREATE TRIGGER profiles_updated_at BEFORE UPDATE ON profiles FOR EACH ROW EXECUTE FUNCTION update_updated_at();
CREATE TRIGGER works_updated_at BEFORE UPDATE ON works FOR EACH ROW EXECUTE FUNCTION update_updated_at();
CREATE TRIGGER orders_updated_at BEFORE UPDATE ON orders FOR EACH ROW EXECUTE FUNCTION update_updated_at();
CREATE TRIGGER user_api_keys_updated_at BEFORE UPDATE ON user_api_keys FOR EACH ROW EXECUTE FUNCTION update_updated_at();
CREATE TRIGGER site_config_updated_at BEFORE UPDATE ON site_config FOR EACH ROW EXECUTE FUNCTION update_updated_at();
CREATE TRIGGER announcements_updated_at BEFORE UPDATE ON announcements FOR EACH ROW EXECUTE FUNCTION update_updated_at();
-- ============================================================
-- Trigger: automatically create a profile for each newly registered user
-- (only takes effect when Supabase Auth is in use)
-- ============================================================
CREATE OR REPLACE FUNCTION handle_new_user()
RETURNS TRIGGER AS $$
BEGIN
-- Create the profile row; the nickname falls back to the email local part.
INSERT INTO profiles (id, email, nickname, role, membership_tier, credits_balance, daily_quota_limit)
VALUES (
NEW.id,
NEW.email,
COALESCE(NEW.raw_user_meta_data->>'nickname', split_part(NEW.email, '@', 1)),
'user',
'free',
10, -- sign-up gift: 10 credits
5 -- daily quota: 5 uses
)
ON CONFLICT (id) DO NOTHING;
-- Record the sign-up credit gift; the NOT EXISTS guard keeps a re-fired
-- trigger from duplicating the transaction row.
INSERT INTO credit_transactions (user_id, amount, balance_after, type, description)
SELECT NEW.id, 10, 10, 'gift', '新用户注册奖励'
WHERE NOT EXISTS (
SELECT 1 FROM credit_transactions
WHERE user_id = NEW.id AND type = 'gift' AND description = '新用户注册奖励'
);
RETURN NEW;
END;
$$ LANGUAGE plpgsql SECURITY DEFINER;
DROP TRIGGER IF EXISTS on_auth_user_created ON auth.users;
CREATE TRIGGER on_auth_user_created
AFTER INSERT ON auth.users
FOR EACH ROW EXECUTE FUNCTION handle_new_user();
-- ============================================================
-- Initialize the administrator account (optional)
-- After registering the admin user, promote it manually with:
-- UPDATE profiles SET role = 'admin' WHERE email = 'your-admin@example.com';
-- ============================================================
-- ============================================================
-- SQL function that atomically increments the site visit counter
-- ============================================================
-- SECURITY DEFINER lets anonymous visitors bump the counter even though
-- site_stats carries no UPDATE policy (RLS is bypassed inside the function).
-- Returns the new total, or NULL if the singleton row (id = 1) is missing.
CREATE OR REPLACE FUNCTION increment_visits()
RETURNS BIGINT AS $$
DECLARE
new_count BIGINT;
BEGIN
UPDATE site_stats SET total_visits = total_visits + 1, updated_at = now() WHERE id = 1
RETURNING total_visits INTO new_count;
RETURN new_count;
END;
$$ LANGUAGE plpgsql SECURITY DEFINER;
-- Done
SELECT 'Database initialization completed successfully!' AS status;

12
scripts/prepare.sh Normal file
View File

@@ -0,0 +1,12 @@
#!/bin/bash
# prepare.sh — install workspace dependencies before development.
set -Eeuo pipefail
# Fall back to the current directory when the platform does not export
# an explicit workspace path.
COZE_WORKSPACE_PATH="${COZE_WORKSPACE_PATH:-$(pwd)}"
cd "${COZE_WORKSPACE_PATH}"
echo "Installing dependencies..."
pnpm install --prefer-frozen-lockfile --prefer-offline --loglevel debug --reporter=append-only
# Repair platform-managed binaries when the coze CLI is present and
# supports the check-bins subcommand; otherwise skip silently.
if command -v coze > /dev/null 2>&1 && coze check-bins --help > /dev/null 2>&1; then
coze check-bins --fix
fi

46
scripts/start.sh Normal file
View File

@@ -0,0 +1,46 @@
#!/bin/bash
# start.sh — launch the built server (dist/server.js) for deployment.
# Usage: start.sh [port]   (defaults to 5000; DEPLOY_RUN_PORT wins if set)
set -Eeuo pipefail
COZE_WORKSPACE_PATH="${COZE_WORKSPACE_PATH:-$(pwd)}"
# Load environment variables from .env.local if it exists. PM2 role-specific
# values are restored afterwards so backend/console services keep their ports.
PM2_DEPLOY_RUN_PORT="${DEPLOY_RUN_PORT:-}"
PM2_APP_RUNTIME_ROLE="${APP_RUNTIME_ROLE:-}"
PM2_BACKEND_INTERNAL_URL="${BACKEND_INTERNAL_URL:-}"
PM2_CONSOLE_INTERNAL_URL="${CONSOLE_INTERNAL_URL:-}"
if [ -f "${COZE_WORKSPACE_PATH}/.env.local" ]; then
# .env.local may reference unset variables; relax nounset while sourcing.
set +u
set -a
# shellcheck disable=SC1091
source "${COZE_WORKSPACE_PATH}/.env.local"
set +a
set -u
fi
# Restore the PM2-provided values so .env.local cannot clobber them.
[ -n "${PM2_DEPLOY_RUN_PORT}" ] && DEPLOY_RUN_PORT="${PM2_DEPLOY_RUN_PORT}"
[ -n "${PM2_APP_RUNTIME_ROLE}" ] && APP_RUNTIME_ROLE="${PM2_APP_RUNTIME_ROLE}"
[ -n "${PM2_BACKEND_INTERNAL_URL}" ] && BACKEND_INTERNAL_URL="${PM2_BACKEND_INTERNAL_URL}"
[ -n "${PM2_CONSOLE_INTERNAL_URL}" ] && CONSOLE_INTERNAL_URL="${PM2_CONSOLE_INTERNAL_URL}"
# Prefer the platform-pinned Node binary directory when provided.
if [ -n "${DEPLOY_NODE_BIN_DIR:-}" ] && [ -d "${DEPLOY_NODE_BIN_DIR}" ]; then
export PATH="${DEPLOY_NODE_BIN_DIR}:${PATH}"
fi
PORT=${1:-5000}
DEPLOY_RUN_PORT="${DEPLOY_RUN_PORT:-$PORT}"
APP_RUNTIME_ROLE="${APP_RUNTIME_ROLE:-full}"
start_service() {
cd "${COZE_WORKSPACE_PATH}"
echo "Starting ${APP_RUNTIME_ROLE} HTTP service on port ${DEPLOY_RUN_PORT} for deploy..."
# ":-" guard: under `set -u` an unset COZE_PROJECT_ENV would otherwise
# abort the whole script with an unbound-variable error.
echo "COZE_PROJECT_ENV: ${COZE_PROJECT_ENV:-}"
export NODE_ENV="${NODE_ENV:-production}"
export APP_RUNTIME_ROLE
PORT=${DEPLOY_RUN_PORT} node dist/server.js
}
start_service

5
src/app/about/page.tsx Normal file
View File

@@ -0,0 +1,5 @@
import { SitePolicyPage } from '@/components/site-policy-page';
// Static "About Us" page: delegates rendering to the shared
// SitePolicyPage component with the `about` policy kind.
export default function AboutPage() {
return <SitePolicyPage kind="about" />;
}

5
src/app/admin/page.tsx Normal file
View File

@@ -0,0 +1,5 @@
import { redirect } from 'next/navigation';
// Legacy /admin entry point: forwards to the /console admin dashboard.
// Next.js redirect() throws, so this page never renders any markup.
export default function AdminRedirectPage() {
redirect('/console');
}

View File

@@ -0,0 +1,84 @@
import { timingSafeEqual } from 'node:crypto';
import { NextRequest, NextResponse } from 'next/server';
import { requireAdmin } from '@/lib/admin-auth';
import { getDbClient } from '@/storage/database/local-db';
// Email used to prefer the canonical system administrator when several
// active admin accounts exist.
const DEFAULT_ADMIN_EMAIL = 'admin@example.com';
/**
 * POST — destructive maintenance endpoint that deletes every user (and all
 * of their data) except a single preserved system administrator.
 *
 * Safeguards, in order:
 *  1. requires an authenticated admin session (requireAdmin);
 *  2. must be explicitly enabled via ENABLE_DANGER_ADMIN_CLEAR_USERS=true;
 *  3. requires the admin password in the request body (compared in
 *     constant time);
 *  4. refuses to run when no active admin account can be preserved.
 *
 * All deletes run inside one transaction and roll back on any error.
 */
export async function POST(request: NextRequest) {
const authError = await requireAdmin(request);
if (authError) return authError;
try {
// Hard kill-switch: refuse unless the operator opted in via env flag.
if (process.env.ENABLE_DANGER_ADMIN_CLEAR_USERS !== 'true') {
return NextResponse.json(
{ error: '生产环境已默认禁用清空用户数据功能。如确需执行,请临时设置 ENABLE_DANGER_ADMIN_CLEAR_USERS=true 并完成备份后再操作。' },
{ status: 403 },
);
}
const body = await request.json();
const { password } = body;
const adminPassword = process.env.ADMIN_DEFAULT_PASSWORD || 'admin123';
// Compare the secret in constant time so response latency cannot leak
// how many leading characters matched. timingSafeEqual requires
// equal-length buffers; a length mismatch fails immediately and leaks
// only the length, which is acceptable.
const expected = Buffer.from(adminPassword);
const provided = Buffer.from(typeof password === 'string' ? password : '');
const passwordMatches = expected.length === provided.length && timingSafeEqual(expected, provided);
if (!passwordMatches) {
return NextResponse.json({ error: '管理员密码错误' }, { status: 401 });
}
const client = await getDbClient();
try {
await client.query('BEGIN');
// Choose the admin to keep: prefer DEFAULT_ADMIN_EMAIL, else the
// oldest active admin account.
const adminResult = await client.query(
`SELECT id, email, nickname FROM profiles
WHERE role = 'admin' AND is_active = true
ORDER BY CASE WHEN email = $1 THEN 0 ELSE 1 END, created_at ASC
LIMIT 1`,
[DEFAULT_ADMIN_EMAIL],
);
if (adminResult.rows.length === 0) {
await client.query('ROLLBACK');
return NextResponse.json({ error: '未找到可保留的系统管理员账号,已拒绝清理' }, { status: 409 });
}
const admin = adminResult.rows[0];
// Delete dependent rows before profiles / auth.users so foreign keys
// are satisfied; every statement spares the preserved admin.
await client.query('DELETE FROM credit_transactions WHERE user_id <> $1', [admin.id]);
await client.query('DELETE FROM work_likes WHERE user_id <> $1', [admin.id]);
await client.query('DELETE FROM works WHERE user_id <> $1', [admin.id]);
await client.query('DELETE FROM user_api_keys WHERE user_id <> $1', [admin.id]);
await client.query('DELETE FROM orders WHERE user_id IS NOT NULL AND user_id <> $1', [admin.id]);
await client.query('DELETE FROM profiles WHERE id <> $1', [admin.id]);
await client.query('DELETE FROM auth.users WHERE id <> $1', [admin.id]);
// Re-assert the preserved admin's privileges and reset its quota.
await client.query(
`UPDATE profiles
SET email = $2,
nickname = COALESCE(NULLIF(nickname, ''), $3),
role = 'admin',
membership_tier = 'enterprise',
credits_balance = GREATEST(COALESCE(credits_balance, 0), 9999),
daily_quota_limit = GREATEST(COALESCE(daily_quota_limit, 0), 999),
daily_quota_used = 0,
is_active = true,
updated_at = NOW()
WHERE id = $1`,
[admin.id, admin.email || DEFAULT_ADMIN_EMAIL, admin.nickname || '管理员'],
);
await client.query('COMMIT');
return NextResponse.json({ success: true, message: '所有非系统管理员用户数据已清除,系统管理员已保留' });
} catch (error) {
await client.query('ROLLBACK');
throw error;
} finally {
// Always return the connection to the pool, even after a rollback.
client.release();
}
} catch (error: unknown) {
const message = error instanceof Error ? error.message : '清除用户数据失败';
console.error('[Clear Users Error]', message);
return NextResponse.json({ error: message }, { status: 500 });
}
}

View File

@@ -0,0 +1,285 @@
import { NextRequest, NextResponse } from 'next/server';
import type { PoolClient, QueryResult } from 'pg';
import { requireAdmin } from '@/lib/admin-auth';
import { getDbClient } from '@/storage/database/local-db';
// Generic shape of a database row returned by the dashboard queries.
type DbRow = Record<string, unknown>;
/**
 * Run a query and degrade gracefully: on failure, log the error under
 * `label` and hand back an empty SELECT-shaped result instead of throwing,
 * so one broken dashboard panel cannot take down the whole endpoint.
 */
async function safeQuery(client: PoolClient, label: string, sql: string, params: unknown[] = []): Promise<QueryResult<DbRow>> {
try {
const result = await client.query(sql, params);
return result;
} catch (error) {
console.error(`[admin/dashboard] ${label} failed:`, error);
const empty: QueryResult<DbRow> = { rows: [], rowCount: 0, command: 'SELECT', oid: 0, fields: [] };
return empty;
}
}
/**
 * Coerce an arbitrary DB value to a finite number.
 * null/undefined, NaN and ±Infinity all collapse to 0.
 */
function numberValue(value: unknown): number {
if (value === null || value === undefined) {
return 0;
}
const coerced = Number(value);
return Number.isFinite(coerced) ? coerced : 0;
}
/** Return the first row of a query result, or an empty object if none. */
function firstRow(result: QueryResult<DbRow>): DbRow {
const [head] = result.rows;
return head || {};
}
/**
 * Look up the count for a given status in GROUP BY status rows;
 * missing statuses count as 0.
 */
function statusCount(rows: DbRow[], status: string): number {
for (const row of rows) {
if (row.status === status) {
return numberValue(row.count);
}
}
return 0;
}
export async function GET(request: NextRequest) {
const authError = await requireAdmin(request);
if (authError) return authError;
const client = await getDbClient();
try {
const [
platformResult,
userResult,
workResult,
taskStatusResult,
latestTaskResult,
orderStatusResult,
orderRevenueResult,
latestOrderResult,
storageResult,
logResult,
providerResult,
recommendationResult,
userApiKeyResult,
announcementResult,
] = await Promise.all([
safeQuery(client, 'platform summary', `
SELECT
COALESCE((SELECT total_visits FROM site_stats WHERE id = 1 LIMIT 1), 0)::bigint AS total_visits,
NOW() AS database_time
`),
safeQuery(client, 'user summary', `
SELECT
COUNT(*)::int AS total,
COUNT(*) FILTER (WHERE COALESCE(is_active, true) = true)::int AS active,
COUNT(*) FILTER (WHERE COALESCE(is_active, true) = false)::int AS disabled,
COUNT(*) FILTER (WHERE COALESCE(role, 'user') IN ('admin', 'enterprise_admin'))::int AS admins,
COUNT(*) FILTER (
WHERE COALESCE(role, 'user') = 'vip'
OR COALESCE(membership_tier, 'free') NOT IN ('free', '')
)::int AS members,
COUNT(*) FILTER (WHERE created_at >= NOW() - INTERVAL '7 days')::int AS created_7d
FROM profiles
`),
safeQuery(client, 'work summary', `
SELECT
COUNT(*)::int AS total,
COUNT(*) FILTER (WHERE is_public = true)::int AS public,
COUNT(*) FILTER (WHERE is_public = false)::int AS private,
COUNT(*) FILTER (WHERE status = 'completed')::int AS completed,
COUNT(*) FILTER (WHERE status = 'failed')::int AS failed,
COUNT(*) FILTER (WHERE result_url IS NOT NULL AND result_url <> '')::int AS with_result_url,
COUNT(*) FILTER (WHERE created_at >= NOW() - INTERVAL '7 days')::int AS created_7d,
COUNT(*) FILTER (WHERE type = 'text2img')::int AS text2img,
COUNT(*) FILTER (WHERE type = 'img2img')::int AS img2img,
COUNT(*) FILTER (WHERE type = 'text2video')::int AS text2video,
COUNT(*) FILTER (WHERE type = 'img2video')::int AS img2video
FROM works
`),
safeQuery(client, 'task status summary', `
SELECT status, COUNT(*)::int AS count
FROM generation_jobs
GROUP BY status
`),
safeQuery(client, 'latest tasks', `
SELECT id, type, status, error, created_at, updated_at
FROM generation_jobs
ORDER BY created_at DESC
LIMIT 6
`),
safeQuery(client, 'order status summary', `
SELECT status, COUNT(*)::int AS count
FROM orders
GROUP BY status
`),
safeQuery(client, 'order revenue summary', `
SELECT
COALESCE(SUM(amount) FILTER (WHERE status = 'paid'), 0)::numeric AS paid_revenue,
COALESCE(SUM(amount) FILTER (
WHERE status = 'paid' AND COALESCE(paid_at, created_at) >= NOW() - INTERVAL '7 days'
), 0)::numeric AS paid_revenue_7d
FROM orders
`),
safeQuery(client, 'latest orders', `
SELECT id, order_no, product_name, amount, status, created_at
FROM orders
ORDER BY created_at DESC
LIMIT 6
`),
safeQuery(client, 'storage health', `
SELECT
COUNT(*)::int AS total,
COUNT(*) FILTER (WHERE result_url IS NOT NULL AND result_url <> '')::int AS persisted
FROM works
`),
safeQuery(client, 'log health', `
SELECT
COUNT(*)::int AS total,
COUNT(*) FILTER (WHERE level = 'error')::int AS errors,
COUNT(*) FILTER (WHERE created_at >= NOW() - INTERVAL '24 hours')::int AS created_24h
FROM platform_logs
`),
safeQuery(client, 'provider summary', `
SELECT
COUNT(*)::int AS total,
COUNT(*) FILTER (WHERE is_active = true)::int AS active,
COUNT(*) FILTER (WHERE is_active = false)::int AS inactive,
COUNT(*) FILTER (WHERE type = 'image')::int AS image,
COUNT(*) FILTER (WHERE type = 'video')::int AS video,
COUNT(*) FILTER (WHERE type = 'text')::int AS text,
COUNT(*) FILTER (
WHERE is_active = true
AND (COALESCE(default_api_url, '') = '' OR COALESCE(default_model, '') = '')
)::int AS incomplete
FROM api_providers
`),
safeQuery(client, 'model recommendation summary', `
SELECT
COUNT(*)::int AS total,
COUNT(*) FILTER (WHERE is_active = true)::int AS active
FROM model_recommendations
`),
safeQuery(client, 'user api key summary', `
SELECT
COUNT(*)::int AS total,
COUNT(*) FILTER (WHERE is_active = true)::int AS active
FROM user_api_keys
`),
safeQuery(client, 'announcement summary', `
SELECT
COUNT(*)::int AS total,
COUNT(*) FILTER (
WHERE is_active = true
AND (starts_at IS NULL OR starts_at <= NOW())
AND (expires_at IS NULL OR expires_at >= NOW())
)::int AS active,
COUNT(*) FILTER (WHERE is_active = true AND starts_at > NOW())::int AS scheduled,
COUNT(*) FILTER (WHERE expires_at < NOW())::int AS expired
FROM announcements
`),
]);
const platform = firstRow(platformResult);
const users = firstRow(userResult);
const works = firstRow(workResult);
const orderRevenue = firstRow(orderRevenueResult);
const storage = firstRow(storageResult);
const logs = firstRow(logResult);
const providers = firstRow(providerResult);
const recommendations = firstRow(recommendationResult);
const userApiKeys = firstRow(userApiKeyResult);
const announcements = firstRow(announcementResult);
const taskRows = taskStatusResult.rows;
const orderRows = orderStatusResult.rows;
const totalTasks = taskRows.reduce((sum, row) => sum + numberValue(row.count), 0);
const totalOrders = orderRows.reduce((sum, row) => sum + numberValue(row.count), 0);
const totalWorks = numberValue(works.total);
return NextResponse.json({
generatedAt: new Date().toISOString(),
platform: {
totalVisits: numberValue(platform.total_visits),
databaseTime: platform.database_time || null,
},
users: {
total: numberValue(users.total),
active: numberValue(users.active),
disabled: numberValue(users.disabled),
admins: numberValue(users.admins),
members: numberValue(users.members),
created7d: numberValue(users.created_7d),
},
works: {
total: totalWorks,
public: numberValue(works.public),
private: numberValue(works.private),
completed: numberValue(works.completed),
failed: numberValue(works.failed),
withResultUrl: numberValue(works.with_result_url),
created7d: numberValue(works.created_7d),
resultUrlCoverage: totalWorks > 0 ? numberValue(works.with_result_url) / totalWorks : 1,
byType: {
text2img: numberValue(works.text2img),
img2img: numberValue(works.img2img),
text2video: numberValue(works.text2video),
img2video: numberValue(works.img2video),
},
},
tasks: {
total: totalTasks,
queued: statusCount(taskRows, 'queued'),
running: statusCount(taskRows, 'running'),
succeeded: statusCount(taskRows, 'succeeded'),
failed: statusCount(taskRows, 'failed'),
latest: latestTaskResult.rows.map(row => ({
id: String(row.id || ''),
type: String(row.type || ''),
status: String(row.status || ''),
error: row.error ? String(row.error) : null,
createdAt: row.created_at || null,
updatedAt: row.updated_at || null,
})),
},
orders: {
total: totalOrders,
pending: statusCount(orderRows, 'pending'),
paid: statusCount(orderRows, 'paid'),
cancelled: statusCount(orderRows, 'cancelled'),
refunded: statusCount(orderRows, 'refunded'),
paidRevenue: numberValue(orderRevenue.paid_revenue),
paidRevenue7d: numberValue(orderRevenue.paid_revenue_7d),
latest: latestOrderResult.rows.map(row => ({
id: String(row.id || ''),
orderNo: String(row.order_no || ''),
productName: String(row.product_name || ''),
amount: numberValue(row.amount),
status: String(row.status || ''),
createdAt: row.created_at || null,
})),
},
providers: {
total: numberValue(providers.total),
active: numberValue(providers.active),
inactive: numberValue(providers.inactive),
image: numberValue(providers.image),
video: numberValue(providers.video),
text: numberValue(providers.text),
incomplete: numberValue(providers.incomplete),
recommendationsTotal: numberValue(recommendations.total),
recommendationsActive: numberValue(recommendations.active),
userApiKeysTotal: numberValue(userApiKeys.total),
userApiKeysActive: numberValue(userApiKeys.active),
},
announcements: {
total: numberValue(announcements.total),
active: numberValue(announcements.active),
scheduled: numberValue(announcements.scheduled),
expired: numberValue(announcements.expired),
},
system: {
apiHealth: true,
databaseHealth: true,
storageHealth: Boolean(process.env.LOCAL_STORAGE_DIR),
storageDirConfigured: Boolean(process.env.LOCAL_STORAGE_DIR),
worksPersisted: numberValue(storage.persisted),
worksTotal: numberValue(storage.total),
logsTotal: numberValue(logs.total),
logsErrors: numberValue(logs.errors),
logsCreated24h: numberValue(logs.created_24h),
},
});
} catch (error) {
console.error('[admin/dashboard] GET error:', error);
return NextResponse.json({ error: '获取仪表盘数据失败' }, { status: 500 });
} finally {
client.release();
}
}

View File

@@ -0,0 +1,69 @@
import { NextRequest, NextResponse } from 'next/server';
import { requireAdmin } from '@/lib/admin-auth';
import { getDbClient } from '@/storage/database/local-db';
/**
 * Admin-only full data export. Dumps every known table into a single JSON
 * document with a `_meta` header (version, platform tag, timestamp, table
 * list and row counts) that the companion import route validates.
 */
export async function GET(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  try {
    const client = await getDbClient();
    try {
      const data: Record<string, unknown[]> = {};
      // Best-effort dump: a missing/unreadable table exports as an empty array
      // instead of failing the whole export.
      const fetchRows = async (label: string, sql: string) => {
        try {
          const result = await client.query(sql);
          data[label] = result.rows || [];
        } catch {
          data[label] = [];
        }
      };
      // Table names are constants, so interpolating them into SQL is safe here.
      const orderedTables = [
        'profiles',
        'works',
        'credit_transactions',
        'orders',
        'user_api_keys',
        'system_api_configs',
        'payment_methods',
        'work_likes',
        'announcements',
      ];
      for (const table of orderedTables) {
        await fetchRows(table, `SELECT * FROM ${table} ORDER BY created_at ASC`);
      }
      // Singleton/config tables have no created_at ordering.
      await fetchRows('site_config', 'SELECT * FROM site_config');
      await fetchRows('site_stats', 'SELECT * FROM site_stats');
      // Auth accounts: only the columns the import route accepts.
      await fetchRows('auth_users', 'SELECT id, email, created_at, raw_user_meta_data, password_hash FROM auth.users');
      const exportData = {
        _meta: {
          version: '1.0',
          platform: 'miaojing',
          exported_at: new Date().toISOString(),
          tables: Object.keys(data),
          counts: Object.fromEntries(Object.entries(data).map(([k, v]) => [k, v.length])),
        },
        data,
      };
      return NextResponse.json(exportData);
    } finally {
      client.release();
    }
  } catch (err) {
    console.error('[data-export] Error:', err);
    return NextResponse.json({ error: err instanceof Error ? err.message : '导出失败' }, { status: 500 });
  }
}

View File

@@ -0,0 +1,634 @@
import { NextRequest, NextResponse } from 'next/server';
import { requireAdmin } from '@/lib/admin-auth';
import { localStorage } from '@/lib/local-storage';
import { encryptSecret, previewSecret } from '@/lib/server-crypto';
import { getDbClient } from '@/storage/database/local-db';
// Shape of the `_meta` header produced by the companion data-export route.
interface ImportMeta {
  version: string;
  platform: string;
  exported_at: string;
  tables: string[];
  counts: Record<string, number>;
}
// Full import payload: export metadata plus per-table row arrays, with an
// optional flag to skip importing auth accounts.
interface ImportPayload {
  _meta: ImportMeta;
  data: Record<string, unknown[]>;
  options?: {
    skipAuth?: boolean;
  };
}
// Hard cap on rows accepted per table in a single import request.
const MAX_ROWS_PER_TABLE = 5000;
// Canonical 8-4-4-4-12 UUID shape (case-insensitive).
const UUID_REGEX = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
// Tables whose primary key must be a UUID; non-UUID ids are regenerated
// during normalization (see normalizeImportRow).
const UUID_ID_TABLES = new Set([
  'auth.users',
  'profiles',
  'announcements',
  'works',
  'credit_transactions',
  'orders',
  'user_api_keys',
  'system_api_configs',
  'work_likes',
]);
// Column whitelist per table; any other field in an imported row is dropped.
const TABLE_COLUMNS: Record<string, string[]> = {
  profiles: ['id', 'email', 'nickname', 'avatar_url', 'phone', 'role', 'membership_tier', 'membership_expires_at', 'credits_balance', 'daily_quota_used', 'daily_quota_limit', 'is_active', 'preferred_theme', 'created_at', 'updated_at'],
  site_config: ['id', 'site_name', 'site_tab_title', 'site_description', 'site_keywords', 'logo_url', 'favicon_url', 'announcement', 'membership_enabled', 'terms_of_service', 'privacy_policy', 'about_us', 'help_center', 'filing_info', 'filing_url', 'public_security_filing_info', 'public_security_filing_url', 'updated_at'],
  site_stats: ['id', 'total_visits', 'total_users', 'total_generations', 'updated_at'],
  announcements: ['id', 'title', 'content', 'type', 'is_active', 'starts_at', 'expires_at', 'created_at', 'updated_at'],
  works: ['id', 'user_id', 'title', 'type', 'prompt', 'negative_prompt', 'params', 'result_url', 'thumbnail_url', 'width', 'height', 'duration', 'status', 'is_public', 'likes_count', 'views_count', 'created_at', 'updated_at'],
  credit_transactions: ['id', 'user_id', 'amount', 'balance_after', 'type', 'description', 'related_work_id', 'created_at'],
  orders: ['id', 'user_id', 'order_no', 'product_type', 'product_name', 'amount', 'credits_amount', 'status', 'payment_method', 'paid_at', 'created_at', 'updated_at'],
  user_api_keys: ['id', 'user_id', 'provider', 'supplier_name', 'api_url', 'model_name', 'note', 'api_key_encrypted', 'api_key_preview', 'type', 'is_active', 'created_at', 'updated_at'],
  system_api_configs: ['id', 'provider', 'name', 'api_url', 'model_name', 'note', 'api_key_encrypted', 'api_key_preview', 'type', 'credits_per_use', 'is_active', 'sort_order', 'created_at', 'updated_at'],
  payment_methods: ['id', 'type', 'name', 'is_active', 'public_config', 'secret_config_encrypted', 'secret_config_preview', 'created_at', 'updated_at'],
  work_likes: ['id', 'user_id', 'work_id', 'created_at'],
};
// Placeholder owner id used when a row's real owner is unknown; treated as
// "unowned" by the owner-resolution logic below.
const SYSTEM_USER_ID = '00000000-0000-0000-0000-000000000000';
// Columns accepted for rows imported into auth.users.
const AUTH_USER_COLUMNS = ['id', 'email', 'created_at', 'raw_user_meta_data', 'password_hash'];
// ON CONFLICT key columns per table (all keyed by id in this schema).
const CONFLICT_COLUMNS: Record<string, string[]> = {
  'auth.users': ['id'],
  profiles: ['id'],
  site_config: ['id'],
  site_stats: ['id'],
  announcements: ['id'],
  works: ['id'],
  credit_transactions: ['id'],
  orders: ['id'],
  user_api_keys: ['id'],
  system_api_configs: ['id'],
  payment_methods: ['id'],
  work_likes: ['id'],
};
// Per-table outcome counters reported back to the caller.
type ImportResult = { imported: number; skipped: number; errors: string[] };
// Id-remapping state and schema caches shared across one import request.
type ImportContext = {
  userIdMap: Map<string, string>;          // old user id -> local user id
  workIdMap: Map<string, string>;          // old work id -> local work id
  emailUserIdMap: Map<string, string>;     // lowercased email -> local user id
  apiKeyIdMap: Map<string, string>;        // old API key id -> new key id
  apiKeyOwnerIdMap: Map<string, string>;   // old API key id -> owner user id
  columnCache: Map<string, Set<string>>;           // table -> existing columns
  defaultableColumnCache: Map<string, Set<string>>; // table -> NOT NULL cols with defaults
};
/**
 * Admin-only data import. Validates the export envelope, builds the
 * id-remapping context, imports auth accounts first (unless skipped via
 * options.skipAuth) and then every whitelisted table, returning per-table
 * imported/skipped/error counts.
 */
export async function POST(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  try {
    const payload: ImportPayload = await request.json();
    const meta = payload._meta;
    const tables = payload.data;
    const skipAuthImport = payload.options?.skipAuth === true;
    // Reject anything that is not a miaojing export envelope.
    if (!meta || meta.platform !== 'miaojing' || !tables || typeof tables !== 'object') {
      return NextResponse.json({ error: '无效的导入文件:格式不匹配' }, { status: 400 });
    }
    const client = await getDbClient();
    const details: Record<string, ImportResult> = {};
    try {
      const context = await buildImportContext(client, tables);
      // Auth accounts go first so user-id remapping is settled before
      // dependent tables are written.
      if (!skipAuthImport && Array.isArray(tables.auth_users)) {
        details.auth_users = await importRows(client, 'auth.users', AUTH_USER_COLUMNS, tables.auth_users, context);
      } else {
        details.auth_users = {
          imported: 0,
          skipped: Array.isArray(tables.auth_users) ? tables.auth_users.length : 0,
          errors: skipAuthImport ? ['已按选项跳过认证账号导入'] : [],
        };
      }
      for (const [table, allowedColumns] of Object.entries(TABLE_COLUMNS)) {
        const rows = tables[table];
        details[table] = await importRows(client, table, allowedColumns, Array.isArray(rows) ? rows : [], context);
      }
      return NextResponse.json({ success: true, message: '数据导入完成', details, meta });
    } finally {
      client.release();
    }
  } catch (err) {
    console.error('[data-import] Error:', err instanceof Error ? err.message : err);
    return NextResponse.json({ error: err instanceof Error ? err.message : '导入失败' }, { status: 500 });
  }
}
/**
 * Upserts one table's rows. Each row is normalized (id/owner remapping,
 * media persistence), filtered to columns that exist in the live schema,
 * then inserted with ON CONFLICT merge semantics from getMergeAssignments.
 * Returns imported/skipped counts plus per-row error messages.
 */
async function importRows(
  client: Awaited<ReturnType<typeof getDbClient>>,
  table: string,
  allowedColumns: string[],
  rows: unknown[],
  context: ImportContext,
): Promise<ImportResult> {
  // Refuse oversized payloads outright.
  if (rows.length > MAX_ROWS_PER_TABLE) {
    return { imported: 0, skipped: rows.length, errors: [`${table}: 单表最多允许导入 ${MAX_ROWS_PER_TABLE} 条`] };
  }
  let imported = 0;
  let skipped = 0;
  const errors: string[] = [];
  const existingColumns = await getExistingColumns(client, table, context);
  const defaultableColumns = await getDefaultableColumns(client, table, context);
  const effectiveAllowedColumns = allowedColumns.filter(col => existingColumns.has(col));
  for (const rawRow of rows) {
    try {
      // FIX: normalizeImportRow can throw (e.g. while persisting embedded
      // media); it previously ran outside this try, so one bad row aborted
      // the whole import with a 500. Keep it inside so the failure is
      // recorded per-row instead.
      const row = await normalizeImportRow(table, rawRow as Record<string, unknown>, context);
      // Drop null values for NOT NULL columns that have a DB default, letting
      // the default apply instead of violating the constraint.
      const cols = Object.keys(row).filter(col => (
        effectiveAllowedColumns.includes(col)
        && !(row[col] == null && defaultableColumns.has(col))
      ));
      if (!cols.includes('id') || cols.length === 0) {
        skipped++;
        errors.push(`${table}: 缺少 id 或没有允许导入的字段`);
        continue;
      }
      const vals = cols.map(col => row[col]);
      const placeholders = cols.map((_, i) => `$${i + 1}`).join(', ');
      const conflictCols = CONFLICT_COLUMNS[table] || ['id'];
      const mergeAssignments = getMergeAssignments(table, cols);
      const conflictAction = mergeAssignments.length > 0
        ? `DO UPDATE SET ${mergeAssignments.join(', ')}`
        : 'DO NOTHING';
      // Table/column names come from constant whitelists, values are bound.
      const insertResult = await client.query(
        `INSERT INTO ${table} AS target (${cols.join(', ')}) VALUES (${placeholders}) ON CONFLICT (${conflictCols.join(', ')}) ${conflictAction}`,
        vals,
      );
      if ((insertResult.rowCount || 0) > 0) {
        imported++;
      } else {
        skipped++;
      }
    } catch (e) {
      skipped++;
      errors.push(`${table}: ${e instanceof Error ? e.message : 'unknown error'}`);
    }
  }
  return { imported, skipped, errors };
}
/**
 * Pre-scans the import payload and builds the id-remapping state used while
 * inserting rows.
 *
 * - Non-UUID user/work ids get fresh UUIDs (seedUuidMap).
 * - Users are matched to existing local accounts by email (case-insensitive)
 *   so their old ids map onto the local ids.
 * - Works are matched to existing local works by identical result_url.
 * - user_api_keys get new ids when needed, and an old-key-id -> owner map is
 *   built so `custom:<keyId>` model references can be re-owned/remapped.
 *
 * Order matters: user maps are seeded before API-key owner resolution, which
 * depends on them.
 */
async function buildImportContext(
  client: Awaited<ReturnType<typeof getDbClient>>,
  data: Record<string, unknown[]>,
): Promise<ImportContext> {
  const userIdMap = new Map<string, string>();
  const workIdMap = new Map<string, string>();
  const emailUserIdMap = new Map<string, string>();
  const apiKeyIdMap = new Map<string, string>();
  const apiKeyOwnerIdMap = new Map<string, string>();
  const profileRows = Array.isArray(data.profiles) ? data.profiles : [];
  const authRows = Array.isArray(data.auth_users) ? data.auth_users : [];
  const profileEmails = new Map<string, string>();
  // Seed user ids and collect emails from profiles first (profiles win over
  // auth_users when both carry the same email).
  for (const raw of profileRows) {
    const row = raw as Record<string, unknown>;
    seedUuidMap(userIdMap, row.id);
    if (typeof row.id === 'string' && typeof row.email === 'string' && row.email.trim()) {
      const email = row.email.trim().toLowerCase();
      profileEmails.set(email, row.id);
      emailUserIdMap.set(email, userIdMap.get(row.id) || row.id);
    }
  }
  for (const raw of authRows) {
    const row = raw as Record<string, unknown>;
    seedUuidMap(userIdMap, row.id);
    if (typeof row.id === 'string' && typeof row.email === 'string' && row.email.trim() && !profileEmails.has(row.email.trim().toLowerCase())) {
      const email = row.email.trim().toLowerCase();
      profileEmails.set(email, row.id);
      emailUserIdMap.set(email, userIdMap.get(row.id) || row.id);
    }
  }
  // Remap imported user ids onto existing local accounts with the same email.
  if (profileEmails.size > 0) {
    const emails = [...profileEmails.keys()];
    const existing = await client.query(
      'SELECT id, lower(email) AS email FROM profiles WHERE lower(email) = ANY($1)',
      [emails],
    );
    for (const row of existing.rows) {
      const importedId = profileEmails.get(row.email);
      if (importedId && importedId !== row.id) {
        userIdMap.set(importedId, row.id);
        emailUserIdMap.set(row.email, row.id);
      }
    }
  }
  // Refresh email -> user-id entries now that userIdMap may have changed.
  for (const [email, importedId] of profileEmails.entries()) {
    emailUserIdMap.set(email, userIdMap.get(importedId) || importedId);
  }
  const apiKeyRows = Array.isArray(data.user_api_keys) ? data.user_api_keys : [];
  for (const raw of apiKeyRows) {
    const row = raw as Record<string, unknown>;
    const oldId = typeof row.id === 'string' && row.id.trim() ? row.id.trim() : '';
    if (oldId) {
      apiKeyIdMap.set(oldId, isUuid(oldId) ? oldId : crypto.randomUUID());
    }
    const ownerId = findImportedWorkUserId(row);
    // Throwaway context: only emailUserIdMap is needed for the email lookup.
    const ownerByEmail = findUserIdByEmail(row, {
      userIdMap,
      workIdMap,
      emailUserIdMap,
      apiKeyIdMap,
      apiKeyOwnerIdMap,
      columnCache: new Map(),
      defaultableColumnCache: new Map(),
    });
    const mappedOwnerId = ownerId
      ? (userIdMap.get(ownerId) || ownerId)
      : ownerByEmail;
    if (oldId && mappedOwnerId) {
      apiKeyOwnerIdMap.set(oldId, mappedOwnerId);
    }
  }
  const works = Array.isArray(data.works) ? data.works : [];
  const workUrls = new Map<string, string>();
  // Seed work ids; collect non-data result URLs to detect already-imported works.
  for (const raw of works) {
    const row = raw as Record<string, unknown>;
    seedUuidMap(workIdMap, row.id);
    if (typeof row.id === 'string' && typeof row.result_url === 'string' && row.result_url.trim() && !isDataUrl(row.result_url)) {
      workUrls.set(row.result_url.trim(), row.id);
    }
  }
  if (workUrls.size > 0) {
    const existing = await client.query(
      'SELECT id, result_url FROM works WHERE result_url = ANY($1)',
      [[...workUrls.keys()]],
    );
    for (const row of existing.rows) {
      const importedId = workUrls.get(row.result_url);
      if (importedId && importedId !== row.id) {
        workIdMap.set(importedId, row.id);
      }
    }
  }
  return {
    userIdMap,
    workIdMap,
    emailUserIdMap,
    apiKeyIdMap,
    apiKeyOwnerIdMap,
    columnCache: new Map(),
    defaultableColumnCache: new Map(),
  };
}
/**
 * Returns a copy of an imported row with ids/owners remapped to local ids,
 * inline data-URL media persisted to storage, and table-specific defaults
 * and re-encryption applied. Never mutates the input row.
 *
 * FIX: the two separate `table === 'user_api_keys'` branches are merged
 * into one (they were mutually exclusive with the `works` branch anyway),
 * and the owner-lookup helpers are each called once instead of twice
 * (once in the condition, once in the assignment).
 */
async function normalizeImportRow(table: string, row: Record<string, unknown>, context: ImportContext): Promise<Record<string, unknown>> {
  const next = { ...row };
  // Remap foreign keys that referenced ids from the source installation.
  if (typeof next.user_id === 'string' && context.userIdMap.has(next.user_id)) {
    next.user_id = context.userIdMap.get(next.user_id);
  }
  // Unowned rows may still be attributable by an embedded email address.
  if (!next.user_id || next.user_id === SYSTEM_USER_ID) {
    const ownerByEmail = findUserIdByEmail(next, context);
    if (ownerByEmail) {
      next.user_id = ownerByEmail;
    }
  }
  if (typeof next.related_work_id === 'string' && context.workIdMap.has(next.related_work_id)) {
    next.related_work_id = context.workIdMap.get(next.related_work_id);
  }
  if (typeof next.work_id === 'string' && context.workIdMap.has(next.work_id)) {
    next.work_id = context.workIdMap.get(next.work_id);
  }
  if (table === 'auth.users' || table === 'profiles') {
    const currentId = typeof next.id === 'string' ? next.id : '';
    if (currentId && context.userIdMap.has(currentId)) {
      next.id = context.userIdMap.get(currentId);
    }
  }
  if (table === 'user_api_keys') {
    const currentId = typeof next.id === 'string' ? next.id : '';
    if (currentId && context.apiKeyIdMap.has(currentId)) {
      next.id = context.apiKeyIdMap.get(currentId);
    }
    // Prefer an explicit owner field; fall back to email attribution.
    const importedUserId = findImportedWorkUserId(next);
    const emailUserId = findUserIdByEmail(next, context);
    if (importedUserId || emailUserId) {
      next.user_id = importedUserId
        ? (context.userIdMap.get(importedUserId) || importedUserId)
        : emailUserId;
    }
    // Required-field defaults for legacy exports.
    if (typeof next.note !== 'string' || next.note.trim() === '') {
      next.note = '导入的 API Key';
    }
    if (typeof next.type !== 'string' || next.type.trim() === '') {
      next.type = 'image';
    }
    // Re-encrypt with the local secret key; a plaintext `apiKey` field wins
    // over an (incompatibly) encrypted value from the source install.
    const rawEncrypted = typeof next.api_key_encrypted === 'string' ? next.api_key_encrypted.trim() : '';
    const rawApiKey = typeof next.apiKey === 'string' ? next.apiKey.trim() : '';
    const secret = rawApiKey || rawEncrypted;
    if (secret) {
      next.api_key_encrypted = encryptSecret(secret);
      next.api_key_preview = typeof next.api_key_preview === 'string' && next.api_key_preview
        ? next.api_key_preview
        : previewSecret(secret);
    }
  }
  if (table === 'works') {
    const currentId = typeof next.id === 'string' ? next.id : '';
    if (currentId && context.workIdMap.has(currentId)) {
      next.id = context.workIdMap.get(currentId);
    }
    const importedUserId = findImportedWorkUserId(next) || findUserIdByEmail(next, context) || findUserIdByCustomModel(next, context);
    if (importedUserId) {
      next.user_id = context.userIdMap.get(importedUserId) || importedUserId;
    }
    // Inline data-URL media is persisted to storage and replaced by URLs.
    if (typeof next.result_url === 'string') {
      next.result_url = await persistImportMedia(next.result_url, getWorkMediaFolder(next.type, 'results'));
    }
    if (typeof next.thumbnail_url === 'string') {
      next.thumbnail_url = await persistImportMedia(next.thumbnail_url, 'imported/works/thumbnails');
    }
    if (next.params && typeof next.params === 'object') {
      next.params = await sanitizeImportMedia(next.params, 'imported/works/references');
      remapCustomModelId(next.params as Record<string, unknown>, context);
      // After remapping, a custom-model reference may identify the owner.
      if (!next.user_id || next.user_id === SYSTEM_USER_ID) {
        const ownerByModel = findUserIdByCustomModel(next, context);
        if (ownerByModel) {
          next.user_id = ownerByModel;
        }
      }
    }
  }
  // UUID-keyed tables: regenerate any id that is not a valid UUID.
  if (UUID_ID_TABLES.has(table)) {
    const currentId = typeof next.id === 'string' ? next.id : '';
    if (!isUuid(currentId)) {
      next.id = crypto.randomUUID();
    }
  }
  return next;
}
/**
 * Extracts the original owner's user id from an imported row, checking a set
 * of known owner field names (snake_case and camelCase) on the row itself and
 * then inside row.params. Placeholder owners ('anonymous' or the all-zero
 * system id) are ignored. Returns the trimmed id or null.
 */
function findImportedWorkUserId(row: Record<string, unknown>): string | null {
  const ownerKeys = ['user_id', 'userId', 'publisher_id', 'publisherId', 'owner_id', 'ownerId', 'created_by', 'createdBy'];
  const pick = (source: Record<string, unknown>): string | null => {
    for (const key of ownerKeys) {
      const candidate = source[key];
      if (typeof candidate !== 'string') continue;
      // Skip blanks and placeholder owners.
      if (!candidate.trim() || candidate === 'anonymous' || candidate === '00000000-0000-0000-0000-000000000000') continue;
      return candidate.trim();
    }
    return null;
  };
  const direct = pick(row);
  if (direct) return direct;
  if (row.params && typeof row.params === 'object') {
    return pick(row.params as Record<string, unknown>);
  }
  return null;
}
/**
 * Resolves a local user id from any email-like field on the row (then inside
 * row.params), using the context's lowercased email -> user-id map. Returns
 * null when no candidate email maps to a known user.
 */
function findUserIdByEmail(row: Record<string, unknown>, context: ImportContext): string | null {
  const emailKeys = ['email', 'user_email', 'userEmail', 'publisher_email', 'publisherEmail', 'owner_email', 'ownerEmail'];
  const lookup = (source: Record<string, unknown>): string | null => {
    for (const key of emailKeys) {
      const candidate = source[key];
      if (typeof candidate !== 'string' || !candidate.trim()) continue;
      // Emails are keyed lowercased and trimmed in the map.
      const mapped = context.emailUserIdMap.get(candidate.trim().toLowerCase());
      if (mapped) return mapped;
    }
    return null;
  };
  const fromRow = lookup(row);
  if (fromRow) return fromRow;
  if (!row.params || typeof row.params !== 'object') return null;
  return lookup(row.params as Record<string, unknown>);
}
/**
 * If the row references a user-owned model (`custom:<oldKeyId>` in
 * params.model or row.model), returns the owner user id recorded for that
 * old key id; otherwise null.
 */
function findUserIdByCustomModel(row: Record<string, unknown>, context: ImportContext): string | null {
  let model = '';
  const params = row.params && typeof row.params === 'object' ? row.params as Record<string, unknown> : null;
  if (typeof params?.model === 'string') {
    model = params.model;
  } else if (typeof row.model === 'string') {
    model = row.model;
  }
  if (!model.startsWith('custom:')) return null;
  const legacyKeyId = model.slice('custom:'.length);
  return context.apiKeyOwnerIdMap.get(legacyKeyId) || null;
}
/**
 * Rewrites a `custom:<oldKeyId>` model reference in-place to point at the
 * newly assigned API key id. No-op when the model is not custom or the old
 * id has no mapping.
 */
function remapCustomModelId(params: Record<string, unknown>, context: ImportContext): void {
  const prefix = 'custom:';
  if (typeof params.model !== 'string' || !params.model.startsWith(prefix)) return;
  const replacement = context.apiKeyIdMap.get(params.model.slice(prefix.length));
  if (replacement) {
    params.model = `custom:${replacement}`;
  }
}
/**
 * Builds the ON CONFLICT DO UPDATE assignment list for a table, restricted
 * to the columns present in this insert. The general policy is "existing
 * local data wins; imported data only fills gaps". Unknown tables get an
 * empty list (callers fall back to DO NOTHING).
 */
function getMergeAssignments(table: string, cols: string[]): string[] {
  const present = new Set(cols);
  const assignments: string[] = [];
  // Emit `column = expression` only when the column is part of this insert.
  const add = (column: string, expression: string) => {
    if (present.has(column)) assignments.push(`${column} = ${expression}`);
  };
  // Keep the existing non-empty text, otherwise take the imported value.
  const keepText = (column: string) => `COALESCE(NULLIF(target.${column}, ''), EXCLUDED.${column})`;
  // Keep the existing non-null value, otherwise take the imported value.
  const keepValue = (column: string) => `COALESCE(target.${column}, EXCLUDED.${column})`;
  // Resolve updated_at to the most recent of the two timestamps.
  const newestUpdatedAt = 'GREATEST(COALESCE(target.updated_at, EXCLUDED.updated_at), COALESCE(EXCLUDED.updated_at, target.updated_at))';
  switch (table) {
    case 'auth.users':
      add('email', keepText('email'));
      add('raw_user_meta_data', keepValue('raw_user_meta_data'));
      add('password_hash', keepText('password_hash'));
      break;
    case 'profiles':
      add('email', keepText('email'));
      add('nickname', keepText('nickname'));
      add('avatar_url', keepText('avatar_url'));
      add('phone', keepText('phone'));
      // Never demote a local admin.
      add('role', `CASE WHEN target.role = 'admin' THEN target.role ELSE ${keepText('role')} END`);
      add('membership_tier', keepText('membership_tier'));
      add('membership_expires_at', keepValue('membership_expires_at'));
      add('credits_balance', keepValue('credits_balance'));
      add('daily_quota_limit', keepValue('daily_quota_limit'));
      add('is_active', keepValue('is_active'));
      // Only accept known theme values from the import.
      add('preferred_theme', `CASE WHEN EXCLUDED.preferred_theme IN ('dark', 'light') THEN EXCLUDED.preferred_theme ELSE target.preferred_theme END`);
      add('updated_at', newestUpdatedAt);
      break;
    case 'works':
      // Only adopt the imported owner when the local work is unowned.
      add('user_id', `CASE WHEN (target.user_id IS NULL OR target.user_id = '${SYSTEM_USER_ID}'::uuid) AND EXCLUDED.user_id IS NOT NULL AND EXCLUDED.user_id <> '${SYSTEM_USER_ID}'::uuid THEN EXCLUDED.user_id ELSE target.user_id END`);
      add('params', `CASE WHEN target.params IS NULL OR target.params = '{}'::jsonb THEN EXCLUDED.params ELSE target.params END`);
      add('thumbnail_url', keepText('thumbnail_url'));
      add('width', keepValue('width'));
      add('height', keepValue('height'));
      add('duration', keepValue('duration'));
      add('updated_at', newestUpdatedAt);
      break;
    case 'user_api_keys':
      add('user_id', keepValue('user_id'));
      add('provider', keepText('provider'));
      add('supplier_name', keepText('supplier_name'));
      add('api_url', keepText('api_url'));
      add('model_name', keepText('model_name'));
      add('note', keepText('note'));
      add('api_key_encrypted', keepText('api_key_encrypted'));
      add('api_key_preview', keepText('api_key_preview'));
      add('type', keepText('type'));
      add('is_active', keepValue('is_active'));
      add('updated_at', newestUpdatedAt);
      break;
    case 'system_api_configs':
      add('provider', keepText('provider'));
      add('name', keepText('name'));
      add('api_url', keepText('api_url'));
      add('model_name', keepText('model_name'));
      add('note', keepText('note'));
      add('api_key_encrypted', keepText('api_key_encrypted'));
      add('api_key_preview', keepText('api_key_preview'));
      add('type', keepText('type'));
      add('credits_per_use', keepValue('credits_per_use'));
      add('is_active', keepValue('is_active'));
      add('sort_order', keepValue('sort_order'));
      add('updated_at', newestUpdatedAt);
      break;
    case 'payment_methods':
      add('type', keepText('type'));
      add('name', keepText('name'));
      add('is_active', keepValue('is_active'));
      add('public_config', keepValue('public_config'));
      add('secret_config_encrypted', keepValue('secret_config_encrypted'));
      add('secret_config_preview', keepValue('secret_config_preview'));
      add('updated_at', newestUpdatedAt);
      break;
    default:
      break;
  }
  return assignments;
}
/**
 * Returns the set of column names that actually exist for `table` in the live
 * database, cached per import request. Accepts "schema.table" names; bare
 * names default to the "public" schema.
 */
async function getExistingColumns(
  client: Awaited<ReturnType<typeof getDbClient>>,
  table: string,
  context: ImportContext,
): Promise<Set<string>> {
  const known = context.columnCache.get(table);
  if (known) return known;
  let schemaName = 'public';
  let tableName = table;
  if (table.includes('.')) {
    [schemaName, tableName] = table.split('.', 2);
  }
  const result = await client.query(
    'SELECT column_name FROM information_schema.columns WHERE table_schema = $1 AND table_name = $2',
    [schemaName, tableName],
  );
  const columns = new Set((result.rows || []).map((row: Record<string, unknown>) => String(row.column_name)));
  context.columnCache.set(table, columns);
  return columns;
}
/**
 * Returns the NOT NULL columns of `table` that carry a database default,
 * cached per import request. Imported null values for these columns are
 * dropped so the default applies instead of violating the constraint.
 * Accepts "schema.table"; bare names default to "public".
 */
async function getDefaultableColumns(
  client: Awaited<ReturnType<typeof getDbClient>>,
  table: string,
  context: ImportContext,
): Promise<Set<string>> {
  const known = context.defaultableColumnCache.get(table);
  if (known) return known;
  let schemaName = 'public';
  let tableName = table;
  if (table.includes('.')) {
    [schemaName, tableName] = table.split('.', 2);
  }
  const result = await client.query(
    `SELECT column_name
       FROM information_schema.columns
      WHERE table_schema = $1
        AND table_name = $2
        AND is_nullable = 'NO'
        AND column_default IS NOT NULL`,
    [schemaName, tableName],
  );
  const columns = new Set((result.rows || []).map((row: Record<string, unknown>) => String(row.column_name)));
  context.defaultableColumnCache.set(table, columns);
  return columns;
}
/**
 * Assigns a freshly generated UUID to a legacy (non-UUID) string id, once.
 * Valid UUIDs, non-strings, empty strings and already-seeded ids are left
 * untouched.
 */
function seedUuidMap(map: Map<string, string>, value: unknown): void {
  if (typeof value !== 'string' || !value) return;
  if (isUuid(value) || map.has(value)) return;
  map.set(value, crypto.randomUUID());
}
/** Type guard: true when `value` is a string matching the canonical UUID shape. */
function isUuid(value: unknown): value is string {
  if (typeof value !== 'string') return false;
  return UUID_REGEX.test(value);
}
/** True when `value` is a string in `data:<mediatype>,<payload>` form. */
function isDataUrl(value: unknown): boolean {
  if (typeof value !== 'string') return false;
  return /^data:[^,]+,/i.test(value);
}
/**
 * Storage folder for imported work media: work types containing "video" go
 * under .../videos, everything else (including non-strings) under .../images.
 */
function getWorkMediaFolder(type: unknown, kind: string): string {
  const label = typeof type === 'string' ? type.toLowerCase() : '';
  if (label.includes('video')) {
    return `imported/works/${kind}/videos`;
  }
  return `imported/works/${kind}/images`;
}
/**
 * Maps a MIME type to a file extension by substring match, falling back to
 * "bin" for anything unrecognized. Matching is case-insensitive and ordered
 * (first matching token wins).
 */
function extensionFromMime(mime: string): string {
  const lowered = mime.toLowerCase();
  const mappings: Array<[string, string]> = [
    ['png', 'png'],
    ['jpeg', 'jpg'],
    ['jpg', 'jpg'],
    ['webp', 'webp'],
    ['gif', 'gif'],
    ['mp4', 'mp4'],
    ['webm', 'webm'],
  ];
  const hit = mappings.find(([token]) => lowered.includes(token));
  return hit ? hit[1] : 'bin';
}
/**
 * Persists an inline `data:` URL to local storage and returns a presigned
 * URL for it; any other string is returned unchanged.
 *
 * FIX: decodeURIComponent throws URIError on malformed percent-encoded
 * payloads; previously that exception escaped and could abort the caller.
 * Such values are now kept as-is instead.
 */
async function persistImportMedia(value: string, folder: string): Promise<string> {
  if (!isDataUrl(value)) return value;
  const match = value.match(/^data:([^;,]+)?(;base64)?,([\s\S]*)$/i);
  if (!match) return value;
  const mime = match[1] || 'application/octet-stream';
  const isBase64 = Boolean(match[2]);
  const payload = match[3] || '';
  let buffer: Buffer;
  try {
    buffer = isBase64 ? Buffer.from(payload, 'base64') : Buffer.from(decodeURIComponent(payload));
  } catch {
    // Malformed payload: keep the original value rather than failing the row.
    return value;
  }
  const ext = extensionFromMime(mime);
  // Timestamp + UUID keeps keys unique within the folder.
  const key = `${folder}/${Date.now()}-${crypto.randomUUID()}.${ext}`;
  const savedKey = await localStorage.uploadFile({ fileContent: buffer, fileName: key, contentType: mime });
  // 30-day presigned URL (2592000 seconds).
  return localStorage.generatePresignedUrl({ key: savedKey, expireTime: 2592000 });
}
/**
 * Recursively walks an imported value, persisting every string (potential
 * inline data URL) via persistImportMedia. Arrays are processed in parallel,
 * object keys sequentially; all other values pass through unchanged.
 */
async function sanitizeImportMedia(value: unknown, folder: string): Promise<unknown> {
  if (typeof value === 'string') {
    return persistImportMedia(value, folder);
  }
  if (Array.isArray(value)) {
    return Promise.all(value.map(entry => sanitizeImportMedia(entry, folder)));
  }
  if (value !== null && typeof value === 'object') {
    const sanitized: Record<string, unknown> = {};
    for (const [key, nested] of Object.entries(value as Record<string, unknown>)) {
      sanitized[key] = await sanitizeImportMedia(nested, folder);
    }
    return sanitized;
  }
  return value;
}

View File

@@ -0,0 +1,77 @@
import { NextRequest, NextResponse } from 'next/server';
import { requireAdmin } from '@/lib/admin-auth';
import { isValidEmail, normalizeEmail } from '@/lib/email-service';
import { getDbClient } from '@/storage/database/local-db';
// Next.js route segment config: run this handler on the Node.js runtime.
export const runtime = 'nodejs';
/**
 * Converts a profiles row into an email-recipient DTO. Returns null when the
 * normalized email is invalid. The nickname falls back to the email's local
 * part when absent or blank.
 */
function mapRecipient(row: Record<string, unknown>) {
  const email = normalizeEmail(row.email);
  if (!isValidEmail(email)) return null;
  const trimmedNickname = typeof row.nickname === 'string' ? row.nickname.trim() : '';
  return {
    id: String(row.id),
    email,
    nickname: trimmedNickname || email.split('@')[0],
    phone: typeof row.phone === 'string' ? row.phone : null,
    avatarUrl: typeof row.avatar_url === 'string' ? row.avatar_url : null,
    emailVerified: row.email_verified === true,
  };
}
// GET /api/admin/...: search non-admin, active users with an email address.
// Query params: q (substring match on email/nickname/phone, max 80 chars),
// limit (1..80, default 30). Returns matched users plus the unfiltered total.
export async function GET(request: NextRequest) {
  const adminError = await requireAdmin(request);
  if (adminError) return adminError;
  const { searchParams } = new URL(request.url);
  const q = (searchParams.get('q') || '').trim().toLowerCase().slice(0, 80);
  const limit = Math.min(80, Math.max(1, Number(searchParams.get('limit') || 30)));
  const client = await getDbClient();
  try {
    const params: unknown[] = [];
    let filter = `
      WHERE COALESCE(role, 'user') NOT IN ('admin', 'enterprise_admin')
        AND COALESCE(is_active, true) = true
        AND COALESCE(email, '') <> ''
    `;
    if (q) {
      params.push(`%${q}%`);
      filter += `
        AND (
          LOWER(email) LIKE $${params.length}
          OR LOWER(COALESCE(nickname, '')) LIKE $${params.length}
          OR COALESCE(phone, '') LIKE $${params.length}
        )
      `;
    }
    // Pass LIMIT as a bound parameter for consistency with the rest of the
    // query (previously interpolated; the value was clamped, so this is a
    // hardening/consistency change, not a behavior change).
    params.push(limit);
    const result = await client.query(
      `SELECT id, email, nickname, phone, avatar_url, email_verified
       FROM profiles
       ${filter}
       ORDER BY created_at DESC
       LIMIT $${params.length}`,
      params,
    );
    // NOTE(review): the total deliberately ignores the q filter — it is the
    // size of the whole sendable pool, not of the search result. Confirm the
    // UI expects that.
    const countResult = await client.query(
      `SELECT COUNT(*)::int AS count
       FROM profiles
       WHERE COALESCE(role, 'user') NOT IN ('admin', 'enterprise_admin')
         AND COALESCE(is_active, true) = true
         AND COALESCE(email, '') <> ''`,
    );
    const users = result.rows
      .map(mapRecipient)
      .filter((item): item is NonNullable<ReturnType<typeof mapRecipient>> => Boolean(item));
    return NextResponse.json({
      users,
      total: Number(countResult.rows[0]?.count || 0),
    });
  } finally {
    client.release();
  }
}

View File

@@ -0,0 +1,84 @@
import { NextRequest, NextResponse } from 'next/server';
import { requireAdmin } from '@/lib/admin-auth';
import {
getEmailSettings,
getRequestBaseUrl,
publicEmailSettings,
renderEmailTemplate,
saveEmailSettings,
sendTemplatedEmail,
} from '@/lib/email-service';
import { getDbClient } from '@/storage/database/local-db';
export const runtime = 'nodejs';
// GET: return the current email settings (secrets redacted via
// publicEmailSettings) together with a rendered preview of the shared
// notification template.
export async function GET(request: NextRequest) {
  const adminError = await requireAdmin(request);
  if (adminError) return adminError;
  const client = await getDbClient();
  try {
    const settings = await getEmailSettings(client);
    // Prefer the URL the admin is actually browsing; fall back to the
    // configured base URL.
    const platformUrl = getRequestBaseUrl(request) || settings.appBaseUrl;
    const preview = renderEmailTemplate(settings, {
      title: '通知邮件模板预览',
      intro: '这是一封由管理员发送给用户的通知邮件示例,用于预览全局通用邮件模板效果。',
      body: '你可以在后台使用这套模板发送系统公告、功能更新、订单提醒、活动通知和安全提醒。实际发送时,标题、正文、按钮和备注会替换为管理员填写的内容。',
      buttonText: '进入妙境',
      buttonUrl: platformUrl,
      note: '验证码邮件使用独立安全验证模板;管理员通知、管理员邮件和提醒邮件使用这套通用模板。',
      templateKind: 'notification',
      assetBaseUrl: platformUrl,
    });
    return NextResponse.json({ settings: publicEmailSettings(settings), preview });
  } finally {
    client.release();
  }
}
// PUT: validate and persist the admin-supplied email settings.
// Validation errors from saveEmailSettings surface as a 400 response.
export async function PUT(request: NextRequest) {
  const adminError = await requireAdmin(request);
  if (adminError) return adminError;
  const client = await getDbClient();
  try {
    const payload = await request.json();
    const settings = await saveEmailSettings(client, payload);
    return NextResponse.json({ success: true, settings, message: '邮箱配置已保存' });
  } catch (error) {
    const message = error instanceof Error ? error.message : '邮箱配置保存失败';
    return NextResponse.json({ error: message }, { status: 400 });
  } finally {
    client.release();
  }
}
// POST: send a test email to the given address so the admin can verify
// the SMTP configuration end-to-end.
export async function POST(request: NextRequest) {
  const adminError = await requireAdmin(request);
  if (adminError) return adminError;
  const client = await getDbClient();
  try {
    const payload = await request.json();
    const recipient = typeof payload.to === 'string' ? payload.to.trim() : '';
    if (!recipient) {
      return NextResponse.json({ error: '请填写测试收件邮箱' }, { status: 400 });
    }
    await sendTemplatedEmail(client, {
      to: recipient,
      type: 'business',
      subject: '【妙境】邮箱配置测试',
      title: '邮箱配置测试',
      intro: '如果你收到这封邮件,说明自定义域名邮箱 SMTP 配置已生效。',
      note: '请同时检查收件箱、垃圾箱,以及 SPF/DKIM/DMARC 解析状态。',
      ipAddress: 'admin-test',
      assetBaseUrl: getRequestBaseUrl(request) || undefined,
    });
    return NextResponse.json({ success: true, message: '测试邮件已发送' });
  } catch (error) {
    const message = error instanceof Error ? error.message : '测试邮件发送失败';
    return NextResponse.json({ error: message }, { status: 400 });
  } finally {
    client.release();
  }
}

View File

@@ -0,0 +1,123 @@
import { NextRequest, NextResponse } from 'next/server';
import { requireAdmin } from '@/lib/admin-auth';
import { getDbClient } from '@/storage/database/local-db';
import { markStaleRunningJobs } from '@/lib/generation-job-worker';
import { ensureGenerationJobRuntimeSchema } from '@/lib/generation-job-estimates';
import { writePlatformLog } from '@/lib/platform-logs';
// Job statuses accepted as a GET ?status= filter.
const STATUSES = new Set(['queued', 'running', 'succeeded', 'failed']);
// Only terminal statuses may be bulk-deleted via DELETE.
const CLEANUP_STATUSES = new Set(['failed', 'succeeded']);
// Parse an integer query parameter, clamping into [min, max]; non-numeric
// or missing values yield the fallback.
function intParam(value: string | null, fallback: number, min: number, max: number) {
  const parsed = Number.parseInt(value ?? '', 10);
  if (!Number.isFinite(parsed)) return fallback;
  if (parsed < min) return min;
  if (parsed > max) return max;
  return parsed;
}
// GET: paginated admin listing of generation jobs, optionally filtered by
// status and by a user search (id/email/nickname substring).
export async function GET(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  // Flip long-running jobs to failed before listing so the UI is accurate.
  await markStaleRunningJobs();
  const { searchParams } = new URL(request.url);
  const status = searchParams.get('status') || '';
  const userSearch = (searchParams.get('user') || searchParams.get('userSearch') || '').trim();
  const page = intParam(searchParams.get('page'), 1, 1, 100000);
  const pageSize = intParam(searchParams.get('pageSize'), 20, 1, 100);
  const offset = (page - 1) * pageSize;
  if (status && !STATUSES.has(status)) {
    return NextResponse.json({ error: '任务状态无效' }, { status: 400 });
  }
  const client = await getDbClient();
  try {
    await ensureGenerationJobRuntimeSchema(client);
    // Build WHERE clauses and bind parameters in lockstep; the same
    // placeholder index is reused for all three user-search columns.
    const whereClauses: string[] = [];
    const params: unknown[] = [];
    if (status) {
      params.push(status);
      whereClauses.push(`j.status = $${params.length}`);
    }
    if (userSearch) {
      params.push(`%${userSearch.toLowerCase()}%`);
      whereClauses.push(`(
        j.user_id::text LIKE $${params.length}
        OR LOWER(COALESCE(p.email, '')) LIKE $${params.length}
        OR LOWER(COALESCE(p.nickname, '')) LIKE $${params.length}
      )`);
    }
    const whereSql = whereClauses.length ? `WHERE ${whereClauses.join(' AND ')}` : '';
    const countResult = await client.query(
      `SELECT COUNT(*)::int AS total
       FROM generation_jobs j
       LEFT JOIN profiles p ON p.id = j.user_id
       ${whereSql}`,
      params,
    );
    // LIMIT/OFFSET are appended after the filter params, hence the +1/+2.
    const rowsResult = await client.query(
      `SELECT j.id, j.user_id, p.email AS user_email, p.nickname AS user_nickname,
              j.type, j.status, j.error, j.created_at, j.started_at, j.finished_at, j.updated_at
       FROM generation_jobs j
       LEFT JOIN profiles p ON p.id = j.user_id
       ${whereSql}
       ORDER BY j.created_at DESC
       LIMIT $${params.length + 1}
       OFFSET $${params.length + 2}`,
      [...params, pageSize, offset],
    );
    const total = countResult.rows[0]?.total || 0;
    return NextResponse.json({
      jobs: rowsResult.rows,
      total,
      page,
      pageSize,
      totalPages: Math.max(1, Math.ceil(total / pageSize)),
    });
  } finally {
    client.release();
  }
}
// DELETE: bulk-remove terminal (failed/succeeded) generation jobs older
// than ?olderThanDays= days (default 7). Writes an audit log entry.
export async function DELETE(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  const { searchParams } = new URL(request.url);
  const status = searchParams.get('status') || 'failed';
  const olderThanDays = intParam(searchParams.get('olderThanDays'), 7, 0, 3650);
  if (!CLEANUP_STATUSES.has(status)) {
    return NextResponse.json(
      { error: '只允许清理失败或已完成任务' },
      { status: 400 },
    );
  }
  const client = await getDbClient();
  try {
    const result = await client.query(
      `DELETE FROM generation_jobs
       WHERE status = $1
         AND updated_at < NOW() - ($2::int * INTERVAL '1 day')`,
      [status, olderThanDays],
    );
    // Fire-and-forget audit log; `void` marks the promise as intentionally
    // not awaited so a logging failure cannot block the response.
    void writePlatformLog({
      type: 'admin',
      level: 'warning',
      action: 'generation_jobs_cleanup',
      message: `管理员清理了${status === 'failed' ? '失败' : '已完成'}生成任务`,
      targetType: 'generation_jobs',
      metadata: { status, olderThanDays, deleted: result.rowCount || 0 },
      request,
    });
    return NextResponse.json({
      success: true,
      deleted: result.rowCount || 0,
    });
  } finally {
    client.release();
  }
}

View File

@@ -0,0 +1,118 @@
import { NextRequest, NextResponse } from 'next/server';
import { requireAdmin } from '@/lib/admin-auth';
import { getDbClient } from '@/storage/database/local-db';
// Shape a model_recommendations row for API consumers, applying the same
// defaults as the database layer (type "image", active by default).
function mapRecommendation(row: Record<string, unknown>) {
  const rawModelName = row.model_name || '';
  return {
    id: String(row.id),
    modelName: String(rawModelName),
    // Display name falls back to the model name when unset.
    displayName: String(row.display_name || rawModelName),
    type: String(row.type || 'image'),
    providerId: (row.provider_id as string | null) || null,
    isActive: row.is_active !== false,
    sortOrder: Number(row.sort_order || 0),
  };
}
// Parse the JSON request body, tolerating malformed/empty bodies by
// returning an empty object instead of throwing.
async function readBody(request: NextRequest) {
  try {
    return await request.json();
  } catch {
    return {};
  }
}
// GET: list all model recommendations, grouped by type then sort order.
// NOTE(review): no requireAdmin here, unlike POST/PUT/DELETE below —
// presumably this listing is intentionally public; confirm.
export async function GET() {
  const client = await getDbClient();
  try {
    const result = await client.query(
      `SELECT id, model_name, display_name, type, provider_id, is_active, sort_order
       FROM model_recommendations
       ORDER BY type ASC, sort_order ASC, model_name ASC`
    );
    return NextResponse.json({ recommendations: result.rows.map(mapRecommendation) });
  } finally {
    client.release();
  }
}
// POST: create a model recommendation. Only modelName is required; the
// display name defaults to the model name and type defaults to "image".
export async function POST(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  const body = await readBody(request);
  if (!body.modelName?.trim()) {
    return NextResponse.json({ error: '请填写模型名称' }, { status: 400 });
  }
  const client = await getDbClient();
  try {
    const result = await client.query(
      `INSERT INTO model_recommendations (model_name, display_name, type, provider_id, is_active, sort_order)
       VALUES ($1, $2, $3, $4, $5, $6)
       RETURNING id, model_name, display_name, type, provider_id, is_active, sort_order`,
      [
        body.modelName.trim(),
        body.displayName?.trim() || body.modelName.trim(),
        body.type || 'image',
        body.providerId || null,
        body.isActive !== false,
        Number(body.sortOrder || 0),
      ]
    );
    return NextResponse.json({ recommendation: mapRecommendation(result.rows[0]) });
  } finally {
    client.release();
  }
}
// PUT: full update of an existing recommendation; requires id and
// modelName, all other fields are overwritten with defaults when absent.
export async function PUT(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  const body = await readBody(request);
  if (!body.id || !body.modelName?.trim()) {
    return NextResponse.json({ error: '缺少推荐项 ID 或模型名称' }, { status: 400 });
  }
  const client = await getDbClient();
  try {
    const result = await client.query(
      `UPDATE model_recommendations
       SET model_name = $2, display_name = $3, type = $4, provider_id = $5,
           is_active = $6, sort_order = $7, updated_at = NOW()
       WHERE id = $1
       RETURNING id, model_name, display_name, type, provider_id, is_active, sort_order`,
      [
        body.id,
        body.modelName.trim(),
        body.displayName?.trim() || body.modelName.trim(),
        body.type || 'image',
        body.providerId || null,
        body.isActive !== false,
        Number(body.sortOrder || 0),
      ]
    );
    // RETURNING yields no rows when the id did not match.
    if (result.rows.length === 0) {
      return NextResponse.json({ error: '推荐模型不存在' }, { status: 404 });
    }
    return NextResponse.json({ recommendation: mapRecommendation(result.rows[0]) });
  } finally {
    client.release();
  }
}
// DELETE: remove a recommendation; the id may come from the JSON body or
// from the ?id= query parameter. Deleting a missing id still succeeds.
export async function DELETE(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  const body = await readBody(request);
  const targetId = body.id || request.nextUrl.searchParams.get('id');
  if (!targetId) {
    return NextResponse.json({ error: '缺少推荐项 ID' }, { status: 400 });
  }
  const client = await getDbClient();
  try {
    await client.query('DELETE FROM model_recommendations WHERE id = $1', [targetId]);
    return NextResponse.json({ success: true });
  } finally {
    client.release();
  }
}

View File

@@ -0,0 +1,80 @@
import { NextRequest, NextResponse } from 'next/server';
import { getDbClient } from '@/storage/database/local-db';
import { requireAdmin } from '@/lib/admin-auth';
// GET: newest 100 orders for the admin dashboard.
export async function GET(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  try {
    const client = await getDbClient();
    try {
      const { rows } = await client.query('SELECT * FROM orders ORDER BY created_at DESC LIMIT 100');
      return NextResponse.json({ orders: rows || [] });
    } finally {
      client.release();
    }
  } catch (err) {
    console.error('[admin/orders] GET error:', err);
    return NextResponse.json({ error: '获取订单列表失败' }, { status: 500 });
  }
}
// POST: create an order record with a server-generated UUID id.
// Status defaults to "pending" when not supplied.
export async function POST(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  try {
    const body = await request.json();
    const { user_id, order_no, product_type, product_name, amount, credits_amount, status, payment_method } = body;
    // Reject obviously incomplete payloads up front instead of letting the
    // insert fail on a NOT NULL constraint and surface as a generic 500.
    if (!user_id || !order_no) {
      return NextResponse.json({ error: '缺少 user_id 或 order_no' }, { status: 400 });
    }
    const client = await getDbClient();
    try {
      const id = crypto.randomUUID();
      await client.query(
        'INSERT INTO orders (id, user_id, order_no, product_type, product_name, amount, credits_amount, status, payment_method) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)',
        [id, user_id, order_no, product_type, product_name, amount, credits_amount, status || 'pending', payment_method]
      );
      return NextResponse.json({ success: true });
    } finally {
      client.release();
    }
  } catch (err) {
    console.error('[admin/orders] POST error:', err);
    return NextResponse.json({ error: '创建订单失败' }, { status: 500 });
  }
}
// PUT: partial update of an order. Only status, payment_method and
// paid_at are updatable; updated_at is always bumped, so a body with no
// recognized fields still succeeds as a touch.
export async function PUT(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  try {
    const body = await request.json();
    const { orderId, ...updates } = body;
    if (!orderId) {
      return NextResponse.json({ error: '缺少订单ID' }, { status: 400 });
    }
    const client = await getDbClient();
    try {
      // Build the SET list dynamically so absent fields are left untouched.
      const setClauses: string[] = [];
      const params: unknown[] = [];
      let paramIdx = 1;
      if (updates.status !== undefined) { setClauses.push(`status = $${paramIdx++}`); params.push(updates.status); }
      if (updates.payment_method !== undefined) { setClauses.push(`payment_method = $${paramIdx++}`); params.push(updates.payment_method); }
      if (updates.paid_at !== undefined) { setClauses.push(`paid_at = $${paramIdx++}`); params.push(updates.paid_at); }
      setClauses.push('updated_at = NOW()');
      // orderId is the final placeholder ($paramIdx) for the WHERE clause.
      params.push(orderId);
      await client.query(`UPDATE orders SET ${setClauses.join(', ')} WHERE id = $${paramIdx}`, params);
      return NextResponse.json({ success: true });
    } finally {
      client.release();
    }
  } catch (err) {
    console.error('[admin/orders] PUT error:', err);
    return NextResponse.json({ error: '更新订单失败' }, { status: 500 });
  }
}

View File

@@ -0,0 +1,47 @@
import { NextRequest, NextResponse } from 'next/server';
import { requireAdmin } from '@/lib/admin-auth';
import { getDbClient } from '@/storage/database/local-db';
import { listPaymentMethods, savePaymentMethod } from '@/lib/server-payment-config';
// Read the JSON body, falling back to an empty object on parse failure.
async function readBody(request: NextRequest) {
  try {
    return await request.json();
  } catch {
    return {};
  }
}
export async function GET(request: NextRequest) {
const authError = await requireAdmin(request);
if (authError) return authError;
const client = await getDbClient();
try {
return NextResponse.json({ paymentMethods: await listPaymentMethods(client) });
} finally {
client.release();
}
}
// PUT: update one payment method's name/active flag/config; undefined
// fields are passed through as undefined so savePaymentMethod leaves
// them unchanged. Validation errors surface as 400.
export async function PUT(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  const body = await readBody(request);
  if (typeof body.id !== 'string' || !body.id.trim()) {
    return NextResponse.json({ error: '缺少支付方式 ID' }, { status: 400 });
  }
  const client = await getDbClient();
  try {
    const paymentMethods = await savePaymentMethod(client, body.id.trim(), {
      name: typeof body.name === 'string' ? body.name : undefined,
      isActive: typeof body.isActive === 'boolean' ? body.isActive : undefined,
      config: body.config && typeof body.config === 'object' ? body.config : undefined,
    });
    return NextResponse.json({ paymentMethods });
  } catch (err) {
    return NextResponse.json(
      { error: err instanceof Error ? err.message : '保存失败' },
      { status: 400 },
    );
  } finally {
    client.release();
  }
}

View File

@@ -0,0 +1,121 @@
import { NextRequest, NextResponse } from 'next/server';
import { requireAdmin } from '@/lib/admin-auth';
import { getDbClient } from '@/storage/database/local-db';
// Shape an api_providers row for API consumers, normalising nullable
// columns and applying the defaults used elsewhere (type "image",
// active by default, sort order 0).
function mapProvider(row: Record<string, unknown>) {
  const active = row.is_active !== false;
  return {
    id: String(row.id),
    name: String(row.name || ''),
    defaultApiUrl: String(row.default_api_url || ''),
    defaultModel: String(row.default_model || ''),
    type: String(row.type || 'image'),
    website: (row.website as string | null) || null,
    isActive: active,
    sortOrder: Number(row.sort_order || 0),
  };
}
// Tolerant JSON body parser: malformed bodies become an empty object.
async function readBody(request: NextRequest) {
  try {
    return await request.json();
  } catch {
    return {};
  }
}
// GET: list all API providers ordered by sort order then name.
// NOTE(review): no requireAdmin here, unlike the mutating handlers —
// presumably the provider catalogue is intentionally public; confirm.
export async function GET() {
  const client = await getDbClient();
  try {
    const result = await client.query(
      `SELECT id, name, default_api_url, default_model, type, website, is_active, sort_order
       FROM api_providers
       ORDER BY sort_order ASC, name ASC`
    );
    return NextResponse.json({ providers: result.rows.map(mapProvider) });
  } finally {
    client.release();
  }
}
// POST: create an API provider. Only the name is required; all other
// fields get empty/default values when absent.
export async function POST(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  const body = await readBody(request);
  if (!body.name?.trim()) {
    return NextResponse.json({ error: '请填写供应商名称' }, { status: 400 });
  }
  const client = await getDbClient();
  try {
    const result = await client.query(
      `INSERT INTO api_providers (name, default_api_url, default_model, type, website, is_active, sort_order)
       VALUES ($1, $2, $3, $4, $5, $6, $7)
       RETURNING id, name, default_api_url, default_model, type, website, is_active, sort_order`,
      [
        body.name.trim(),
        body.defaultApiUrl?.trim() || '',
        body.defaultModel?.trim() || '',
        body.type || 'image',
        body.website?.trim() || null,
        body.isActive !== false,
        Number(body.sortOrder || 0),
      ]
    );
    return NextResponse.json({ provider: mapProvider(result.rows[0]) });
  } finally {
    client.release();
  }
}
// PUT: full update of a provider; requires id and name, and overwrites
// every other column with defaults when a field is absent from the body.
export async function PUT(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  const body = await readBody(request);
  if (!body.id || !body.name?.trim()) {
    return NextResponse.json({ error: '缺少供应商 ID 或名称' }, { status: 400 });
  }
  const client = await getDbClient();
  try {
    const result = await client.query(
      `UPDATE api_providers
       SET name = $2, default_api_url = $3, default_model = $4, type = $5, website = $6,
           is_active = $7, sort_order = $8, updated_at = NOW()
       WHERE id = $1
       RETURNING id, name, default_api_url, default_model, type, website, is_active, sort_order`,
      [
        body.id,
        body.name.trim(),
        body.defaultApiUrl?.trim() || '',
        body.defaultModel?.trim() || '',
        body.type || 'image',
        body.website?.trim() || null,
        body.isActive !== false,
        Number(body.sortOrder || 0),
      ]
    );
    // RETURNING yields no rows when the id did not match any provider.
    if (result.rows.length === 0) {
      return NextResponse.json({ error: '供应商不存在' }, { status: 404 });
    }
    return NextResponse.json({ provider: mapProvider(result.rows[0]) });
  } finally {
    client.release();
  }
}
// DELETE: remove a provider by id taken from the body or ?id= query
// parameter; deleting a non-existent id still reports success.
export async function DELETE(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  const body = await readBody(request);
  const targetId = body.id || request.nextUrl.searchParams.get('id');
  if (!targetId) {
    return NextResponse.json({ error: '缺少供应商 ID' }, { status: 400 });
  }
  const client = await getDbClient();
  try {
    await client.query('DELETE FROM api_providers WHERE id = $1', [targetId]);
    return NextResponse.json({ success: true });
  } finally {
    client.release();
  }
}

View File

@@ -0,0 +1,147 @@
import { NextRequest, NextResponse } from 'next/server';
import { requireAdmin } from '@/lib/admin-auth';
import { getRequestBaseUrl, isValidEmail, normalizeEmail, sendTemplatedEmail } from '@/lib/email-service';
import { getDbClient } from '@/storage/database/local-db';
// Nodemailer/SMTP requires the Node.js runtime (not Edge).
export const runtime = 'nodejs';
// Hard caps to keep a single request bounded.
const MAX_TARGETED_RECIPIENTS = 200;
const MAX_BROADCAST_RECIPIENTS = 5000;
// Which shared template the admin mail uses.
type AdminMailKind = 'notification' | 'admin';
// Coerce arbitrary input to a supported mail kind; anything that is not
// exactly 'admin' becomes the default 'notification'.
function normalizeMailKind(value: unknown): AdminMailKind {
  if (value === 'admin') return 'admin';
  return 'notification';
}
// Extract a deduplicated list of user IDs from untrusted input, keeping
// only strings that look like real UUIDs.
function normalizeIdList(value: unknown): string[] {
  if (!Array.isArray(value)) return [];
  // Strict 8-4-4-4-12 UUID shape. The previous /^[0-9a-fA-F-]{36}$/ accepted
  // any 36 hex/dash characters (e.g. 36 dashes), which would break the
  // `id = ANY($1::uuid[])` cast in the recipient query.
  const uuidPattern = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
  return [...new Set(value
    .filter((item): item is string => typeof item === 'string')
    .map(item => item.trim())
    .filter(item => uuidPattern.test(item)))];
}
// Extract a deduplicated list of valid, normalized email addresses from
// untrusted input; non-arrays yield an empty list.
function normalizeEmailList(value: unknown): string[] {
  if (!Array.isArray(value)) return [];
  const unique = new Set<string>();
  for (const entry of value) {
    const email = normalizeEmail(entry);
    if (isValidEmail(email)) unique.add(email);
  }
  return [...unique];
}
// Resolve the recipient list for an admin mail-out. mode 'all' broadcasts
// to every active non-admin user (capped); otherwise recipients are
// matched by explicit user IDs and/or email addresses (capped lower).
// Throws when a targeted send exceeds MAX_TARGETED_RECIPIENTS.
async function loadRecipients(client: Awaited<ReturnType<typeof getDbClient>>, body: Record<string, unknown>) {
  const mode = body.mode === 'all' ? 'all' : 'selected';
  if (mode === 'all') {
    const result = await client.query(
      `SELECT id, email
       FROM profiles
       WHERE COALESCE(role, 'user') NOT IN ('admin', 'enterprise_admin')
         AND COALESCE(is_active, true) = true
         AND COALESCE(email, '') <> ''
       ORDER BY created_at ASC
       LIMIT $1`,
      [MAX_BROADCAST_RECIPIENTS],
    );
    return result.rows
      .map(row => ({ id: String(row.id), email: normalizeEmail(row.email) }))
      .filter(row => isValidEmail(row.email));
  }
  const userIds = normalizeIdList(body.userIds);
  const emails = normalizeEmailList(body.emails);
  if (userIds.length === 0 && emails.length === 0) return [];
  if (userIds.length + emails.length > MAX_TARGETED_RECIPIENTS) {
    throw new Error(`单次指定发送最多 ${MAX_TARGETED_RECIPIENTS} 个收件人`);
  }
  // Targeted lookup still applies the non-admin/active/email filters, so
  // an admin address passed in explicitly will be silently dropped.
  const result = await client.query(
    `SELECT id, email
     FROM profiles
     WHERE COALESCE(role, 'user') NOT IN ('admin', 'enterprise_admin')
       AND COALESCE(is_active, true) = true
       AND COALESCE(email, '') <> ''
       AND (
         id = ANY($1::uuid[])
         OR LOWER(email) = ANY($2::text[])
       )`,
    [userIds, emails],
  );
  return result.rows
    .map(row => ({ id: String(row.id), email: normalizeEmail(row.email) }))
    .filter(row => isValidEmail(row.email));
}
// POST: send an admin-composed email to selected users or broadcast to
// all non-admin users. Validates title/body/button URL, deduplicates
// recipients by email, sends sequentially and reports per-address
// failures (first 20) without aborting the batch.
export async function POST(request: NextRequest) {
  const adminError = await requireAdmin(request);
  if (adminError) return adminError;
  const client = await getDbClient();
  try {
    const body = await request.json().catch(() => ({})) as Record<string, unknown>;
    // Length caps keep subjects/bodies within sane email limits.
    const title = typeof body.title === 'string' ? body.title.trim().slice(0, 120) : '';
    const content = typeof body.content === 'string' ? body.content.trim().slice(0, 5000) : '';
    const buttonText = typeof body.buttonText === 'string' ? body.buttonText.trim().slice(0, 40) : '';
    const buttonUrl = typeof body.buttonUrl === 'string' ? body.buttonUrl.trim().slice(0, 500) : '';
    const mailKind = normalizeMailKind(body.mailKind);
    const mailKindLabel = mailKind === 'admin' ? '管理员邮件' : '通知邮件';
    if (!title || !content) {
      return NextResponse.json({ error: '请填写邮件标题和正文内容' }, { status: 400 });
    }
    // Only plain http(s) URLs without quote/angle characters are allowed.
    if (buttonUrl && !/^https?:\/\/[^\s"'<>]+$/i.test(buttonUrl)) {
      return NextResponse.json({ error: '按钮链接必须是 HTTP(S) 地址' }, { status: 400 });
    }
    const recipients = await loadRecipients(client, body);
    // Deduplicate by email; the last entry for a given address wins.
    const uniqueRecipients = [...new Map(recipients.map(item => [item.email, item])).values()];
    if (uniqueRecipients.length === 0) {
      return NextResponse.json({ error: '没有可发送的非管理员用户邮箱' }, { status: 400 });
    }
    let sent = 0;
    const failed: Array<{ email: string; error: string }> = [];
    const assetBaseUrl = getRequestBaseUrl(request) || undefined;
    // Sequential sending: individual failures are collected, not fatal.
    // NOTE(review): for large broadcasts this loop can take a while and may
    // hit the route timeout — confirm acceptable for the 5000-recipient cap.
    for (const recipient of uniqueRecipients) {
      try {
        await sendTemplatedEmail(client, {
          to: recipient.email,
          type: mailKind === 'admin' ? 'business' : 'announcement',
          subject: `【妙境】${title}`,
          title,
          body: content,
          buttonText: buttonText || undefined,
          buttonUrl: buttonUrl || undefined,
          note: `这是一封${mailKindLabel},请勿直接回复。`,
          templateKind: mailKind,
          ipAddress: body.mode === 'all' ? 'admin-broadcast' : 'admin-targeted',
          assetBaseUrl,
        });
        sent += 1;
      } catch (error) {
        failed.push({
          email: recipient.email,
          error: error instanceof Error ? error.message : String(error),
        });
      }
    }
    // 200 as long as at least one email went out; 400 when all failed.
    return NextResponse.json({
      success: failed.length === 0,
      total: uniqueRecipients.length,
      sent,
      failedCount: failed.length,
      failed: failed.slice(0, 20),
      message: failed.length === 0
        ? `邮件已发送给 ${sent} 个用户`
        : `已发送 ${sent} 封,失败 ${failed.length}`,
    }, { status: sent > 0 ? 200 : 400 });
  } catch (error) {
    const message = error instanceof Error ? error.message : '邮件发送失败';
    return NextResponse.json({ error: message }, { status: 400 });
  } finally {
    client.release();
  }
}

View File

@@ -0,0 +1,39 @@
import { NextRequest, NextResponse } from 'next/server';
import { requireAdmin } from '@/lib/admin-auth';
import { getDbClient } from '@/storage/database/local-db';
// GET: aggregate site statistics for the admin dashboard — total visits
// (from the singleton site_stats row), non-admin user count, and count
// of public completed works. Each sub-select defaults to 0 when empty.
export async function GET(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  try {
    const client = await getDbClient();
    try {
      const result = await client.query(`
        SELECT
          COALESCE((SELECT total_visits FROM site_stats WHERE id = 1 LIMIT 1), 0)::int AS total_visits,
          COALESCE((
            SELECT COUNT(*)
            FROM profiles
            WHERE COALESCE(role, 'user') NOT IN ('admin', 'enterprise_admin')
          ), 0)::int AS total_users,
          COALESCE((
            SELECT COUNT(*)
            FROM works
            WHERE is_public = true AND status = 'completed'
          ), 0)::int AS total_works
      `);
      const row = result.rows[0] || {};
      return NextResponse.json({
        totalVisits: Number(row.total_visits || 0),
        totalUsers: Number(row.total_users || 0),
        totalWorks: Number(row.total_works || 0),
      });
    } finally {
      client.release();
    }
  } catch (err) {
    console.error('[admin/stats] GET error:', err);
    return NextResponse.json({ error: '获取统计数据失败' }, { status: 500 });
  }
}

View File

@@ -0,0 +1,142 @@
import { NextRequest, NextResponse } from 'next/server';
import { requireAdmin } from '@/lib/admin-auth';
import { getDbClient } from '@/storage/database/local-db';
import {
encryptApiKeyForStorage,
ensureSystemApiSchema,
isUuid,
listSystemApis,
toSafeSystemApi,
} from '@/lib/server-api-config';
// Tolerant JSON body parser: a malformed body becomes an empty object.
async function readBody(request: NextRequest) {
  try {
    return await request.json();
  } catch {
    return {};
  }
}
// Coerce arbitrary input to a supported API type; anything other than
// 'video' or 'text' falls back to 'image'.
function normalizeType(value: unknown): 'image' | 'video' | 'text' {
  switch (value) {
    case 'video':
    case 'text':
      return value;
    default:
      return 'image';
  }
}
// GET: list system API configs; ?includeInactive=false restricts the
// listing to active configs (any other value includes inactive ones).
export async function GET(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  const includeInactive = request.nextUrl.searchParams.get('includeInactive') !== 'false';
  const apis = await listSystemApis(includeInactive);
  return NextResponse.json({ apis });
}
// POST: create a system API config. The API key is encrypted at rest and
// only a preview is returned; the new row is appended at the end of the
// current sort order.
export async function POST(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  const body = await readBody(request);
  if (!body.name?.trim() || !body.modelName?.trim()) {
    return NextResponse.json({ error: '请填写显示名称和模型名称' }, { status: 400 });
  }
  // Encrypt before touching the DB; secret.preview is a redacted display form.
  const secret = encryptApiKeyForStorage(String(body.apiKey || ''));
  const client = await getDbClient();
  try {
    await ensureSystemApiSchema(client);
    const result = await client.query(
      `INSERT INTO system_api_configs (
         provider, name, api_url, model_name, note, api_key_encrypted,
         api_key_preview, type, credits_per_use, is_active, sort_order
       )
       VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10,
               COALESCE((SELECT MAX(sort_order) + 1 FROM system_api_configs), 0))
       RETURNING id, provider, name, api_url, model_name, note, api_key_preview,
                 type, credits_per_use, is_active, sort_order, created_at, updated_at`,
      [
        String(body.provider || '').trim(),
        String(body.name).trim(),
        String(body.apiUrl || '').trim(),
        String(body.modelName).trim(),
        String(body.note || '').trim(),
        secret.encrypted,
        secret.preview,
        normalizeType(body.type),
        Number(body.creditsPerUse || 10),
        body.isActive !== false,
      ],
    );
    // toSafeSystemApi strips anything secret before returning to the client.
    return NextResponse.json({ api: toSafeSystemApi(result.rows[0]) });
  } finally {
    client.release();
  }
}
// PUT: update a system API config. The stored key is only replaced when a
// real new key is supplied (the '********' placeholder from the UI is
// ignored); clearApiKey=true wipes it. clearApiKey is applied after any
// new key, so it wins if both are sent.
export async function PUT(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  const body = await readBody(request);
  if (!isUuid(body.id) || !body.name?.trim() || !body.modelName?.trim()) {
    return NextResponse.json({ error: '缺少 API ID、显示名称或模型名称' }, { status: 400 });
  }
  // Build the SET list and parameter array in lockstep.
  const updates: string[] = [];
  const params: unknown[] = [];
  let idx = 1;
  const add = (column: string, value: unknown) => {
    updates.push(`${column} = $${idx++}`);
    params.push(value);
  };
  add('provider', String(body.provider || '').trim());
  add('name', String(body.name).trim());
  add('api_url', String(body.apiUrl || '').trim());
  add('model_name', String(body.modelName).trim());
  add('note', String(body.note || '').trim());
  add('type', normalizeType(body.type));
  add('credits_per_use', Number(body.creditsPerUse || 10));
  add('is_active', body.isActive !== false);
  if (body.sortOrder !== undefined) add('sort_order', Number(body.sortOrder || 0));
  if (typeof body.apiKey === 'string' && body.apiKey.trim() && body.apiKey !== '********') {
    const secret = encryptApiKeyForStorage(body.apiKey);
    add('api_key_encrypted', secret.encrypted);
    add('api_key_preview', secret.preview);
  }
  if (body.clearApiKey === true) {
    add('api_key_encrypted', '');
    add('api_key_preview', '');
  }
  updates.push('updated_at = NOW()');
  // The row id is the final placeholder ($idx) used in the WHERE clause.
  params.push(body.id);
  const client = await getDbClient();
  try {
    await ensureSystemApiSchema(client);
    const result = await client.query(
      `UPDATE system_api_configs
       SET ${updates.join(', ')}
       WHERE id = $${idx}
       RETURNING id, provider, name, api_url, model_name, note, api_key_preview,
                 type, credits_per_use, is_active, sort_order, created_at, updated_at`,
      params,
    );
    if (result.rows.length === 0) {
      return NextResponse.json({ error: '系统 API 不存在' }, { status: 404 });
    }
    return NextResponse.json({ api: toSafeSystemApi(result.rows[0]) });
  } finally {
    client.release();
  }
}
// DELETE: remove a system API config by UUID taken from the body or the
// ?id= query parameter; deleting a missing id still reports success.
export async function DELETE(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  const body = await readBody(request);
  const targetId = body.id || request.nextUrl.searchParams.get('id');
  if (!isUuid(targetId)) {
    return NextResponse.json({ error: '缺少 API ID' }, { status: 400 });
  }
  const client = await getDbClient();
  try {
    await ensureSystemApiSchema(client);
    await client.query('DELETE FROM system_api_configs WHERE id = $1', [targetId]);
    return NextResponse.json({ success: true });
  } finally {
    client.release();
  }
}

View File

@@ -0,0 +1,207 @@
import { spawn } from 'node:child_process';
import { createHash, randomUUID } from 'node:crypto';
import fs from 'node:fs/promises';
import path from 'node:path';
import { NextRequest, NextResponse } from 'next/server';
import { requireAdmin } from '@/lib/admin-auth';
// child_process/fs access requires the Node.js runtime; never cache state reads.
export const runtime = 'nodejs';
export const dynamic = 'force-dynamic';
// hot = in-place update; cold = update requiring a process restart.
type UpgradeMode = 'hot' | 'cold';
// Lifecycle of an upgrade job, including rollback outcomes.
type UpgradeStatus =
  | 'queued'
  | 'running'
  | 'rolling_back'
  | 'succeeded'
  | 'failed'
  | 'rolled_back'
  | 'rollback_failed';
// Persisted job record shared between this route and the detached
// admin-upgrade-runner process (written as JSON under the state dir).
type UpgradeJobState = {
  id: string;
  mode: UpgradeMode;
  status: UpgradeStatus;
  step: string;                 // machine-readable current step
  message: string;              // human-readable progress message
  progress: number;             // 0-100
  packageName: string;          // original uploaded file name
  packageHash?: string;         // sha256 of the uploaded archive
  backupFile?: string;
  sourceBackupFile?: string;
  restartRequired?: boolean;
  changedFiles?: string[];
  preExistingFiles?: string[];
  error?: string;
  startedAt: string;            // ISO timestamps
  updatedAt: string;
  finishedAt?: string;
  logs: string[];
  dryRun?: boolean;             // preflight-only run, no files applied
};
// Upload size cap for upgrade archives.
const MAX_PACKAGE_BYTES = 300 * 1024 * 1024;
// Statuses that mean a job is still in flight (blocks new uploads).
const RUNNING_STATUSES = new Set<UpgradeStatus>(['queued', 'running', 'rolling_back']);
// GET: report upgrade state — latest job, full history, the state
// directory on disk, and whether any job is still in flight.
export async function GET(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  try {
    const states = await readStates();
    const hasActiveJob = states.some(job => RUNNING_STATUSES.has(job.status));
    return NextResponse.json({
      latest: states[0] ?? null,
      history: states,
      stateDir: getUpgradeStateRoot(),
      running: hasActiveJob,
    });
  } catch (error) {
    console.error('[admin/upgrade] failed to read state:', error);
    return NextResponse.json({ error: '读取升级状态失败' }, { status: 500 });
  }
}
// POST: accept an uploaded upgrade archive, persist it plus an initial job
// state file, then launch the detached runner script that performs the
// actual upgrade. Rejects if another job is in flight; the runner is
// intentionally detached so it survives this server process restarting.
export async function POST(request: NextRequest) {
  const authError = await requireAdmin(request);
  if (authError) return authError;
  try {
    const states = await readStates();
    // Single-flight guard: only one upgrade at a time.
    const runningJob = states.find(job => RUNNING_STATUSES.has(job.status));
    if (runningJob) {
      return NextResponse.json({ error: `已有升级任务正在执行:${runningJob.id}` }, { status: 409 });
    }
    const form = await request.formData();
    const modeValue = String(form.get('mode') || '');
    const mode = modeValue === 'hot' || modeValue === 'cold' ? modeValue : null;
    const dryRun = String(form.get('dryRun') || '') === 'true';
    if (!mode) {
      return NextResponse.json({ error: '请选择热更新或冷更新' }, { status: 400 });
    }
    // Validate the uploaded package: present, non-empty, within the size
    // cap, and carrying an allowed archive extension.
    const file = form.get('package');
    if (!(file instanceof File)) {
      return NextResponse.json({ error: '请上传升级包' }, { status: 400 });
    }
    if (file.size <= 0) {
      return NextResponse.json({ error: '升级包为空' }, { status: 400 });
    }
    if (file.size > MAX_PACKAGE_BYTES) {
      return NextResponse.json({ error: '升级包不能超过 300MB' }, { status: 400 });
    }
    if (!isAllowedArchiveName(file.name)) {
      return NextResponse.json({ error: '仅支持 .tar、.tar.gz、.tgz 升级包' }, { status: 400 });
    }
    const stateRoot = getUpgradeStateRoot();
    // Job id = compact timestamp + short random suffix, e.g. 20260510001803-1a2b3c4d.
    const jobId = `${new Date().toISOString().replace(/[-:.TZ]/g, '').slice(0, 14)}-${randomUUID().slice(0, 8)}`;
    const jobDir = path.join(stateRoot, 'jobs', jobId);
    const uploadDir = path.join(jobDir, 'upload');
    // Restrictive permissions: the archive may contain sensitive code.
    await fs.mkdir(uploadDir, { recursive: true, mode: 0o700 });
    const safeName = sanitizeFileName(file.name);
    const packagePath = path.join(uploadDir, safeName);
    const bytes = Buffer.from(await file.arrayBuffer());
    await fs.writeFile(packagePath, bytes, { mode: 0o600 });
    const now = new Date().toISOString();
    const initialState: UpgradeJobState = {
      id: jobId,
      mode,
      status: 'queued',
      step: 'queued',
      message: '升级包已上传,等待执行',
      progress: 0,
      packageName: file.name,
      // sha256 lets the runner verify the archive was not corrupted.
      packageHash: createHash('sha256').update(bytes).digest('hex'),
      startedAt: now,
      updatedAt: now,
      logs: [`[${now}] 上传升级包 ${file.name} (${file.size} bytes)`],
      dryRun,
    };
    if (dryRun) {
      initialState.message = '升级包已上传,正在执行预检';
    }
    await writeState(jobDir, initialState);
    const runnerArgs = [
      path.join(process.cwd(), 'scripts/admin-upgrade-runner.mjs'),
      '--job-id',
      jobId,
      '--mode',
      mode,
      '--package',
      packagePath,
      '--package-name',
      file.name,
      '--project',
      process.cwd(),
    ];
    if (dryRun) runnerArgs.push('--dry-run', 'true');
    // Detached + ignored stdio + unref: the runner keeps going even if this
    // Next.js process is restarted mid-upgrade (it reports via state files).
    const child = spawn(process.execPath, runnerArgs, {
      cwd: process.cwd(),
      detached: true,
      stdio: 'ignore',
      env: {
        ...process.env,
        UPGRADE_STATE_DIR: stateRoot,
        COREPACK_HOME: process.env.COREPACK_HOME || '/tmp/corepack',
      },
    });
    child.unref();
    return NextResponse.json({ success: true, dryRun, job: initialState });
  } catch (error) {
    console.error('[admin/upgrade] failed to start upgrade:', error);
    return NextResponse.json({ error: error instanceof Error ? error.message : '创建升级任务失败' }, { status: 500 });
  }
}
/**
 * Resolve the directory that holds upgrade job state.
 * Precedence: UPGRADE_STATE_DIR env var → "upgrade" dir next to
 * LOCAL_STORAGE_DIR's parent → <cwd>/upgrade-state.
 */
function getUpgradeStateRoot(): string {
  const { UPGRADE_STATE_DIR, LOCAL_STORAGE_DIR } = process.env;
  if (UPGRADE_STATE_DIR) {
    return path.resolve(UPGRADE_STATE_DIR);
  }
  if (LOCAL_STORAGE_DIR) {
    return path.join(path.dirname(LOCAL_STORAGE_DIR), 'upgrade');
  }
  return path.join(process.cwd(), 'upgrade-state');
}
/**
 * Load every persisted job state under <stateRoot>/jobs, newest first.
 * A missing jobs directory yields an empty list; unreadable or unparsable
 * state files are skipped silently.
 */
async function readStates(): Promise<UpgradeJobState[]> {
  const jobsRoot = path.join(getUpgradeStateRoot(), 'jobs');
  let jobNames: string[];
  try {
    jobNames = await fs.readdir(jobsRoot);
  } catch {
    return [];
  }
  const loaded: Array<UpgradeJobState | null> = await Promise.all(
    jobNames.map(async jobName => {
      const statePath = path.join(jobsRoot, jobName, 'state.json');
      try {
        return JSON.parse(await fs.readFile(statePath, 'utf8')) as UpgradeJobState;
      } catch {
        return null;
      }
    }),
  );
  const valid = loaded.filter((job): job is UpgradeJobState => Boolean(job));
  valid.sort((a, b) => new Date(b.updatedAt).getTime() - new Date(a.updatedAt).getTime());
  return valid;
}
/**
 * Persist a job's state.json (pretty-printed, trailing newline, owner-only
 * permissions), creating the job directory if needed.
 */
async function writeState(jobDir: string, state: UpgradeJobState): Promise<void> {
  await fs.mkdir(jobDir, { recursive: true, mode: 0o700 });
  const serialized = `${JSON.stringify(state, null, 2)}\n`;
  await fs.writeFile(path.join(jobDir, 'state.json'), serialized, { mode: 0o600 });
}
/** Accept only tar-based archive names: .tar, .tar.gz or .tgz. */
function isAllowedArchiveName(name: string): boolean {
  const allowedSuffixes = ['.tar', '.tar.gz', '.tgz'];
  return allowedSuffixes.some(suffix => name.endsWith(suffix));
}
/**
 * Reduce an uploaded file name to its basename with only [a-zA-Z0-9._-]
 * characters (everything else becomes '_'), falling back to a default name
 * when nothing usable remains.
 */
function sanitizeFileName(name: string): string {
  const cleaned = path.basename(name).replace(/[^a-zA-Z0-9._-]/g, '_');
  return cleaned.length > 0 ? cleaned : 'upgrade-package.tar.gz';
}

View File

@@ -0,0 +1,74 @@
import { NextRequest, NextResponse } from 'next/server';
import { getDbClient } from '@/storage/database/local-db';
import { requireAdmin } from '@/lib/admin-auth';
import { deleteAdminUser, listAdminUsers, updateAdminUser } from '@/lib/admin-users-service';
/**
 * Extract the acting user's UUID from an Authorization bearer token of the
 * form `token-<role>-<uuid>-<timestamp>`; returns null when the header is
 * absent or the token is malformed.
 */
function getTokenUserId(request: NextRequest): string | null {
  const rawHeader = request.headers.get('authorization') ?? '';
  const bearerValue = rawHeader.replace(/^Bearer\s+/i, '').trim();
  const parsed = /^token-[a-z_]+-([0-9a-fA-F-]{36})-\d+$/.exec(bearerValue);
  if (!parsed) {
    return null;
  }
  return parsed[1] || null;
}
/**
 * GET /api/admin/users — paginated, searchable user listing for admins.
 * Accepts `search`/`q`, `page`, and `pageSize`/`limit` query parameters.
 */
export async function GET(request: NextRequest) {
  const denied = await requireAdmin(request);
  if (denied) return denied;
  try {
    const client = await getDbClient();
    try {
      const query = request.nextUrl.searchParams;
      const listing = await listAdminUsers(client, {
        search: query.get('search') || query.get('q') || '',
        page: Number(query.get('page') || '1'),
        pageSize: Number(query.get('pageSize') || query.get('limit') || '20'),
      });
      return NextResponse.json(listing);
    } finally {
      client.release();
    }
  } catch (err) {
    console.error('[admin/users] GET error:', err);
    return NextResponse.json({ error: '获取用户列表失败' }, { status: 500 });
  }
}
/**
 * PUT /api/admin/users — apply an admin-initiated update to a user record.
 * The service layer decides the response status and body.
 */
export async function PUT(request: NextRequest) {
  const denied = await requireAdmin(request);
  if (denied) return denied;
  try {
    const payload = await request.json();
    const client = await getDbClient();
    try {
      const outcome = await updateAdminUser(client, payload);
      return NextResponse.json(outcome.body, { status: outcome.status });
    } finally {
      client.release();
    }
  } catch (err) {
    console.error('[admin/users] PUT error:', err);
    return NextResponse.json({ error: '服务器错误' }, { status: 500 });
  }
}
/**
 * DELETE /api/admin/users — remove a user. The target id may arrive in the
 * JSON body (`userId`/`id`) or as a query parameter; the caller's own id
 * (decoded from the bearer token) is forwarded to the service layer.
 */
export async function DELETE(request: NextRequest) {
  const denied = await requireAdmin(request);
  if (denied) return denied;
  try {
    const payload = await request.json().catch(() => ({}));
    const search = request.nextUrl.searchParams;
    const targetId = payload.userId || payload.id || search.get('userId') || search.get('id');
    const client = await getDbClient();
    try {
      const outcome = await deleteAdminUser(client, String(targetId || ''), getTokenUserId(request));
      return NextResponse.json(outcome.body, { status: outcome.status });
    } finally {
      client.release();
    }
  } catch (err) {
    console.error('[admin/users] DELETE error:', err);
    return NextResponse.json({ error: '删除用户失败' }, { status: 500 });
  }
}

View File

@@ -0,0 +1,124 @@
import { NextRequest, NextResponse } from 'next/server';
import { getDbClient } from '@/storage/database/local-db';
import { requireAdmin } from '@/lib/admin-auth';
/**
 * Normalize an announcement row to the public shape, exposing both the legacy
 * field names (`enabled`/`start_date`/`end_date`) and the current ones
 * (`is_active`/`starts_at`/`expires_at`). Active unless explicitly false.
 */
function toPublicAnnouncement(row: Record<string, unknown>) {
  const effectiveStart = row.starts_at ?? row.start_date ?? null;
  const effectiveEnd = row.expires_at ?? row.end_date ?? null;
  const activeFlag = row.is_active ?? row.enabled ?? true;
  const isEnabled = activeFlag !== false;
  return {
    ...row,
    enabled: isEnabled,
    start_date: effectiveStart,
    end_date: effectiveEnd,
    is_active: isEnabled,
    starts_at: effectiveStart,
    expires_at: effectiveEnd,
  };
}
/**
 * GET /api/announcements — public listing of all announcements, newest first.
 * Any failure (including DB unavailability) degrades to an empty list.
 */
export async function GET() {
  try {
    const client = await getDbClient();
    try {
      const { rows } = await client.query('SELECT * FROM announcements ORDER BY created_at DESC');
      const announcements = (rows || []).map(toPublicAnnouncement);
      return NextResponse.json(announcements);
    } finally {
      client.release();
    }
  } catch {
    return NextResponse.json([]);
  }
}
/**
 * POST /api/announcements — create an announcement (admin only).
 * All four of title/content/startDate/endDate are required; `enabled`
 * defaults to true unless explicitly false.
 */
export async function POST(request: NextRequest) {
  const denied = await requireAdmin(request);
  if (denied) return denied;
  try {
    const { title, content, startDate, endDate, enabled } = await request.json();
    if (!title || !content || !startDate || !endDate) {
      return NextResponse.json({ error: '请填写完整公告信息' }, { status: 400 });
    }
    const client = await getDbClient();
    try {
      const id = crypto.randomUUID();
      const values = [
        id,
        title,
        content,
        enabled !== false,
        new Date(startDate).toISOString(),
        new Date(endDate).toISOString(),
      ];
      await client.query(
        'INSERT INTO announcements (id, title, content, is_active, starts_at, expires_at) VALUES ($1, $2, $3, $4, $5, $6)',
        values
      );
      return NextResponse.json({ id, success: true });
    } finally {
      client.release();
    }
  } catch (err) {
    console.error('[announcements] POST error:', err);
    return NextResponse.json({ error: '创建公告失败' }, { status: 500 });
  }
}
/**
 * PUT /api/announcements — partial update of one announcement (admin only).
 * Only fields present in the body are written; `updated_at` is always
 * refreshed. Placeholders are built incrementally so the statement stays
 * fully parameterized.
 */
export async function PUT(request: NextRequest) {
  const denied = await requireAdmin(request);
  if (denied) return denied;
  try {
    const { id, title, content, startDate, endDate, enabled } = await request.json();
    if (!id) {
      return NextResponse.json({ error: '缺少公告ID' }, { status: 400 });
    }
    const client = await getDbClient();
    try {
      const assignments: string[] = [];
      const values: unknown[] = [];
      let placeholder = 1;
      const addAssignment = (column: string, value: unknown) => {
        assignments.push(`${column} = $${placeholder}`);
        placeholder += 1;
        values.push(value);
      };
      if (title !== undefined) addAssignment('title', title);
      if (content !== undefined) addAssignment('content', content);
      if (startDate !== undefined) addAssignment('starts_at', new Date(startDate).toISOString());
      if (endDate !== undefined) addAssignment('expires_at', new Date(endDate).toISOString());
      if (enabled !== undefined) addAssignment('is_active', enabled);
      assignments.push('updated_at = NOW()');
      values.push(id);
      await client.query(`UPDATE announcements SET ${assignments.join(', ')} WHERE id = $${placeholder}`, values);
      return NextResponse.json({ success: true });
    } finally {
      client.release();
    }
  } catch (err) {
    console.error('[announcements] PUT error:', err);
    return NextResponse.json({ error: '更新公告失败' }, { status: 500 });
  }
}
/** DELETE /api/announcements?id=… — delete one announcement (admin only). */
export async function DELETE(request: NextRequest) {
  const denied = await requireAdmin(request);
  if (denied) return denied;
  try {
    const id = new URL(request.url).searchParams.get('id');
    if (!id) {
      return NextResponse.json({ error: '缺少公告ID' }, { status: 400 });
    }
    const client = await getDbClient();
    try {
      await client.query('DELETE FROM announcements WHERE id = $1', [id]);
      return NextResponse.json({ success: true });
    } finally {
      client.release();
    }
  } catch (err) {
    console.error('[announcements] DELETE error:', err);
    return NextResponse.json({ error: '删除公告失败' }, { status: 500 });
  }
}

View File

@@ -0,0 +1,78 @@
import { NextResponse } from 'next/server';
import { getDbClient } from '@/storage/database/local-db';
import { ensureEmailSchema } from '@/lib/email-service';
import { getRequiredProductionSecret, isProductionRuntime } from '@/lib/runtime-env';
import { ensureProfilePreferenceSchema } from '@/lib/profile-preferences';
// Email address used when auto-provisioning the development admin account.
const ADMIN_EMAIL = 'admin@miaojing.ai';
/**
 * GET /api/auth/admin-exists — report whether an admin profile exists.
 *
 * In non-production runtimes only, a missing admin triggers bootstrap of the
 * default admin account (auth.users row, enterprise-tier profile, and a
 * best-effort initial credit transaction). Production never auto-creates.
 */
export async function GET() {
  try {
    const client = await getDbClient();
    try {
      await ensureEmailSchema(client);
      await ensureProfilePreferenceSchema(client);
      const result = await client.query(
        'SELECT id, nickname FROM profiles WHERE role = $1 LIMIT 1',
        ['admin']
      );
      if (result.rows.length > 0) {
        return NextResponse.json({ exists: true, nickname: result.rows[0].nickname });
      }
      if (isProductionRuntime()) {
        // Never auto-provision an admin in production.
        return NextResponse.json({ exists: false, autoCreated: false });
      }
      // NOTE(review): called for its validation side effect only — presumably
      // it throws when the secret is misconfigured; confirm in runtime-env.
      getRequiredProductionSecret('ADMIN_DEFAULT_PASSWORD', 'admin123');
      // Development only: bootstrap the default admin profile.
      const userId = crypto.randomUUID();
      await client.query(
        'INSERT INTO auth.users (id, email, created_at) VALUES ($1, $2, NOW())',
        [userId, ADMIN_EMAIL]
      );
      // Upsert so repeated bootstraps keep elevating the same profile row.
      await client.query(
        `INSERT INTO profiles (
        id, email, nickname, role, membership_tier, credits_balance,
        daily_quota_limit, daily_quota_used, is_active, email_verified,
        email_verified_at, email_bound_at
        )
        VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, true, NOW(), NOW())
        ON CONFLICT (id) DO UPDATE SET
        role = $4,
        membership_tier = $5,
        credits_balance = $6,
        daily_quota_limit = $7,
        nickname = $3,
        email_verified = true,
        email_verified_at = COALESCE(profiles.email_verified_at, NOW()),
        email_bound_at = COALESCE(profiles.email_bound_at, NOW())`,
        [userId, ADMIN_EMAIL, '管理员', 'admin', 'enterprise', 9999, 999, 0, true]
      );
      try {
        await client.query(
          'INSERT INTO credit_transactions (user_id, amount, balance_after, type, description) VALUES ($1, $2, $3, $4, $5)',
          [userId, 9999, 9999, 'gift', '管理员初始积分']
        );
      } catch { /* non-critical */ }
      console.log('[admin-exists] Default admin account created: account=admin, password=***');
      return NextResponse.json({
        exists: true,
        autoCreated: true,
        nickname: '管理员',
      });
    } finally {
      client.release();
    }
  } catch (err) {
    console.error('[admin-exists] Error:', err);
    return NextResponse.json({ exists: false, error: '数据库连接失败' });
  }
}

View File

@@ -0,0 +1,146 @@
import { NextRequest, NextResponse } from 'next/server';
import { buildCustomApiHeaders, fetchWithRetry, parseCustomApiError } from '@/lib/custom-api-fetch';
/** Request body for the fetch-models endpoint. */
interface FetchModelsRequest {
  apiUrl: string;   // Full generation endpoint URL; the /models base URL is derived from it.
  apiKey: string;   // Bearer key used to call the upstream API.
  provider: string; // Provider label echoed back on each returned model entry.
}
/**
 * POST — probe an OpenAI-compatible `/models` endpoint and return the model
 * list for a user-configured custom API.
 *
 * The base URL is derived by stripping a known generation path
 * (`/images/generations`, `/videos/generations`, `/chat/completions`) and any
 * trailing slashes from `apiUrl`, then appending `/models`.
 *
 * Failures respond with `success: false`, a user-facing error, and a
 * diagnostic suggestion; HTML error bodies are treated as proxy/WAF blocks.
 *
 * Fixes: removed the unused `parseError` catch binding and flattened
 * else-after-return branches; behavior is otherwise unchanged.
 */
export async function POST(request: NextRequest) {
  try {
    const body = await request.json();
    const { apiUrl, apiKey, provider } = body as FetchModelsRequest;
    if (!apiUrl || !apiKey) {
      return NextResponse.json(
        { success: false, error: '请填写 API 请求地址和 API Key' },
        { status: 400 }
      );
    }
    // Derive the base URL from the apiUrl.
    const baseUrl = apiUrl
      .replace(/\/images\/generations.*/, '')
      .replace(/\/videos\/generations.*/, '')
      .replace(/\/chat\/completions.*/, '')
      .replace(/\/+$/, '');
    const modelsUrl = `${baseUrl}/models`;
    let response: Response;
    try {
      response = await fetchWithRetry(
        modelsUrl,
        {
          method: 'GET',
          headers: buildCustomApiHeaders(apiKey),
        },
        15_000,
        0, // no retry: keep the probe fast
      );
    } catch (fetchError: unknown) {
      const msg = fetchError instanceof Error ? fetchError.message : '请求失败';
      return NextResponse.json({
        success: false,
        error: `网络错误: ${msg}`,
        suggestion: '请检查 API 地址是否正确、网络是否可达',
      });
    }
    if (response.ok) {
      try {
        const data = await response.json();
        if (Array.isArray(data.data)) {
          const models = data.data
            .map((m: Record<string, unknown>) => ({
              id: typeof m.id === 'string' ? m.id : '',
              name: typeof m.name === 'string' ? m.name : '',
              description: typeof m.description === 'string' ? m.description : '',
              provider: provider,
            }))
            .filter((m: { id: string }) => m.id); // drop entries without a usable id
          return NextResponse.json({
            success: true,
            models: models,
            message: `成功获取 ${models.length} 个模型`,
          });
        }
        return NextResponse.json({
          success: false,
          error: 'API 返回的数据格式不正确',
          suggestion: '请检查 API 地址是否正确,确保它支持 /models 端点',
        });
      } catch {
        // Response body was not valid JSON.
        return NextResponse.json({
          success: false,
          error: '解析模型数据失败',
          suggestion: 'API 返回的数据格式可能不正确',
        });
      }
    }
    // Non-2xx: distinguish JSON API errors from HTML proxy/WAF error pages.
    const errorText = await response.text().catch(() => '');
    const trimmed = errorText.trim();
    const isHtml = trimmed.startsWith('<!') || trimmed.startsWith('<html') || trimmed.startsWith('<HTML');
    const parsed = isHtml
      ? { error: parseCustomApiError(response.status, errorText), suggestion: '' }
      : parseApiError(response.status, errorText);
    return NextResponse.json({
      success: false,
      error: parsed.error,
      statusCode: response.status,
      suggestion: parsed.suggestion || getDiagnosticSuggestion(response.status, isHtml),
    });
  } catch (error: unknown) {
    const message = error instanceof Error ? error.message : '获取模型列表失败';
    return NextResponse.json({ success: false, error: message }, { status: 500 });
  }
}
/**
 * Map an upstream HTTP status (and whether the body was HTML) to a
 * human-readable troubleshooting hint; returns '' when no specific advice
 * applies. HTML bodies indicate a proxy/CDN error page rather than a JSON
 * API response.
 */
function getDiagnosticSuggestion(statusCode: number, isHtml: boolean): string {
  if (isHtml) {
    switch (statusCode) {
      case 502:
      case 503:
      case 504:
        return 'API 代理(如 Cloudflare返回错误。你的 API 在本地可用但部署环境不可用时,通常是代理防火墙拦截了服务器请求。建议:①检查 API 代理的 WAF/防火墙设置 ②将服务器 IP 加入白名单 ③尝试使用 API 的直连地址(绕过 Cloudflare';
      case 403:
        return '代理防火墙拦截了请求。建议:①检查 Cloudflare WAF 规则 ②将服务器 IP 加入白名单 ③使用 API 的直连地址';
      default:
        return 'API 返回了错误页面而非 JSON 响应,可能是代理防火墙拦截。建议使用 API 的直连地址(绕过 CDN/代理)';
    }
  }
  const statusHints: Record<number, string> = {
    401: 'API Key 无效或已过期,请检查密钥是否正确',
    403: '账户无权限访问该模型,请检查账户状态',
    404: 'API 地址不正确,请确认完整的请求端点 URL',
    429: '请求频率过高或账户余额不足',
    500: 'API 服务端内部错误,请稍后重试',
    502: 'API 网关错误。可能原因①API 服务端宕机 ②代理防火墙拦截了服务器 IP',
    503: '服务暂不可用。可能原因:①账户余额不足 ②服务维护中 ③代理限制了服务器IP',
  };
  return statusHints[statusCode] ?? '';
}
/**
 * Translate an upstream error status/body into a friendly message plus an
 * optional per-status suggestion. The message itself (including HTML
 * detection) is delegated to the shared `parseCustomApiError` helper.
 */
function parseApiError(statusCode: number, errorBody: string): { error: string; suggestion: string } {
  const friendlyError = parseCustomApiError(statusCode, errorBody);
  const statusHints: Record<number, string> = {
    401: 'API Key 无效或已过期,请检查密钥是否正确',
    403: '账户无权限访问该模型,请检查账户状态',
    404: 'API 地址不正确,请确认完整的请求端点 URL',
    429: '请求频率过高或账户余额不足',
    500: 'API 服务端内部错误,请稍后重试',
    502: 'API 网关错误。可能原因①API 服务端宕机 ②代理防火墙拦截了服务器 IP',
    503: '服务暂不可用。可能原因:①账户余额不足 ②服务维护中 ③代理限制了服务器IP',
  };
  const suggestion = statusHints[statusCode] ?? '';
  return { error: friendlyError, suggestion };
}

View File

@@ -0,0 +1,290 @@
import { NextRequest, NextResponse } from 'next/server';
import { getDbClient } from '@/storage/database/local-db';
import { ensureEmailSchema } from '@/lib/email-service';
import { createSessionToken } from '@/lib/session-auth';
import { getRequiredProductionSecret } from '@/lib/runtime-env';
import { writePlatformLog } from '@/lib/platform-logs';
import { ensureProfilePreferenceSchema, normalizePreferredTheme } from '@/lib/profile-preferences';
/**
 * Derive the effective role from a stored role and membership tier:
 * admin roles pass through untouched; any paid (non-'free') tier promotes to
 * 'vip'; a 'vip' role without a paid tier is demoted back to 'user'.
 */
function normalizeRoleForTier(role: string | null | undefined, tier: string | null | undefined): string {
  const effectiveRole = role || 'user';
  const isAdminRole = effectiveRole === 'admin' || effectiveRole === 'enterprise_admin';
  if (isAdminRole) {
    return effectiveRole;
  }
  const hasPaidTier = Boolean(tier) && tier !== 'free';
  if (hasPaidTier) {
    return 'vip';
  }
  return effectiveRole === 'vip' ? 'user' : effectiveRole;
}
/**
 * Check a plaintext password against a stored crypt() hash by asking the
 * database to recompute `crypt(password, hash)` and compare; resolves true
 * only on an exact match.
 */
async function verifyPasswordHash(client: Awaited<ReturnType<typeof getDbClient>>, passwordHash: string, password: string): Promise<boolean> {
  const comparison = await client.query(
    'SELECT $1::text = crypt($2::text, $1::text) AS ok',
    [passwordHash, password]
  );
  const firstRow = comparison.rows[0];
  return firstRow ? firstRow.ok === true : false;
}
/**
 * POST — authenticate by email, nickname or phone.
 *
 * Two paths:
 *  - Admin accounts (identifier matching a role='admin' profile by email,
 *    nickname or phone, or an identifier starting with "admin"): the password
 *    is checked against the stored crypt() hash when present, otherwise
 *    against ADMIN_DEFAULT_PASSWORD; the admin profile is then (re)provisioned
 *    with enterprise-tier defaults.
 *  - Regular accounts: the identifier is resolved to an email, the crypt()
 *    hash is verified, and the profile is loaded (or created if missing).
 *
 * With `adminOnly: true` in the body, non-admin logins are rejected (403) and
 * audit-logged. On success, returns the user payload plus a session token.
 */
export async function POST(request: NextRequest) {
  try {
    const body = await request.json();
    const { email: rawEmail, account, phone: rawPhone, password, adminOnly } = body;
    // Any of account/email/phone may carry the login identifier.
    const identifier = account || rawEmail || rawPhone;
    if (!identifier || !password) {
      return NextResponse.json({ error: 'Please enter account and password' }, { status: 400 });
    }
    const client = await getDbClient();
    try {
      await ensureEmailSchema(client);
      await ensureProfilePreferenceSchema(client);
      // Working state for the resolved account; filled in by one of the two
      // branches below and serialized into the response at the end.
      let loginEmail = identifier;
      let userId = '';
      let userRole = 'user';
      let userNickname = '';
      let userMembershipTier = 'free';
      let userCreditsBalance = 0;
      let userDailyQuotaUsed = 0;
      let userDailyQuotaLimit = 5;
      let userAvatarUrl: string | null = null;
      let userPhone: string | null = null;
      let userCreatedAt: string | null = null;
      let userEmailVerified = false;
      let userEmailVerifiedAt: string | null = null;
      let userPreferredTheme: 'dark' | 'light' = 'dark';
      const isEmailFormat = /^[^\s@]+@[^\s@]+\.[^\s@]+$/.test(identifier);
      let isAdminAccount = false;
      let adminProfileId: string | null = null;
      // --- Resolve whether the identifier belongs to an admin account ---
      if (!isEmailFormat) {
        const adminLookup = await client.query(
          "SELECT id, email, nickname, role FROM profiles WHERE (nickname = $1 OR phone = $1) AND role = 'admin' LIMIT 1",
          [identifier]
        );
        if (adminLookup.rows.length > 0) {
          isAdminAccount = true;
          adminProfileId = adminLookup.rows[0].id;
          loginEmail = adminLookup.rows[0].email;
          userNickname = adminLookup.rows[0].nickname || '';
        } else {
          // Fallback: identifiers starting with "admin" match the oldest
          // admin profile on record.
          const nicknameLower = String(identifier).toLowerCase();
          if (nicknameLower === 'admin' || nicknameLower.startsWith('admin')) {
            const anyLookup = await client.query(
              "SELECT id, email, nickname, role FROM profiles WHERE role = 'admin' ORDER BY created_at ASC LIMIT 1"
            );
            if (anyLookup.rows.length > 0) {
              isAdminAccount = true;
              adminProfileId = anyLookup.rows[0].id;
              loginEmail = anyLookup.rows[0].email;
              userNickname = anyLookup.rows[0].nickname || '';
            }
          }
        }
      } else {
        const adminLookup = await client.query(
          "SELECT id, email, nickname, role FROM profiles WHERE email = $1 AND role = 'admin' LIMIT 1",
          [identifier]
        );
        if (adminLookup.rows.length > 0) {
          isAdminAccount = true;
          adminProfileId = adminLookup.rows[0].id;
          loginEmail = identifier;
          userNickname = adminLookup.rows[0].nickname || '';
        }
      }
      if (isAdminAccount) {
        // --- Admin path: verify password, then (re)provision the profile ---
        const authResult = await client.query(
          'SELECT id, email, created_at, password_hash FROM auth.users WHERE email = $1',
          [loginEmail]
        );
        if (authResult.rows.length > 0 && authResult.rows[0].password_hash) {
          const passwordOk = await verifyPasswordHash(client, authResult.rows[0].password_hash, password);
          if (!passwordOk) {
            return NextResponse.json({ error: 'Invalid admin password' }, { status: 401 });
          }
        } else if (password !== getRequiredProductionSecret('ADMIN_DEFAULT_PASSWORD', 'admin123')) {
          // No stored hash: fall back to the configured default admin password.
          return NextResponse.json({ error: 'Invalid admin password' }, { status: 401 });
        }
        userRole = 'admin';
        userMembershipTier = 'enterprise';
        userCreditsBalance = 9999;
        userDailyQuotaLimit = 999;
        userNickname = userNickname || '管理员';
        userEmailVerified = true;
        userEmailVerifiedAt = new Date().toISOString();
        // Ensure an auth.users row exists and pick the canonical user id.
        if (authResult.rows.length > 0) {
          userId = authResult.rows[0].id;
          userCreatedAt = authResult.rows[0].created_at;
        } else if (adminProfileId) {
          userId = adminProfileId;
          await client.query(
            'INSERT INTO auth.users (id, email, created_at) VALUES ($1, $2, NOW()) ON CONFLICT (id) DO NOTHING',
            [userId, loginEmail]
          );
          userCreatedAt = new Date().toISOString();
        } else {
          userId = crypto.randomUUID();
          await client.query(
            'INSERT INTO auth.users (id, email, created_at) VALUES ($1, $2, NOW())',
            [userId, loginEmail]
          );
          userCreatedAt = new Date().toISOString();
        }
        // Upsert the admin profile with enterprise-tier defaults.
        await client.query(
          `INSERT INTO profiles (id, email, nickname, role, membership_tier, credits_balance, daily_quota_limit, daily_quota_used, is_active)
          VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
          ON CONFLICT (id) DO UPDATE SET
          role = $4,
          membership_tier = $5,
          credits_balance = $6,
          daily_quota_limit = $7,
          nickname = $3,
          is_active = true,
          email_verified = true,
          email_verified_at = COALESCE(profiles.email_verified_at, NOW()),
          email_bound_at = COALESCE(profiles.email_bound_at, NOW())`,
          [userId, loginEmail, userNickname, 'admin', 'enterprise', 9999, 999, 0, true]
        );
        const adminThemeResult = await client.query(
          'SELECT preferred_theme FROM profiles WHERE id = $1 LIMIT 1',
          [userId]
        );
        userPreferredTheme = normalizePreferredTheme(adminThemeResult.rows[0]?.preferred_theme);
        // If the admin was matched via a different profile row, keep that row
        // elevated as well.
        if (adminProfileId && adminProfileId !== userId) {
          await client.query(
            'UPDATE profiles SET role = $1, membership_tier = $2, credits_balance = $3, daily_quota_limit = $4 WHERE id = $5',
            ['admin', 'enterprise', 9999, 999, adminProfileId]
          );
        }
      } else {
        // --- Regular user path ---
        if (!isEmailFormat) {
          // Resolve nickname/phone to the profile's email first.
          const profileResult = await client.query(
            'SELECT id, email, nickname, phone, role FROM profiles WHERE nickname = $1 OR phone = $1 LIMIT 1',
            [identifier]
          );
          if (profileResult.rows.length > 0) {
            const profile = profileResult.rows[0];
            loginEmail = profile.email;
            userId = profile.id;
            userRole = profile.role || 'user';
            userNickname = profile.nickname;
            userPhone = profile.phone;
          } else {
            return NextResponse.json({ error: 'Account does not exist' }, { status: 401 });
          }
        }
        const authResult = await client.query(
          'SELECT id, email, created_at, password_hash FROM auth.users WHERE email = $1',
          [loginEmail]
        );
        if (authResult.rows.length === 0) {
          return NextResponse.json({ error: 'Account does not exist' }, { status: 401 });
        }
        const authUser = authResult.rows[0];
        if (authUser.password_hash) {
          const passwordOk = await verifyPasswordHash(client, authUser.password_hash, password);
          if (!passwordOk) {
            return NextResponse.json({ error: 'Invalid password' }, { status: 401 });
          }
        } else {
          // Accounts without a stored hash cannot log in at all.
          return NextResponse.json({ error: '该账号缺少密码凭据,请联系管理员重置密码后再登录' }, { status: 401 });
        }
        userId = authUser.id;
        userCreatedAt = authUser.created_at;
        const profileResult = await client.query(
          'SELECT nickname, role, membership_tier, credits_balance, daily_quota_used, daily_quota_limit, avatar_url, phone, email_verified, email_verified_at, preferred_theme FROM profiles WHERE id = $1',
          [userId]
        );
        if (profileResult.rows.length > 0) {
          const profile = profileResult.rows[0];
          userNickname = profile.nickname || loginEmail.split('@')[0];
          userMembershipTier = profile.membership_tier || 'free';
          // Role may be promoted/demoted based on the membership tier.
          userRole = normalizeRoleForTier(profile.role, userMembershipTier);
          userCreditsBalance = profile.credits_balance || 0;
          userDailyQuotaUsed = profile.daily_quota_used || 0;
          userDailyQuotaLimit = profile.daily_quota_limit || 5;
          userAvatarUrl = profile.avatar_url || null;
          userPhone = profile.phone || null;
          userEmailVerified = profile.email_verified === true;
          userEmailVerifiedAt = profile.email_verified_at || null;
          userPreferredTheme = normalizePreferredTheme(profile.preferred_theme);
          if (userRole !== (profile.role || 'user')) {
            // Persist the normalized role back to the profile.
            await client.query('UPDATE profiles SET role = $1, updated_at = NOW() WHERE id = $2', [userRole, userId]);
          }
        } else {
          // auth.users row exists but the profile is missing: create one.
          userNickname = loginEmail.split('@')[0];
          await client.query(
            `INSERT INTO profiles (id, email, nickname, role, membership_tier, credits_balance, daily_quota_used, daily_quota_limit)
            VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
            ON CONFLICT (id) DO UPDATE SET email = $2, nickname = $3, email_verified = false, email_verified_at = NULL`,
            [userId, loginEmail, userNickname, userRole, userMembershipTier, userCreditsBalance, userDailyQuotaUsed, userDailyQuotaLimit]
          );
        }
      }
      // Console logins require an admin role.
      if (adminOnly === true && userRole !== 'admin' && userRole !== 'enterprise_admin') {
        // Fire-and-forget audit log; failures must not block the response.
        void writePlatformLog({
          type: 'security',
          level: 'warning',
          action: 'console_login_denied',
          message: '非管理员账号尝试登录管理后台被拒绝',
          userId,
          userName: userNickname,
          userEmail: loginEmail,
          request,
        });
        return NextResponse.json({ error: 'Only administrators can log in to the console' }, { status: 403 });
      }
      const accessToken = createSessionToken(userId, userRole);
      // Fire-and-forget success audit log.
      void writePlatformLog({
        type: 'auth',
        level: 'info',
        action: adminOnly === true ? 'console_login_success' : 'user_login_success',
        message: adminOnly === true ? '管理员登录管理后台成功' : '用户登录成功',
        userId,
        userName: userNickname,
        userEmail: loginEmail,
        request,
      });
      return NextResponse.json({
        user: {
          id: userId,
          email: loginEmail,
          nickname: userNickname,
          role: userRole,
          membership_tier: userMembershipTier,
          credits_balance: userCreditsBalance,
          daily_quota_used: userDailyQuotaUsed,
          daily_quota_limit: userDailyQuotaLimit,
          avatar_url: userAvatarUrl,
          phone: userPhone,
          created_at: userCreatedAt,
          email_verified: userEmailVerified,
          email_verified_at: userEmailVerifiedAt,
          preferred_theme: userPreferredTheme,
        },
        session: { access_token: accessToken },
      });
    } finally {
      client.release();
    }
  } catch (error: unknown) {
    const message = error instanceof Error ? error.message : 'Login failed';
    console.error('[Login Error]', message);
    return NextResponse.json({ error: message }, { status: 500 });
  }
}

View File

@@ -0,0 +1,175 @@
import { NextRequest, NextResponse } from 'next/server';
import { getDbClient } from '@/storage/database/local-db';
import { ensureEmailSchema, getRequestBaseUrl, normalizeEmail, sendTemplatedEmail, verifyEmailCode } from '@/lib/email-service';
import { getRequiredProductionSecret } from '@/lib/runtime-env';
import { ensureProfilePreferenceSchema } from '@/lib/profile-preferences';
/** A password qualifies when it has at least 8 characters and contains both a letter and a digit. */
function isStrongPassword(password: string): boolean {
  if (password.length < 8) {
    return false;
  }
  return /[A-Za-z]/.test(password) && /\d/.test(password);
}
/**
 * POST — register a new account by email.
 *
 * Validations: email+password present, terms accepted, password strength.
 * A matching ADMIN_INVITE_CODE registers the single admin account (skipping
 * the email verification code); normal signups must present a valid email
 * verification code, which is consumed inside a transaction. On success the
 * auth.users row (pgcrypto bcrypt hash) and profile are created, a welcome
 * bonus is recorded best-effort, and a confirmation email is sent
 * best-effort.
 */
export async function POST(request: NextRequest) {
  try {
    const { email, password, nickname, phone, inviteCode, emailCode, acceptedTerms } = await request.json();
    const normalizedEmail = normalizeEmail(email);
    if (!normalizedEmail || !password) {
      return NextResponse.json({ error: 'Please enter email and password' }, { status: 400 });
    }
    if (acceptedTerms !== true) {
      return NextResponse.json({ error: '请先阅读并同意服务条款和隐私政策' }, { status: 400 });
    }
    if (!isStrongPassword(password)) {
      return NextResponse.json({ error: '密码至少 8 位,并同时包含字母和数字' }, { status: 400 });
    }
    // A matching invite code turns this into the (single) admin registration.
    const isAdminRegistration = typeof inviteCode === 'string'
      && inviteCode === getRequiredProductionSecret('ADMIN_INVITE_CODE', 'miaojing-admin-2024');
    const client = await getDbClient();
    try {
      await ensureEmailSchema(client);
      await ensureProfilePreferenceSchema(client);
      if (isAdminRegistration) {
        // Only one admin profile may exist.
        const existingAdminResult = await client.query(
          'SELECT id FROM profiles WHERE role = $1',
          ['admin']
        );
        if (existingAdminResult.rows.length > 0) {
          return NextResponse.json(
            { error: 'Admin account already exists' },
            { status: 400 }
          );
        }
      }
      const existingUserResult = await client.query(
        'SELECT id FROM profiles WHERE email = $1',
        [normalizedEmail]
      );
      if (existingUserResult.rows.length > 0) {
        return NextResponse.json(
          { error: 'Email is already registered' },
          { status: 400 }
        );
      }
      const userId = crypto.randomUUID();
      if (!isAdminRegistration) {
        // Normal signups must present a plausible email verification code.
        if (typeof emailCode !== 'string' || !/^[a-z0-9]{4,10}$/i.test(emailCode)) {
          return NextResponse.json({ error: '请输入正确的邮箱验证码' }, { status: 400 });
        }
        // Consume the verification code inside a transaction.
        await client.query('BEGIN');
        try {
          await verifyEmailCode(client, {
            email: normalizedEmail,
            type: 'register',
            code: typeof emailCode === 'string' ? emailCode : '',
          });
          await client.query('COMMIT');
        } catch (error) {
          await client.query('ROLLBACK');
          throw error;
        }
      }
      // Password is hashed in-database via pgcrypto: crypt(pw, gen_salt('bf')).
      await client.query(
        `INSERT INTO auth.users (id, email, password_hash, created_at)
        VALUES ($1, $2, crypt($3, gen_salt('bf')), NOW())`,
        [userId, normalizedEmail, password]
      );
      // Admin registrations get elevated tier, credits and quota.
      const role = isAdminRegistration ? 'admin' : 'user';
      const membershipTier = isAdminRegistration ? 'enterprise' : 'free';
      const creditsBalance = isAdminRegistration ? 9999 : 10;
      const dailyQuotaLimit = isAdminRegistration ? 999 : 5;
      const displayName = nickname || normalizedEmail.split('@')[0];
      await client.query(
        `INSERT INTO profiles (
        id, email, nickname, phone, role, membership_tier, credits_balance,
        daily_quota_limit, daily_quota_used, is_active, email_verified,
        email_verified_at, email_bound_at, email_sender_domain
        )
        VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, CASE WHEN $11 THEN NOW() ELSE NULL END, CASE WHEN $11 THEN NOW() ELSE NULL END, $12)
        ON CONFLICT (id) DO UPDATE SET
        email = EXCLUDED.email,
        nickname = EXCLUDED.nickname,
        phone = EXCLUDED.phone,
        role = EXCLUDED.role,
        membership_tier = EXCLUDED.membership_tier,
        credits_balance = EXCLUDED.credits_balance,
        daily_quota_limit = EXCLUDED.daily_quota_limit,
        daily_quota_used = EXCLUDED.daily_quota_used,
        is_active = EXCLUDED.is_active,
        email_verified = EXCLUDED.email_verified,
        email_verified_at = EXCLUDED.email_verified_at,
        email_bound_at = EXCLUDED.email_bound_at,
        email_sender_domain = EXCLUDED.email_sender_domain`,
        [
          userId,
          normalizedEmail,
          displayName,
          phone || null,
          role,
          membershipTier,
          creditsBalance,
          dailyQuotaLimit,
          0,
          true,
          true,
          normalizedEmail.split('@')[1] || null,
        ]
      );
      // Best-effort welcome-bonus ledger entry.
      try {
        await client.query(
          'INSERT INTO credit_transactions (user_id, amount, balance_after, type, description) VALUES ($1, $2, $3, $4, $5)',
          [userId, creditsBalance, creditsBalance, 'gift', isAdminRegistration ? 'Admin initial credits' : 'New user registration bonus']
        );
      } catch {
        // Ignore credit transaction errors.
      }
      // Best-effort confirmation email; failures are deliberately swallowed.
      await sendTemplatedEmail(client, {
        to: normalizedEmail,
        type: 'register_success',
        subject: '【妙境】注册成功',
        title: '注册成功',
        intro: isAdminRegistration ? '管理员账号已创建成功。' : '你的妙境账号已注册成功,邮箱也已完成验证。',
        note: '若非本人操作,请尽快联系管理员。',
        assetBaseUrl: getRequestBaseUrl(request) || undefined,
      }).catch(() => undefined);
      return NextResponse.json({
        user: {
          id: userId,
          email: normalizedEmail,
          nickname: displayName,
          role,
          membership_tier: membershipTier,
          credits_balance: creditsBalance,
          daily_quota_used: 0,
          daily_quota_limit: dailyQuotaLimit,
          avatar_url: null,
          phone: phone || null,
          email_verified: true,
          email_verified_at: new Date().toISOString(),
          preferred_theme: 'dark',
        },
        message: isAdminRegistration ? 'Admin account registered' : 'Registration successful',
      });
    } finally {
      client.release();
    }
  } catch (error: unknown) {
    const message = error instanceof Error ? error.message : 'Registration failed';
    console.error('[Register Error]', message);
    return NextResponse.json({ error: message }, { status: 500 });
  }
}

View File

@@ -0,0 +1,215 @@
import { NextRequest, NextResponse } from 'next/server';
import { buildCustomApiHeaders, fetchWithRetry, parseCustomApiError } from '@/lib/custom-api-fetch';
/** Request body for the custom-API connectivity test endpoint. */
interface TestApiRequest {
  apiUrl: string;    // Full generation endpoint URL to probe.
  apiKey: string;    // Bearer key for the upstream API.
  modelName: string; // Target model id; handlers fall back to 'gpt-image-2' when empty.
  provider: string;  // Provider label supplied by the caller.
}
/**
 * Probe a user-configured custom model API for connectivity and auth.
 *
 * Strategy: first hit the provider's `/models` listing (cheap, no generation
 * cost). If that endpoint times out or returns 404 with a JSON body, fall back
 * to a minimal real request against the configured generation endpoint.
 * Always answers 200 with `{ success, error?, suggestion? }` except for
 * malformed input (400) and unexpected server faults (500).
 */
export async function POST(request: NextRequest) {
  try {
    const body = await request.json();
    const { apiUrl, apiKey, modelName, provider } = body as TestApiRequest;
    if (!apiUrl || !apiKey) {
      return NextResponse.json(
        { success: false, error: '请填写 API 请求地址和 API Key' },
        { status: 400 }
      );
    }
    // ---- Step 1: Quick connectivity check with a lightweight request ----
    // Try the /models endpoint first (most APIs support this, no cost)
    // Derive the base URL from the apiUrl
    // Strip any known generation suffix plus trailing slashes to get the API root.
    const baseUrl = apiUrl.replace(/\/images\/generations.*/, '').replace(/\/videos\/generations.*/, '').replace(/\/chat\/completions.*/, '').replace(/\/+$/, '');
    const modelsUrl = `${baseUrl}/models`;
    let response: Response;
    try {
      response = await fetchWithRetry(
        modelsUrl,
        {
          method: 'GET',
          headers: buildCustomApiHeaders(apiKey),
        },
        15_000,
        0, // no retry for test - keep it fast
      );
    } catch (fetchError: unknown) {
      // If /models fails with timeout or network error, try the actual endpoint
      if (fetchError instanceof DOMException && fetchError.name === 'AbortError') {
        return await testActualEndpoint(apiUrl, apiKey, modelName || 'gpt-image-2');
      }
      const msg = fetchError instanceof Error ? fetchError.message : '请求失败';
      // Network error - could be DNS, connection refused, or firewall
      if (msg.includes('ECONNREFUSED') || msg.includes('ENOTFOUND') || msg.includes('fetch failed')) {
        return NextResponse.json({
          success: false,
          error: `无法连接到 API 地址: ${msg}`,
          suggestion: '请检查 API 地址是否正确、服务是否运行。常见原因:①地址拼写错误 ②服务未启动 ③DNS 无法解析',
        });
      }
      return NextResponse.json({
        success: false,
        error: `网络错误: ${msg}`,
        suggestion: '请检查 API 地址是否正确、网络是否可达。如果使用了代理(如 Cloudflare可能代理防火墙拦截了服务器请求',
      });
    }
    // If /models returned successfully, the key is valid
    if (response.ok) {
      let modelInfo = '';
      try {
        const data = await response.json();
        if (Array.isArray(data.data)) {
          const targetModel = modelName || 'gpt-image-2';
          // NOTE(review): 'gpt-image-2' is mapped to 'dall' before matching —
          // presumably because some proxies list it under a DALL·E id; confirm.
          const found = data.data.some((m: Record<string, unknown>) =>
            typeof m.id === 'string' && m.id.includes(targetModel.replace('gpt-image-2', 'dall'))
          );
          modelInfo = found ? `,模型 ${modelName || 'gpt-image-2'} 可用` : `,已连接(共 ${data.data.length} 个模型)`;
        }
      } catch {
        // Ignore parse error, connectivity is confirmed
      }
      return NextResponse.json({
        success: true,
        message: `连接成功${modelInfo}`,
      });
    }
    // /models returned an error - check if it's HTML (Cloudflare block)
    const errorText = await response.text().catch(() => '');
    const isHtml = errorText.trim().startsWith('<!') || errorText.trim().startsWith('<html') || errorText.trim().startsWith('<HTML');
    if (response.status === 404 && !isHtml) {
      // /models not supported (not a Cloudflare error), try actual endpoint
      return await testActualEndpoint(apiUrl, apiKey, modelName || 'gpt-image-2');
    }
    // Auth/permission error or Cloudflare block
    const parsed = isHtml
      ? { error: parseCustomApiError(response.status, errorText), suggestion: '' }
      : parseApiError(response.status, errorText);
    return NextResponse.json({
      success: false,
      error: parsed.error,
      statusCode: response.status,
      suggestion: parsed.suggestion || getDiagnosticSuggestion(response.status, isHtml),
    });
  } catch (error: unknown) {
    const message = error instanceof Error ? error.message : '测试连接失败';
    return NextResponse.json({ success: false, error: message }, { status: 500 });
  }
}
/**
 * Fallback: test by sending a minimal request to the actual generation endpoint
 *
 * Used when `/models` is unsupported (404) or timed out. Sends one tiny
 * image-generation request ('test', 1024x1024, n=1) — note this MAY incur a
 * small provider cost. Returns the same `{ success, error?, suggestion? }`
 * envelope as the main handler.
 */
async function testActualEndpoint(apiUrl: string, apiKey: string, modelName: string): Promise<NextResponse> {
  try {
    const response = await fetchWithRetry(
      apiUrl,
      {
        method: 'POST',
        headers: buildCustomApiHeaders(apiKey),
        body: JSON.stringify({
          model: modelName,
          prompt: 'test',
          n: 1,
          size: '1024x1024',
        }),
      },
      15_000,
      0, // no retry for test
    );
    if (response.ok) {
      return NextResponse.json({
        success: true,
        message: `连接成功,模型 ${modelName} 可用`,
      });
    }
    const errorText = await response.text().catch(() => '');
    const parsed = parseApiError(response.status, errorText);
    return NextResponse.json({
      success: false,
      error: parsed.error,
      statusCode: response.status,
      suggestion: parsed.suggestion,
    });
  } catch (fetchError: unknown) {
    // fetchWithRetry aborts via AbortSignal, which surfaces as a DOMException.
    if (fetchError instanceof DOMException && fetchError.name === 'AbortError') {
      return NextResponse.json({
        success: false,
        error: '连接超时15秒请检查 API 地址是否正确',
        suggestion: '可能原因①API 地址有误 ②服务响应过慢 ③代理限制了服务器IP访问',
      });
    }
    const msg = fetchError instanceof Error ? fetchError.message : '请求失败';
    return NextResponse.json({
      success: false,
      error: `网络错误: ${msg}`,
      suggestion: '请检查 API 地址和网络连通性',
    });
  }
}
/**
 * Build a troubleshooting hint for a failed probe.
 *
 * HTML bodies indicate a proxy/CDN (e.g. Cloudflare) answered instead of the
 * API, so those get proxy-specific advice; otherwise the hint is looked up by
 * HTTP status code. Returns '' when no specific advice applies.
 */
function getDiagnosticSuggestion(statusCode: number, isHtml: boolean): string {
  if (isHtml) {
    const gatewayStatuses = [502, 503, 504];
    if (gatewayStatuses.includes(statusCode)) {
      return 'API 代理(如 Cloudflare返回错误。你的 API 在本地可用但部署环境不可用时,通常是代理防火墙拦截了服务器请求。建议:①检查 API 代理的 WAF/防火墙设置 ②将服务器 IP 加入白名单 ③尝试使用 API 的直连地址(绕过 Cloudflare';
    }
    return statusCode === 403
      ? '代理防火墙拦截了请求。建议:①检查 Cloudflare WAF 规则 ②将服务器 IP 加入白名单 ③使用 API 的直连地址'
      : 'API 返回了错误页面而非 JSON 响应,可能是代理防火墙拦截。建议使用 API 的直连地址(绕过 CDN/代理)';
  }
  const suggestionByStatus = new Map<number, string>([
    [401, 'API Key 无效或已过期,请检查密钥是否正确'],
    [403, '账户无权限访问该模型,请检查账户状态'],
    [404, 'API 地址不正确,请确认完整的请求端点 URL'],
    [429, '请求频率过高或账户余额不足'],
    [500, 'API 服务端内部错误,请稍后重试'],
    [502, 'API 网关错误。可能原因①API 服务端宕机 ②代理防火墙拦截了服务器 IP'],
    [503, '服务暂不可用。可能原因:①账户余额不足 ②服务维护中 ③代理限制了服务器IP'],
  ]);
  return suggestionByStatus.get(statusCode) ?? '';
}
/**
 * Translate an API error response into a user-facing message plus a hint.
 *
 * Message formatting (including HTML-body detection) is delegated to the
 * shared `parseCustomApiError` helper; the suggestion is chosen locally by
 * status code and defaults to '' for unrecognized codes.
 */
function parseApiError(statusCode: number, errorBody: string): { error: string; suggestion: string } {
  const error = parseCustomApiError(statusCode, errorBody);
  let suggestion = '';
  switch (statusCode) {
    case 401:
      suggestion = 'API Key 无效或已过期,请检查密钥是否正确';
      break;
    case 403:
      suggestion = '账户无权限访问该模型,请检查账户状态';
      break;
    case 404:
      suggestion = 'API 地址不正确,请确认完整的请求端点 URL';
      break;
    case 429:
      suggestion = '请求频率过高或账户余额不足';
      break;
    case 500:
      suggestion = 'API 服务端内部错误,请稍后重试';
      break;
    case 502:
      suggestion = 'API 网关错误。可能原因①API 服务端宕机 ②代理防火墙拦截了服务器 IP';
      break;
    case 503:
      suggestion = '服务暂不可用。可能原因:①账户余额不足 ②服务维护中 ③代理限制了服务器IP';
      break;
  }
  return { error, suggestion };
}

View File

@@ -0,0 +1,60 @@
import { NextRequest, NextResponse } from 'next/server';
import { deleteCanvasProject, getCanvasProject, updateCanvasProject } from '@/lib/canvas-store';
import { getAuthenticatedUserId } from '@/lib/session-auth';
import { normalizeCanvasState } from '@/lib/canvas-store';
// Next.js 15 dynamic-route context: `params` is a Promise and must be awaited.
type RouteContext = {
  params: Promise<{ id: string }>;
};
/**
 * Fetch a single canvas project owned by the authenticated user.
 * 401 when unauthenticated, 404 when the id does not belong to the caller.
 */
export async function GET(request: NextRequest, context: RouteContext) {
  try {
    const viewerId = await getAuthenticatedUserId(request);
    if (!viewerId) {
      return NextResponse.json({ error: '请先登录' }, { status: 401 });
    }
    const { id: projectId } = await context.params;
    const found = await getCanvasProject(viewerId, projectId);
    if (!found) {
      return NextResponse.json({ error: '画布不存在' }, { status: 404 });
    }
    return NextResponse.json({ project: found });
  } catch (error) {
    console.error('[canvas/projects/:id] GET error:', error);
    return NextResponse.json({ error: '读取画布项目失败' }, { status: 500 });
  }
}
/**
 * Update title and/or state of a canvas project owned by the caller.
 * Missing/invalid fields are ignored; the state is normalized before saving.
 */
export async function PUT(request: NextRequest, context: RouteContext) {
  try {
    const editorId = await getAuthenticatedUserId(request);
    if (!editorId) {
      return NextResponse.json({ error: '请先登录' }, { status: 401 });
    }
    const { id: projectId } = await context.params;
    // An unreadable body degrades to an empty patch rather than a 400.
    const payload = await request.json().catch(() => ({}));
    const nextTitle = typeof payload.title === 'string' ? payload.title : undefined;
    const nextState = payload.state ? normalizeCanvasState(payload.state) : undefined;
    const updated = await updateCanvasProject(editorId, projectId, {
      title: nextTitle,
      state: nextState,
    });
    if (!updated) {
      return NextResponse.json({ error: '画布不存在' }, { status: 404 });
    }
    return NextResponse.json({ project: updated });
  } catch (error) {
    console.error('[canvas/projects/:id] PUT error:', error);
    return NextResponse.json({ error: '保存画布项目失败' }, { status: 500 });
  }
}
/**
 * Delete a canvas project owned by the caller.
 * 404 when no matching project was removed.
 */
export async function DELETE(request: NextRequest, context: RouteContext) {
  try {
    const ownerId = await getAuthenticatedUserId(request);
    if (!ownerId) {
      return NextResponse.json({ error: '请先登录' }, { status: 401 });
    }
    const { id: projectId } = await context.params;
    const removed = await deleteCanvasProject(ownerId, projectId);
    if (!removed) {
      return NextResponse.json({ error: '画布不存在' }, { status: 404 });
    }
    return NextResponse.json({ ok: true });
  } catch (error) {
    console.error('[canvas/projects/:id] DELETE error:', error);
    return NextResponse.json({ error: '删除画布项目失败' }, { status: 500 });
  }
}

View File

@@ -0,0 +1,31 @@
import { NextRequest, NextResponse } from 'next/server';
import { createCanvasProject, listCanvasProjects } from '@/lib/canvas-store';
import { getAuthenticatedUserId } from '@/lib/session-auth';
/**
 * List all canvas projects belonging to the authenticated user.
 */
export async function GET(request: NextRequest) {
  try {
    const ownerId = await getAuthenticatedUserId(request);
    if (!ownerId) {
      return NextResponse.json({ error: '请先登录' }, { status: 401 });
    }
    const projects = await listCanvasProjects(ownerId);
    return NextResponse.json({ projects });
  } catch (error) {
    console.error('[canvas/projects] GET error:', error);
    return NextResponse.json({ error: '读取画布项目失败' }, { status: 500 });
  }
}
/**
 * Create a new canvas project for the authenticated user.
 * Responds 201 with the created project; title defaults to '未命名画布'.
 */
export async function POST(request: NextRequest) {
  try {
    const ownerId = await getAuthenticatedUserId(request);
    if (!ownerId) {
      return NextResponse.json({ error: '请先登录' }, { status: 401 });
    }
    // A missing or malformed body simply yields the default title.
    const payload = await request.json().catch(() => ({}));
    const requestedTitle = typeof payload.title === 'string' ? payload.title : '未命名画布';
    const created = await createCanvasProject(ownerId, requestedTitle);
    return NextResponse.json({ project: created }, { status: 201 });
  } catch (error) {
    console.error('[canvas/projects] POST error:', error);
    return NextResponse.json({ error: '创建画布项目失败' }, { status: 500 });
  }
}

View File

@@ -0,0 +1,143 @@
import { NextRequest, NextResponse } from 'next/server';
import { getDbClient } from '@/storage/database/local-db';
import { getAuthenticatedUserId } from '@/lib/session-auth';
/**
 * Resolve the stored work type from a coarse record type plus generation params.
 *
 * Precedence: an explicit mode in params (creationMode/workType/mode) wins when
 * it is one of the known modes; then 'reverse-prompt' passes through; otherwise
 * the presence of any reference image decides img2* versus text2*.
 */
function toWorkType(type: string, params: Record<string, unknown>): string {
  const KNOWN_MODES = ['text2img', 'img2img', 'text2video', 'img2video', 'reverse-prompt'];
  const requestedMode = params.creationMode || params.workType || params.mode;
  if (typeof requestedMode === 'string' && KNOWN_MODES.includes(requestedMode)) {
    return requestedMode;
  }
  if (type === 'reverse-prompt') {
    return 'reverse-prompt';
  }
  const referenceCount = Array.isArray(params.referenceImages) ? params.referenceImages.length : 0;
  const hasReference =
    Boolean(params.referenceImage) || referenceCount > 0 || Number(params.refImageCount || 0) > 0;
  if (type === 'video') {
    return hasReference ? 'img2video' : 'text2video';
  }
  return hasReference ? 'img2img' : 'text2img';
}
/**
 * Collapse a stored work type back to the coarse client-facing category.
 */
function fromWorkType(type: string): 'image' | 'video' | 'reverse-prompt' {
  if (type === 'reverse-prompt') {
    return 'reverse-prompt';
  }
  if (type.includes('video')) {
    return 'video';
  }
  return 'image';
}
/**
 * Shape a `works` DB row into the record format the client consumes.
 * Model metadata and reference images are read back out of the params JSON.
 */
function mapWork(row: Record<string, unknown>) {
  const params = (row.params || {}) as Record<string, unknown>;
  const primaryReference = params.referenceImage;
  // Prefer the stored array; otherwise promote the single reference to a
  // one-element array; otherwise leave the field absent.
  let referenceImages;
  if (Array.isArray(params.referenceImages)) {
    referenceImages = params.referenceImages;
  } else if (primaryReference) {
    referenceImages = [primaryReference];
  } else {
    referenceImages = undefined;
  }
  return {
    id: row.id,
    type: fromWorkType(String(row.type || 'text2img')),
    url: row.result_url,
    prompt: row.prompt || '',
    negativePrompt: row.negative_prompt || undefined,
    model: params.model || '',
    modelLabel: params.modelLabel || params.model || '',
    isCustomModel: Boolean(params.isCustomModel),
    params,
    referenceImage: primaryReference,
    referenceImages,
    published: row.is_public === true,
    createdAt: row.created_at,
  };
}
/**
 * List the caller's completed works, newest first, capped at 300 rows.
 * Requires an authenticated session; responds with `{ records: [...] }`.
 */
export async function GET(request: NextRequest) {
  const userId = await getAuthenticatedUserId(request);
  if (!userId) return NextResponse.json({ error: '请先登录' }, { status: 401 });
  const client = await getDbClient();
  try {
    const result = await client.query(
      `SELECT id, type, prompt, negative_prompt, params, result_url, is_public, status, created_at
FROM works
WHERE user_id = $1 AND status = 'completed'
ORDER BY created_at DESC
LIMIT 300`,
      [userId],
    );
    return NextResponse.json({ records: result.rows.map(mapWork) });
  } finally {
    // Always return the pooled connection, even if the query throws.
    client.release();
  }
}
/**
 * Import client-side creation records into the `works` table.
 *
 * Accepts either `{ records: [...] }` or a single record as the body. The
 * whole import runs in one transaction. Records are deduplicated per user on
 * `result_url` (existing rows are returned unchanged), and base64 `data:`
 * URLs are skipped entirely so huge payloads never reach the table.
 */
export async function POST(request: NextRequest) {
  const userId = await getAuthenticatedUserId(request);
  if (!userId) return NextResponse.json({ error: '请先登录' }, { status: 401 });
  const body = await request.json();
  // Accept both bulk and single-record payload shapes.
  const records = Array.isArray(body.records) ? body.records : [body];
  const client = await getDbClient();
  try {
    await client.query('BEGIN');
    const saved = [];
    for (const record of records) {
      // Fold top-level convenience fields into params so mapWork can read them back.
      const params = {
        ...(record.params || {}),
        model: record.model || (record.params || {}).model,
        modelLabel: record.modelLabel || (record.params || {}).modelLabel,
        isCustomModel: Boolean(record.isCustomModel),
        referenceImage: record.referenceImage || (record.params || {}).referenceImage,
        referenceImages: record.referenceImages || (record.params || {}).referenceImages,
      };
      const workType = toWorkType(String(record.type || 'image'), params);
      let url = String(record.url || '').trim();
      if (workType === 'reverse-prompt') {
        // Reverse-prompt results have no media URL; store a synthetic marker
        // so the row still satisfies the result_url-based dedupe below.
        url = url && !url.startsWith('data:') ? url : `[reverse-prompt:${record.id || Date.now()}]`;
      }
      // Skip empty URLs and inline base64 data URIs.
      if (!url || url.startsWith('data:')) continue;
      // Dedupe on (user_id, result_url): return the existing row instead of inserting.
      const existing = await client.query(
        `SELECT id, type, prompt, negative_prompt, params, result_url, is_public, status, created_at
FROM works
WHERE user_id = $1 AND result_url = $2
LIMIT 1`,
        [userId, url],
      );
      if (existing.rows[0]) {
        saved.push(mapWork(existing.rows[0]));
        continue;
      }
      const result = await client.query(
        `INSERT INTO works (user_id, type, prompt, negative_prompt, params, result_url, is_public, status, credits_cost, created_at)
VALUES ($1, $2, $3, $4, $5::jsonb, $6, $7, 'completed', $8, COALESCE($9::timestamptz, NOW()))
RETURNING id, type, prompt, negative_prompt, params, result_url, is_public, status, created_at`,
        [
          userId,
          workType,
          record.prompt || '',
          record.negativePrompt || null,
          JSON.stringify(params),
          url,
          Boolean(record.published),
          Number(record.creditsCost || 0),
          // Preserve the original client-side creation time when provided.
          record.createdAt || null,
        ],
      );
      if (result.rows[0]) saved.push(mapWork(result.rows[0]));
    }
    await client.query('COMMIT');
    return NextResponse.json({ records: saved });
  } catch (error) {
    // Undo the whole batch on any failure, then let the framework report 500.
    await client.query('ROLLBACK');
    throw error;
  } finally {
    client.release();
  }
}
/**
 * Delete one work (`?id=...`) or, with no id, every work owned by the caller.
 */
export async function DELETE(request: NextRequest) {
  const ownerId = await getAuthenticatedUserId(request);
  if (!ownerId) return NextResponse.json({ error: '请先登录' }, { status: 401 });
  const workId = request.nextUrl.searchParams.get('id');
  const client = await getDbClient();
  try {
    const deletion = workId
      ? client.query('DELETE FROM works WHERE id = $1 AND user_id = $2', [workId, ownerId])
      : client.query('DELETE FROM works WHERE user_id = $1', [ownerId]);
    await deletion;
    return NextResponse.json({ success: true });
  } finally {
    client.release();
  }
}

View File

@@ -0,0 +1,158 @@
import path from 'path';
import { NextRequest, NextResponse } from 'next/server';
import { localStorage } from '@/lib/local-storage';
import { fetchPublicHttpUrl } from '@/lib/remote-fetch';
/**
* Download proxy.
*
* Supports:
* - remote http(s) URLs, fetched server-side to avoid browser CORS failures
* - same-origin relative URLs
* - local-storage URLs, read directly from disk with path traversal protection
*/
/**
 * Download proxy entry point.
 *
 * Resolution order: local-storage keys are served straight from disk;
 * same-origin relative paths are expanded to absolute URLs; absolute http(s)
 * URLs are fetched server-side (avoids browser CORS). Anything else is 400.
 */
export async function GET(request: NextRequest) {
  const url = request.nextUrl.searchParams.get('url');
  // Sanitize the user-supplied name (strips path components, quotes, newlines).
  const filename = sanitizeFilename(
    request.nextUrl.searchParams.get('filename') || 'download',
  );
  if (!url) {
    return NextResponse.json({ error: '缺少 url 参数' }, { status: 400 });
  }
  try {
    const localKey = getLocalStorageKey(url);
    if (localKey) {
      // Local files bypass HTTP entirely.
      return downloadLocalStorageFile(localKey, filename);
    }
    const targetUrl = resolveDownloadUrl(url, request.nextUrl.origin);
    if (!targetUrl) {
      return NextResponse.json(
        { error: '仅支持 HTTP(S) URL 或站内文件 URL' },
        { status: 400 },
      );
    }
    // NOTE(review): fetchPublicHttpUrl presumably enforces public-host/SSRF
    // restrictions — confirm in lib/remote-fetch. Hard 60s timeout either way.
    const response = await fetchPublicHttpUrl(targetUrl, {
      signal: AbortSignal.timeout(60_000),
    });
    if (!response.ok) {
      return NextResponse.json(
        { error: `远程文件获取失败: ${response.status}` },
        { status: response.status },
      );
    }
    const contentType = response.headers.get('content-type') || 'application/octet-stream';
    // Fully buffered (not streamed) so Content-Length can be set exactly.
    const body = await response.arrayBuffer();
    return buildDownloadResponse(
      body,
      contentType,
      filename,
      body.byteLength,
    );
  } catch (err) {
    const msg = err instanceof Error ? err.message : '下载失败';
    console.error('[Download Proxy Error]', msg);
    return NextResponse.json({ error: `下载失败: ${msg}` }, { status: 502 });
  }
}
/**
 * Extract a safe local-storage key from a URL.
 *
 * Accepts absolute or relative URLs whose path starts with
 * `/api/local-storage/`. The decoded key is normalized and rejected when it
 * could escape the storage root (`..` traversal). Returns null for any URL
 * that is not a valid local-storage reference.
 */
function getLocalStorageKey(url: string): string | null {
  const prefix = '/api/local-storage/';
  let pathname = url;
  const isAbsolute = url.startsWith('http://') || url.startsWith('https://');
  if (isAbsolute) {
    try {
      pathname = new URL(url).pathname;
    } catch {
      return null;
    }
  }
  if (!pathname.startsWith(prefix)) {
    return null;
  }
  try {
    const rawKey = decodeURIComponent(pathname.slice(prefix.length));
    const normalized = path.posix.normalize(rawKey).replace(/^\/+/, '');
    const escapesRoot =
      !normalized || normalized.startsWith('..') || normalized.includes('/../');
    return escapesRoot ? null : normalized;
  } catch {
    // decodeURIComponent throws on malformed percent-encoding.
    return null;
  }
}
/**
 * Turn the requested URL into an absolute fetch target.
 * Absolute http(s) URLs pass through; single-slash relative paths are joined
 * to this deployment's origin; everything else (including protocol-relative
 * `//host` URLs) is rejected with null.
 */
function resolveDownloadUrl(url: string, origin: string): string | null {
  const isHttp = url.startsWith('http://') || url.startsWith('https://');
  if (isHttp) {
    return url;
  }
  const isSameOriginPath = url.startsWith('/') && !url.startsWith('//');
  return isSameOriginPath ? `${origin}${url}` : null;
}
// Serve a local-storage file as an attachment; 404 when the key is missing.
function downloadLocalStorageFile(key: string, filename: string) {
  if (!localStorage.fileExists(key)) {
    return NextResponse.json({ error: '文件不存在' }, { status: 404 });
  }
  const fileBuffer = localStorage.readFile(key);
  const contentType = getContentType(key);
  return buildDownloadResponse(
    // Slice out exactly this view's byte range: a Node Buffer may be a window
    // into a larger shared ArrayBuffer pool.
    fileBuffer.buffer.slice(
      fileBuffer.byteOffset,
      fileBuffer.byteOffset + fileBuffer.byteLength,
    ) as ArrayBuffer,
    contentType,
    filename,
    fileBuffer.byteLength,
  );
}
/**
 * Wrap raw bytes in an attachment HTTP response.
 *
 * @param body        file contents
 * @param contentType MIME type for the Content-Type header
 * @param filename    download name (callers pre-sanitize via sanitizeFilename)
 * @param length      exact byte length for Content-Length
 */
function buildDownloadResponse(
  body: ArrayBuffer,
  contentType: string,
  filename: string,
  length: number,
) {
  // BUG FIX: the Content-Disposition header previously contained a literal
  // placeholder and dropped the filename parameter entirely, so every
  // download lost its name. Emit both the plain `filename` (legacy clients)
  // and the RFC 5987 `filename*` form so non-ASCII names survive.
  const encodedName = encodeURIComponent(filename);
  return new NextResponse(body, {
    status: 200,
    headers: {
      'Content-Type': contentType,
      'Content-Disposition': `attachment; filename="${filename}"; filename*=UTF-8''${encodedName}`,
      'Content-Length': String(length),
      'Cache-Control': 'no-cache',
    },
  });
}
/**
 * Reduce a user-supplied name to a safe basename for Content-Disposition:
 * directory components are stripped and CR/LF/double-quote characters are
 * replaced, falling back to 'download' when nothing remains.
 */
function sanitizeFilename(filename: string): string {
  const baseName = path.basename(filename);
  const cleaned = baseName.replace(/[\r\n"]/g, '_');
  return cleaned || 'download';
}
/**
 * Guess a MIME type from a file path's extension (case-insensitive).
 * Unknown or missing extensions fall back to application/octet-stream.
 */
function getContentType(filePath: string): string {
  const fallback = 'application/octet-stream';
  const extension = filePath.split('.').pop()?.toLowerCase() ?? '';
  const mimeByExtension = new Map<string, string>([
    ['jpg', 'image/jpeg'],
    ['jpeg', 'image/jpeg'],
    ['png', 'image/png'],
    ['webp', 'image/webp'],
    ['gif', 'image/gif'],
    ['mp4', 'video/mp4'],
    ['avi', 'video/x-msvideo'],
    ['mov', 'video/quicktime'],
    ['wmv', 'video/x-ms-wmv'],
    ['webm', 'video/webm'],
  ]);
  return mimeByExtension.get(extension) ?? fallback;
}

View File

@@ -0,0 +1,72 @@
import { NextRequest, NextResponse } from 'next/server';
import { ensureEmailSchema, getRequestBaseUrl, isValidEmail, normalizeEmail, sendTemplatedEmail, verifyEmailCode } from '@/lib/email-service';
import { getDbClient } from '@/storage/database/local-db';
export const runtime = 'nodejs';
/**
 * Minimum password policy: at least 8 characters containing at least one
 * ASCII letter and one digit.
 */
function passwordStrongEnough(value: string): boolean {
  const longEnough = value.length >= 8;
  const hasLetter = /[a-zA-Z]/.test(value);
  const hasDigit = /\d/.test(value);
  return longEnough && hasLetter && hasDigit;
}
// Surface an Error's own message; everything else gets the generic reset failure text.
function friendlyError(error: unknown) {
  if (error instanceof Error) {
    return error.message;
  }
  return '密码重置失败,请稍后再试';
}
/**
 * Complete a password reset: verify the emailed code, then set a new password.
 *
 * Single transaction: verify code → find a verified account for the email →
 * rewrite auth.users.password_hash with crypt()/gen_salt('bf') (pgcrypto
 * bcrypt) → COMMIT. A confirmation email is sent best-effort after commit.
 */
export async function POST(request: NextRequest) {
  const client = await getDbClient();
  try {
    await ensureEmailSchema(client);
    const body = await request.json();
    const email = normalizeEmail(body.email);
    const code = typeof body.code === 'string' ? body.code.trim() : '';
    const newPassword = typeof body.newPassword === 'string' ? body.newPassword : '';
    if (!isValidEmail(email) || !/^[a-z0-9]{4,10}$/i.test(code)) {
      return NextResponse.json({ error: '邮箱或验证码格式不正确' }, { status: 400 });
    }
    if (!passwordStrongEnough(newPassword)) {
      return NextResponse.json({ error: '新密码至少 8 位,并同时包含字母和数字' }, { status: 400 });
    }
    await client.query('BEGIN');
    // Throws (caught below → 400) when the code is wrong, expired, or consumed.
    await verifyEmailCode(client, { email, type: 'reset_password', code });
    const user = await client.query(
      `SELECT p.id, p.nickname
FROM profiles p
JOIN auth.users u ON u.id = p.id
WHERE LOWER(p.email) = LOWER($1) AND p.email_verified = true
LIMIT 1`,
      [email],
    );
    if (user.rows.length === 0) {
      await client.query('ROLLBACK');
      return NextResponse.json({ error: '该邮箱尚未绑定或未完成验证' }, { status: 400 });
    }
    await client.query(
      `UPDATE auth.users
SET password_hash = crypt($1, gen_salt('bf'))
WHERE id = $2`,
      [newPassword, user.rows[0].id],
    );
    await client.query('COMMIT');
    // Best-effort notification; a mail failure must not undo the reset.
    await sendTemplatedEmail(client, {
      to: email,
      type: 'password_reset_success',
      subject: '【妙境】密码已重置',
      title: '密码重置成功',
      intro: '你的妙境账号密码已成功重置。请使用新密码重新登录。',
      note: '若非本人操作,请立即联系管理员并检查账号安全。',
      assetBaseUrl: getRequestBaseUrl(request) || undefined,
    }).catch(() => undefined);
    return NextResponse.json({ success: true, message: '密码已重置,请重新登录' });
  } catch (error) {
    // ROLLBACK may itself fail if BEGIN never ran; swallow that secondary error.
    await client.query('ROLLBACK').catch(() => undefined);
    return NextResponse.json({ error: friendlyError(error) }, { status: 400 });
  } finally {
    client.release();
  }
}

View File

@@ -0,0 +1,62 @@
import { NextRequest, NextResponse } from 'next/server';
import { requireAdmin } from '@/lib/admin-auth';
import { getRequestBaseUrl, isValidEmail, normalizeEmail, sendTemplatedEmail, type EmailMessageType } from '@/lib/email-service';
import { getDbClient } from '@/storage/database/local-db';
export const runtime = 'nodejs';
// Email template types an admin may send manually; any other requested type
// falls back to 'business' in the handler below.
const ALLOWED_TYPES: EmailMessageType[] = [
  'register_success',
  'email_verified',
  'password_reset_success',
  'security_login',
  'announcement',
  'order',
  'business',
];
/**
 * Admin-only: send a one-off templated email to an arbitrary address.
 *
 * Validates recipient, title/body presence, and (when given) that the button
 * link is a plain http(s) URL. Field lengths are clamped to keep template
 * rendering sane. Any send failure surfaces as a 400 with the error message.
 */
export async function POST(request: NextRequest) {
  // requireAdmin returns a ready-made error response for non-admin callers.
  const adminError = await requireAdmin(request);
  if (adminError) return adminError;
  const client = await getDbClient();
  try {
    const body = await request.json();
    const to = normalizeEmail(body.to);
    const type = ALLOWED_TYPES.includes(body.type) ? body.type : 'business';
    const title = typeof body.title === 'string' ? body.title.trim().slice(0, 120) : '';
    const bodyText = typeof body.body === 'string' ? body.body.trim().slice(0, 4000) : '';
    const buttonText = typeof body.buttonText === 'string' ? body.buttonText.trim().slice(0, 40) : '';
    const buttonUrl = typeof body.buttonUrl === 'string' ? body.buttonUrl.trim().slice(0, 500) : '';
    if (!isValidEmail(to)) {
      return NextResponse.json({ error: '请输入正确的收件邮箱' }, { status: 400 });
    }
    if (!title || !bodyText) {
      return NextResponse.json({ error: '请填写邮件标题和正文' }, { status: 400 });
    }
    // Reject non-http(s) schemes and URLs containing quote/angle characters.
    if (buttonUrl && !/^https?:\/\/[^\s"'<>]+$/i.test(buttonUrl)) {
      return NextResponse.json({ error: '按钮链接必须是 HTTP(S) 地址' }, { status: 400 });
    }
    await sendTemplatedEmail(client, {
      to,
      type,
      subject: `【妙境】${title}`,
      title,
      body: bodyText,
      buttonText: buttonText || undefined,
      buttonUrl: buttonUrl || undefined,
      note: '这是一封系统通知邮件,请勿直接回复。',
      ipAddress: 'admin',
      assetBaseUrl: getRequestBaseUrl(request) || undefined,
    });
    return NextResponse.json({ success: true, message: '邮件已发送' });
  } catch (error) {
    const message = error instanceof Error ? error.message : '邮件发送失败';
    return NextResponse.json({ error: message }, { status: 400 });
  } finally {
    client.release();
  }
}

View File

@@ -0,0 +1,47 @@
import { NextRequest, NextResponse } from 'next/server';
import { ensureEmailSchema, isValidEmail, normalizeEmail, sendVerificationCode } from '@/lib/email-service';
import { getDbClient } from '@/storage/database/local-db';
import { getAuthenticatedUserId } from '@/lib/session-auth';
export const runtime = 'nodejs';
// Surface an Error's own message; everything else gets the generic send-failure text.
function friendlyError(error: unknown) {
  if (error instanceof Error) {
    return error.message;
  }
  return '验证码发送失败,请稍后再试';
}
/**
 * Send an email-binding verification code to the logged-in user.
 *
 * Guards: session required, address syntactically valid, account still
 * exists, and the address is not already bound to a different account.
 * Rate limiting/cooldown is handled inside sendVerificationCode.
 */
export async function POST(request: NextRequest) {
  const userId = await getAuthenticatedUserId(request);
  if (!userId) {
    return NextResponse.json({ error: '请先登录后再验证邮箱' }, { status: 401 });
  }
  const client = await getDbClient();
  try {
    await ensureEmailSchema(client);
    const body = await request.json();
    const email = normalizeEmail(body.email);
    if (!isValidEmail(email)) {
      return NextResponse.json({ error: '请输入正确的邮箱地址' }, { status: 400 });
    }
    const user = await client.query('SELECT id, email FROM profiles WHERE id = $1 LIMIT 1', [userId]);
    if (user.rows.length === 0) {
      return NextResponse.json({ error: '账号不存在,请重新登录' }, { status: 404 });
    }
    // The same email may not be bound by two different accounts.
    const duplicate = await client.query(
      'SELECT id FROM profiles WHERE LOWER(email) = LOWER($1) AND id <> $2 LIMIT 1',
      [email, userId],
    );
    if (duplicate.rows.length > 0) {
      return NextResponse.json({ error: '该邮箱已被其他账号绑定' }, { status: 400 });
    }
    const result = await sendVerificationCode(client, request, { email, type: 'verify_email', userId });
    return NextResponse.json({ ...result, message: '验证码已发送,请查收邮箱' });
  } catch (error) {
    return NextResponse.json({ error: friendlyError(error) }, { status: 400 });
  } finally {
    client.release();
  }
}

View File

@@ -0,0 +1,36 @@
import { NextRequest, NextResponse } from 'next/server';
import { sendVerificationCode, normalizeEmail, isValidEmail } from '@/lib/email-service';
import { getDbClient } from '@/storage/database/local-db';
export const runtime = 'nodejs';
// Surface an Error's own message; everything else gets the generic send-failure text.
function friendlyError(error: unknown) {
  if (error instanceof Error) {
    return error.message;
  }
  return '验证码发送失败,请稍后再试';
}
/**
 * Send a registration verification code.
 *
 * Rejects addresses that already belong to an account (registration codes are
 * only for new users); cooldown/rate limiting lives in sendVerificationCode.
 */
export async function POST(request: NextRequest) {
  const client = await getDbClient();
  try {
    const body = await request.json();
    const email = normalizeEmail(body.email);
    if (!isValidEmail(email)) {
      return NextResponse.json({ error: '请输入正确的邮箱地址' }, { status: 400 });
    }
    const existing = await client.query(
      'SELECT id FROM profiles WHERE LOWER(email) = LOWER($1) LIMIT 1',
      [email],
    );
    if (existing.rows.length > 0) {
      return NextResponse.json({ error: '该邮箱已注册,请直接登录' }, { status: 400 });
    }
    const result = await sendVerificationCode(client, request, { email, type: 'register' });
    return NextResponse.json({ ...result, message: '验证码已发送,请查收邮箱' });
  } catch (error) {
    return NextResponse.json({ error: friendlyError(error) }, { status: 400 });
  } finally {
    client.release();
  }
}

View File

@@ -0,0 +1,47 @@
import { NextRequest, NextResponse } from 'next/server';
import { ensureEmailSchema, isValidEmail, normalizeEmail, sendVerificationCode } from '@/lib/email-service';
import { getDbClient } from '@/storage/database/local-db';
export const runtime = 'nodejs';
/**
 * Request a password-reset verification code.
 *
 * Anti-enumeration: the response is the same generic success message whether
 * or not the email maps to a verified account with a password — the code is
 * only actually sent when it does. Send failures (e.g. cooldown) are the one
 * exception and surface as a 400.
 */
export async function POST(request: NextRequest) {
  const client = await getDbClient();
  try {
    await ensureEmailSchema(client);
    const body = await request.json();
    const email = normalizeEmail(body.email);
    if (!isValidEmail(email)) {
      return NextResponse.json({ error: '请输入正确的邮箱地址' }, { status: 400 });
    }
    // Only accounts with a verified email AND an existing password can reset.
    const user = await client.query(
      `SELECT p.id
FROM profiles p
JOIN auth.users u ON u.id = p.id
WHERE LOWER(p.email) = LOWER($1) AND p.email_verified = true AND u.password_hash IS NOT NULL
LIMIT 1`,
      [email],
    );
    if (user.rows.length > 0) {
      try {
        await sendVerificationCode(client, request, {
          email,
          type: 'reset_password',
          userId: user.rows[0].id,
        });
      } catch (error) {
        const message = error instanceof Error ? error.message : '验证码发送失败,请稍后再试';
        return NextResponse.json({ error: message }, { status: 400 });
      }
    }
    // Deliberately identical response for known and unknown emails.
    return NextResponse.json({
      success: true,
      cooldown: 60,
      message: '如果该邮箱已绑定并验证,我们已发送重置验证码',
    });
  } finally {
    client.release();
  }
}

View File

@@ -0,0 +1,73 @@
import { NextRequest, NextResponse } from 'next/server';
import { ensureEmailSchema, getRequestBaseUrl, isValidEmail, normalizeEmail, sendTemplatedEmail, verifyEmailCode } from '@/lib/email-service';
import { getDbClient } from '@/storage/database/local-db';
import { getAuthenticatedUserId } from '@/lib/session-auth';
export const runtime = 'nodejs';
// Surface an Error's own message; everything else gets the generic verify-failure text.
function friendlyError(error: unknown) {
  if (error instanceof Error) {
    return error.message;
  }
  return '邮箱验证失败,请稍后再试';
}
/**
 * Verify an email-binding code and attach the address to the logged-in account.
 *
 * Single transaction: verify code → re-check the address is not bound to a
 * different account → update the profile (verified flags, bound-at timestamp,
 * sender domain) → mirror the email onto auth.users → COMMIT. A confirmation
 * email is sent best-effort after commit.
 */
export async function POST(request: NextRequest) {
  const userId = await getAuthenticatedUserId(request);
  if (!userId) {
    return NextResponse.json({ error: '请先登录后再验证邮箱' }, { status: 401 });
  }
  const client = await getDbClient();
  try {
    await ensureEmailSchema(client);
    const body = await request.json();
    const email = normalizeEmail(body.email);
    const code = typeof body.code === 'string' ? body.code.trim() : '';
    if (!isValidEmail(email) || !/^[a-z0-9]{4,10}$/i.test(code)) {
      return NextResponse.json({ error: '邮箱或验证码格式不正确' }, { status: 400 });
    }
    await client.query('BEGIN');
    // Throws (caught below → 400) when the code is wrong, expired, or consumed.
    await verifyEmailCode(client, { email, type: 'verify_email', code });
    // Re-check inside the transaction: another account may have bound the
    // address between code request and verification.
    const duplicate = await client.query(
      'SELECT id FROM profiles WHERE LOWER(email) = LOWER($1) AND id <> $2 LIMIT 1',
      [email, userId],
    );
    if (duplicate.rows.length > 0) {
      await client.query('ROLLBACK');
      return NextResponse.json({ error: '该邮箱已被其他账号绑定' }, { status: 400 });
    }
    const domain = email.includes('@') ? email.split('@')[1] : null;
    // email_bound_at is only set on first bind (COALESCE keeps the original).
    const profile = await client.query(
      `UPDATE profiles
SET email = $1,
email_verified = true,
email_verified_at = NOW(),
email_bound_at = COALESCE(email_bound_at, NOW()),
email_sender_domain = $2,
updated_at = NOW()
WHERE id = $3
RETURNING id, email, nickname, phone, role, membership_tier, credits_balance, daily_quota_used, daily_quota_limit, avatar_url, created_at, email_verified, email_verified_at, email_bound_at`,
      [email, domain, userId],
    );
    await client.query('UPDATE auth.users SET email = $1 WHERE id = $2', [email, userId]);
    await client.query('COMMIT');
    // Best-effort notification; a mail failure must not undo the binding.
    await sendTemplatedEmail(client, {
      to: email,
      type: 'email_verified',
      subject: '【妙境】邮箱验证成功',
      title: '邮箱验证成功',
      intro: '你的账号邮箱已完成验证,后续可用于找回密码和安全通知。',
      note: '若非本人操作,请尽快修改账号密码。',
      assetBaseUrl: getRequestBaseUrl(request) || undefined,
    }).catch(() => undefined);
    return NextResponse.json({ success: true, profile: profile.rows[0], message: '邮箱验证成功' });
  } catch (error) {
    // ROLLBACK may itself fail if BEGIN never ran; swallow that secondary error.
    await client.query('ROLLBACK').catch(() => undefined);
    return NextResponse.json({ error: friendlyError(error) }, { status: 400 });
  } finally {
    client.release();
  }
}

Some files were not shown because too many files have changed in this diff Show More