ModelAi/server2.js

// server2.js
const express = require("express");
const multer = require("multer");
const cors = require("cors");
const fs = require("fs");
const path = require("path");
const OpenAI = require("openai");
const bodyParser = require("body-parser");
const app = express();
const port = 30004;
const mysql = require("mysql2/promise");
// Initialize the MySQL connection pool
const pool = mysql.createPool({
  host: "1.95.137.212",
  user: "root",
  password: "maibu520",
  database: "fastbee",
  waitForConnections: true,
  connectionLimit: 10,
});
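// The ai_chat_messages table is not created by this file. A minimal sketch of a
// schema that matches the queries below (column names are taken from the
// INSERT/SELECT statements in this file; types, sizes, and the index are assumptions):
//
//   CREATE TABLE IF NOT EXISTS ai_chat_messages (
//     id            BIGINT AUTO_INCREMENT PRIMARY KEY,
//     session_id    VARCHAR(64) NOT NULL,
//     user_id       VARCHAR(64) NULL,
//     role          ENUM('user', 'assistant') NOT NULL,
//     content       MEDIUMTEXT,
//     image_base64  LONGTEXT NULL,
//     created_at    TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
//     INDEX idx_session (session_id)
//   );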
// Allow large request bodies
app.use(bodyParser.json({ limit: "500mb" }));
app.use(bodyParser.urlencoded({ limit: "500mb", extended: true }));
// Multer upload directory
const upload = multer({
  dest: "uploads/",
  limits: { fileSize: 500 * 1024 * 1024 }, // 500MB
});
// File polyfill for Node < 20
if (typeof File === "undefined") {
  globalThis.File = require("node:buffer").File;
}
// Enable CORS
app.use(cors());
// Note: bodyParser.json above already parses JSON bodies, so this 200mb limit
// is effectively superseded by the 500mb limit registered first.
app.use(express.json({ limit: "200mb" })); // accept large JSON
app.use(express.urlencoded({ extended: true })); // accept form-data fields
// Initialize the Hunyuan client (OpenAI-compatible endpoint)
const client = new OpenAI({
  apiKey: "sk-LVfG90qgdhf9kKQUucqBSLioxamDu7gBeW9boXqKOxIDJt7H",
  baseURL: "https://api.hunyuan.cloud.tencent.com/v1",
  timeout: 120000,
});
app.get("/test-db", async (req, res) => {
try {
await pool.query(
"INSERT INTO ai_chat_messages (session_id, role, content) VALUES (?, 'user', ?)",
["sess1", "hello"]
);
res.send("OK");
} catch (err) {
console.error(err);
res.send("FAIL");
}
});
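// Quick smoke test (sketch): this route answers "OK" when the pool and the
// ai_chat_messages table are reachable, "FAIL" otherwise. The host and port
// below mirror the app.listen call at the bottom of this file:
//   fetch("http://localhost:30004/test-db").then(r => r.text()).then(console.log);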
// Fetch the chat history of a session
app.get("/history", async (req, res) => {
  try {
    const { sessionId } = req.query;
    if (!sessionId) {
      return res.status(400).json({ code: 1, error: "sessionId 必填" }); // "sessionId is required"
    }
    // Query the database
    const [rows] = await pool.query(
      "SELECT user_id, role, content, image_base64, created_at FROM ai_chat_messages WHERE session_id = ? ORDER BY id ASC",
      [sessionId]
    );
    res.json({
      code: 0,
      sessionId,
      messages: rows,
    });
  } catch (err) {
    console.error("Failed to fetch chat history:", err);
    res.status(500).json({ code: 1, error: err.message });
  }
});
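// Example (sketch) of a client call to this route. The function name and the
// host/port are illustrative; the response shape matches the res.json above:
//   async function loadHistory(sessionId) {
//     const res = await fetch(`http://localhost:30004/history?sessionId=${sessionId}`);
//     const data = await res.json(); // { code: 0, sessionId, messages: [...] }
//     return data.messages;
//   }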
// ====================== API ======================
app.post("/analyze", upload.single("file"), async (req, res) => {
  try {
    // Stream the result back to the client as it is generated
    res.setHeader("Content-Type", "text/plain; charset=utf-8");
    res.setHeader("Transfer-Encoding", "chunked");
    res.setHeader("Cache-Control", "no-cache");
    res.setHeader("Connection", "keep-alive");
    const { userPrompt, imageBase64, userId, sessionId } = req.body;
    // If the frontend did not send a sessionId, generate one
    const sessId = sessionId || Date.now().toString();
    // ------------------ 1. Save the user's question ------------------
    await pool.query(
      "INSERT INTO ai_chat_messages (session_id, user_id, role, content, image_base64) VALUES (?, ?, 'user', ?, ?)",
      [sessId, userId || null, userPrompt || "", imageBase64 || null]
    );
    // ------------------ 2. Image analysis ------------------
    const completion = await client.chat.completions.create({
      model: "hunyuan-vision",
      stream: true,
      // Parameters beyond the standard OpenAI chat schema, presumably specific
      // to the Hunyuan endpoint
      do_scene: true,
      do_behavior: true,
      llm_model: "hunyuan-turbos-latest",
      messages: [
        {
          role: "system",
          // "You are an image-analysis expert for new-energy projects: give only
          // conclusions and recommendations; do not output any mathematical
          // derivations, formulas, or LaTeX."
          content:
            "你是一名新能源项目的图像分析专家,只需给出结论与建议,禁止输出任何数学推导、公式或 LaTeX 代码。",
        },
        {
          role: "user",
          content: [
            // Fallback prompt: "Please describe the image."
            { type: "text", text: userPrompt || "请描述图片内容。" },
            imageBase64
              ? {
                  type: "image_url",
                  image_url: {
                    url: imageBase64.startsWith("data:")
                      ? imageBase64
                      : `data:image/png;base64,${imageBase64}`,
                  },
                }
              : null,
          ].filter(Boolean),
        },
      ],
    });
    // Forward the vision stream to the client and accumulate the full text
    let sceneText = "";
    for await (const chunk of completion) {
      const delta = chunk.choices[0]?.delta?.content || "";
      sceneText += delta;
      if (delta)
        res.write(
          `data: ${JSON.stringify({ type: "scene", content: delta })}\n\n`
        );
    }
    // Save the image-analysis result to the database
    await pool.query(
      "INSERT INTO ai_chat_messages (session_id, user_id, role, content) VALUES (?, ?, 'assistant', ?)",
      [sessId, userId || null, sceneText]
    );
    // ------------------ 3. LLM analysis ------------------
    const llmStream = await client.chat.completions.create({
      model: "hunyuan-turbos-latest",
      stream: true,
      messages: [
        {
          role: "system",
          // "You are an AI image-analysis analyst for new-energy projects."
          content: "你是一名新能源项目的图像分析AI分析师。",
        },
        {
          role: "user",
          // "Based on the scene-analysis result below, provide deployment
          // suggestions or optimization advice."
          content: `请基于以下场景分析结果提供部署建议或优化意见:\n${sceneText}`,
        },
      ],
    });
    let llmText = "";
    for await (const chunk of llmStream) {
      const delta = chunk.choices[0]?.delta?.content || "";
      llmText += delta;
      if (delta)
        res.write(
          `data: ${JSON.stringify({ type: "llm", content: delta })}\n\n`
        );
    }
    // Save the LLM output to the database
    await pool.query(
      "INSERT INTO ai_chat_messages (session_id, user_id, role, content) VALUES (?, ?, 'assistant', ?)",
      [sessId, userId || null, llmText]
    );
    res.write("data: [DONE]\n\n");
    res.end();
    // Delete the uploaded file, if any
    if (req.file && req.file.path) {
      fs.unlinkSync(req.file.path);
    }
  } catch (err) {
    console.error("❌ API call failed:", err.response?.data || err.message);
    // Once streaming has started the headers are already sent, so a JSON error
    // response can only be written before the first chunk went out.
    if (!res.headersSent) {
      res.status(500).json({ code: 1, error: err.response?.data || err.message });
    } else {
      res.end();
    }
  }
});
// Serve uploaded files statically
app.use("/uploads", express.static("uploads"));
// Start the server
app.listen(port, () => {
  console.log(`✅ Server running at http://localhost:${port}`);
});
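// ---------------------------------------------------------------------------
// Client-side sketch (not used by the server): one way a frontend could consume
// the /analyze stream above. It assumes fetch with a readable body (Node 18+ or
// a browser) and the "data: {...}\n\n" framing written by the handler; the
// function name, host, and port are illustrative only.
async function exampleAnalyzeClient(userPrompt, imageBase64, sessionId) {
  const res = await fetch("http://localhost:30004/analyze", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ userPrompt, imageBase64, sessionId }),
  });
  const reader = res.body.getReader();
  const decoder = new TextDecoder("utf-8");
  let buffer = "";
  let sceneText = "";
  let llmText = "";
  for (;;) {
    const { value, done } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    // Frames are separated by blank lines and prefixed with "data: "
    let sep;
    while ((sep = buffer.indexOf("\n\n")) !== -1) {
      const frame = buffer.slice(0, sep).trim();
      buffer = buffer.slice(sep + 2);
      if (!frame.startsWith("data:")) continue;
      const payload = frame.slice(5).trim();
      if (payload === "[DONE]") return { sceneText, llmText };
      const { type, content } = JSON.parse(payload); // type is "scene" or "llm"
      if (type === "scene") sceneText += content;
      else if (type === "llm") llmText += content;
    }
  }
  return { sceneText, llmText };
}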