feat: siliconflow model

parent 11d0e06236
commit 0ecf9b6321
@@ -0,0 +1,11 @@
+# some chat agents
+
+## env
+
+### Request proxy
+
+- HTTP_PROXY_HOST
+
+- HTTP_PROXY_PORT
+
+- HTTP_PROXY_PROTOCOL

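The three variables above configure an outbound HTTP proxy for the server's requests. As a rough illustration only (not part of this commit, and assuming dotenv loads them from a .env file as the rest of the project does), they could be read and validated like the sketch below; the wiring the commit actually adds is the axios utility near the end of the diff.

// Hypothetical sketch (not in this commit): reading and validating the proxy
// variables documented above before handing them to an HTTP client.
import { config } from "dotenv";

config();

interface ProxySettings {
  host: string;
  port: number;
  protocol: string;
}

// Returns the proxy settings, or undefined when any variable is missing.
export function readProxySettings(): ProxySettings | undefined {
  const { HTTP_PROXY_HOST, HTTP_PROXY_PORT, HTTP_PROXY_PROTOCOL } = process.env;
  if (!HTTP_PROXY_HOST || !HTTP_PROXY_PORT || !HTTP_PROXY_PROTOCOL) {
    return undefined;
  }
  return {
    host: HTTP_PROXY_HOST,
    port: parseInt(HTTP_PROXY_PORT, 10),
    protocol: HTTP_PROXY_PROTOCOL,
  };
}
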
@@ -22,6 +22,7 @@
   },
   "dependencies": {
     "@types/node": "^22.13.9",
+    "axios": "^1.8.2",
     "dotenv": "^16.4.7",
     "express": "^4.21.2",
     "openai": "^4.86.1"

@@ -11,6 +11,9 @@ importers:
       '@types/node':
         specifier: ^22.13.9
         version: 22.13.9
+      axios:
+        specifier: ^1.8.2
+        version: 1.8.2
       dotenv:
         specifier: ^16.4.7
         version: 16.4.7

@@ -141,6 +144,9 @@ packages:
     resolution: {integrity: sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==}
     engines: {node: '>=8.0.0'}
 
+  axios@1.8.2:
+    resolution: {integrity: sha512-ls4GYBm5aig9vWx8AWDSGLpnpDQRtWAfrjU+EuytuODrFBkqesN2RkOQCBzrA1RQNHw1SmRMSDDDSwzNAYQ6Rg==}
+
   body-parser@1.20.3:
     resolution: {integrity: sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==}
     engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16}

@@ -276,6 +282,15 @@
     resolution: {integrity: sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==}
     engines: {node: '>= 0.8'}
 
+  follow-redirects@1.15.9:
+    resolution: {integrity: sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==}
+    engines: {node: '>=4.0'}
+    peerDependencies:
+      debug: '*'
+    peerDependenciesMeta:
+      debug:
+        optional: true
+
   form-data-encoder@1.7.2:
     resolution: {integrity: sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==}
 

@@ -466,6 +481,9 @@
     resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==}
     engines: {node: '>= 0.10'}
 
+  proxy-from-env@1.1.0:
+    resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==}
+
   pump@3.0.2:
     resolution: {integrity: sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw==}
 

@@ -719,6 +737,14 @@ snapshots:
 
   atomic-sleep@1.0.0: {}
 
+  axios@1.8.2:
+    dependencies:
+      follow-redirects: 1.15.9
+      form-data: 4.0.2
+      proxy-from-env: 1.1.0
+    transitivePeerDependencies:
+      - debug
+
   body-parser@1.20.3:
     dependencies:
       bytes: 3.1.2

@@ -873,6 +899,8 @@
     transitivePeerDependencies:
       - supports-color
 
+  follow-redirects@1.15.9: {}
+
   form-data-encoder@1.7.2: {}
 
   form-data@4.0.2:

@@ -1059,6 +1087,8 @@
       forwarded: 0.2.0
       ipaddr.js: 1.9.1
 
+  proxy-from-env@1.1.0: {}
+
   pump@3.0.2:
     dependencies:
       end-of-stream: 1.4.4

@@ -0,0 +1,134 @@
+import { Router } from "express";
+import { existsSync, readFileSync } from "fs";
+import { ChatCompletionMessageParam, ChatCompletionSystemMessageParam } from "openai/resources";
+import path from "path";
+import llm from "../services/llm";
+import logger from "../utils/logger";
+import { charactersPath, systemPromptPath } from "../utils/preload";
+
+const router = Router();
+
+// Regex for extracting the affinity value from the start of a message
+const affinityRegex = /^\|(\d+)\|/;
+
+type ChatCompletionMessageWithAffinityParam = ChatCompletionMessageParam & {
+  affinity: number;
+}
+
+router.post('/:character', async (req, res) => {
+  const { messages, stream = false } = req.body;
+  const { character } = req.params;
+
+  if (!character || !existsSync(path.resolve(charactersPath, `${character}.md`))) {
+    res.status(404).json({ code: 404, message: '角色不存在' });
+    return;
+  }
+
+  const characterPrompt = readFileSync(path.resolve(charactersPath, `${character}.md`), 'utf-8');
+
+  let affinity = 0;
+
+  logger.debug(`[${character}] 请求:${messages[messages.length - 1].content}`);
+
+  const systemPrompt = readFileSync(systemPromptPath, 'utf-8');
+
+  const systemMessage: ChatCompletionSystemMessageParam =
+  {
+    role: 'system',
+    content: systemPrompt + '\n\n' + characterPrompt
+  };
+
+  const requestOptions = {
+    messages: [systemMessage, ...messages.map((message: ChatCompletionMessageParam & ChatCompletionMessageWithAffinityParam) => {
+      if (message.affinity) {
+        return {
+          role: message.role,
+          content: `|${message.affinity}|${(message.content as string).replace(affinityRegex, '')}`
+        }
+      } else {
+        return message;
+      }
+    })],
+    model: "deepseek-chat",
+  }
+
+  try {
+    if (stream) {
+      // Streaming response
+      res.setHeader('Content-Type', 'text/event-stream');
+      res.setHeader('Cache-Control', 'no-cache');
+      res.setHeader('Connection', 'keep-alive');
+
+      const stream = await llm.chat.completions.create({
+        ...requestOptions,
+        stream: true,
+      });
+
+      let buffer = '';
+      let totalContent = '';
+      let affinityExtracted = false;
+
+      for await (const chunk of stream) {
+        const content = chunk.choices[0]?.delta?.content || '';
+        buffer += content;
+
+        // Process the buffered content
+        if (!affinityExtracted) {
+          const match = affinityRegex.exec(buffer);
+
+          if (match) {
+            affinity = parseInt(match[1]);
+            logger.debug(`[${character}] 好感度更新 ${affinity}`);
+            res.write(`data: ${JSON.stringify({ affinity })}\n\n`);
+            // Keep only the remainder after the match
+            buffer = buffer.slice(match[0].length);
+            affinityExtracted = true;
+          }
+        }
+
+        // Once the affinity has been extracted, forward the processed content
+        if (affinityExtracted) {
+          // Send the current buffer contents and clear it
+          if (buffer.length > 0) {
+            res.write(`data: ${JSON.stringify({ content: buffer.trim() })}\n\n`);
+            totalContent += buffer;
+            buffer = '';
+          }
+        }
+      }
+
+      // Send whatever is left in the buffer
+      if (buffer.length > 0) {
+        res.write(`data: ${JSON.stringify({ content: buffer })}\n\n`);
+        totalContent += buffer;
+      }
+
+      res.end();
+
+      logger.debug(`[${character}] 回复:${totalContent}`);
+    } else {
+      // Non-streaming response
+      const completion = await llm.chat.completions.create({
+        ...requestOptions,
+      });
+
+      let content = completion.choices[0].message.content || '';
+      const match = affinityRegex.exec(content);
+
+      if (match) {
+        affinity = parseInt(match[1]);
+        logger.debug(`[${character}] 好感度更新 ${affinity}`);
+        content = content.replace(affinityRegex, '').trim();
+      }
+
+      logger.debug(`[${character}] 回复:${content}`);
+
+      res.json({ content, affinity });
+    }
+  } catch (error) {
+    logger.error("生成文本时出错:", error);
+    res.status(500).json({ code: 500, message: "生成文本时出错" });
+  }
+});
+
+export default router;

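For orientation, here is a hypothetical client for the streaming branch of the new route above. It is not part of the commit, and the base URL and port are assumptions; the router diff that follows mounts this handler at /chat, so the endpoint is POST /chat/:character. In streaming mode the handler emits server-sent-event frames of the form `data: {...}` carrying either the extracted affinity or a chunk of reply text.

// Hypothetical client sketch (not part of the commit). Base URL and port are made up.
async function streamChat(character: string, messages: { role: string; content: string }[]) {
  const response = await fetch(`http://localhost:3000/chat/${character}`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ messages, stream: true }),
  });

  const reader = response.body!.getReader();
  const decoder = new TextDecoder();
  let pending = "";

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    pending += decoder.decode(value, { stream: true });

    // Split complete SSE frames; keep any partial frame in the buffer.
    const frames = pending.split("\n\n");
    pending = frames.pop() ?? "";
    for (const frame of frames) {
      if (!frame.startsWith("data: ")) continue;
      const payload = JSON.parse(frame.slice("data: ".length));
      if (payload.affinity !== undefined) {
        console.log("affinity:", payload.affinity);
      } else if (payload.content) {
        process.stdout.write(payload.content);
      }
    }
  }
}
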
@@ -1,134 +1,12 @@
 import { Router } from "express";
-import { existsSync, readFileSync } from "fs";
-import { ChatCompletionMessageParam, ChatCompletionSystemMessageParam } from "openai/resources";
-import llm from "../services/llm";
-import logger from "../utils/logger";
-import { systemPromptPath, charactersPath } from "../utils/preload";
-import path from "path";
+
+import chatRouter from "./chat";
+import llmRouter from "./llm";
 
 const router = Router();
 
-// Regex for extracting the affinity value from the start of a message
-const affinityRegex = /^\|(\d+)\|/;
-
-type ChatCompletionMessageWithAffinityParam = ChatCompletionMessageParam & {
-  affinity: number;
-}
-
-router.post('/chat/:character', async (req, res) => {
-  const { messages, stream = false } = req.body;
-  const { character } = req.params;
-
-  if (!character || !existsSync(path.resolve(charactersPath, `${character}.md`))) {
-    res.status(404).json({ code: 404, message: '角色不存在' });
-    return;
-  }
-
-  const characterPrompt = readFileSync(path.resolve(charactersPath, `${character}.md`), 'utf-8');
-
-  let affinity = 0;
-
-  logger.debug(`[${character}] 请求:${messages[messages.length - 1].content}`);
-
-  const systemPrompt = readFileSync(systemPromptPath, 'utf-8');
-
-  const systemMessage: ChatCompletionSystemMessageParam =
-  {
-    role: 'system',
-    content: systemPrompt + '\n\n' + characterPrompt
-  };
-
-  const requestOptions = {
-    messages: [systemMessage, ...messages.map((message: ChatCompletionMessageParam & ChatCompletionMessageWithAffinityParam) => {
-      if (message.affinity) {
-        return {
-          role: message.role,
-          content: `|${message.affinity}|${(message.content as string).replace(affinityRegex, '')}`
-        }
-      } else {
-        return message;
-      }
-    })],
-    model: "deepseek-chat",
-  }
-
-  try {
-    if (stream) {
-      // Streaming response
-      res.setHeader('Content-Type', 'text/event-stream');
-      res.setHeader('Cache-Control', 'no-cache');
-      res.setHeader('Connection', 'keep-alive');
-
-      const stream = await llm.chat.completions.create({
-        ...requestOptions,
-        stream: true,
-      });
-
-      let buffer = '';
-      let totalContent = '';
-      let affinityExtracted = false;
-
-      for await (const chunk of stream) {
-        const content = chunk.choices[0]?.delta?.content || '';
-        buffer += content;
-
-        // Process the buffered content
-        if (!affinityExtracted) {
-          const match = affinityRegex.exec(buffer);
-
-          if (match) {
-            affinity = parseInt(match[1]);
-            logger.debug(`[${character}] 好感度更新 ${affinity}`);
-            res.write(`data: ${JSON.stringify({ affinity })}\n\n`);
-            // Keep only the remainder after the match
-            buffer = buffer.slice(match[0].length);
-            affinityExtracted = true;
-          }
-        }
-
-        // Once the affinity has been extracted, forward the processed content
-        if (affinityExtracted) {
-          // Send the current buffer contents and clear it
-          if (buffer.length > 0) {
-            res.write(`data: ${JSON.stringify({ content: buffer.trim() })}\n\n`);
-            totalContent += buffer;
-            buffer = '';
-          }
-        }
-      }
-
-      // Send whatever is left in the buffer
-      if (buffer.length > 0) {
-        res.write(`data: ${JSON.stringify({ content: buffer })}\n\n`);
-        totalContent += buffer;
-      }
-
-      res.end();
-
-      logger.debug(`[${character}] 回复:${totalContent}`);
-    } else {
-      // Non-streaming response
-      const completion = await llm.chat.completions.create({
-        ...requestOptions,
-      });
-
-      let content = completion.choices[0].message.content || '';
-      const match = affinityRegex.exec(content);
-
-      if (match) {
-        affinity = parseInt(match[1]);
-        logger.debug(`[${character}] 好感度更新 ${affinity}`);
-        content = content.replace(affinityRegex, '').trim();
-      }
-
-      logger.debug(`[${character}] 回复:${content}`);
-
-      res.json({ content, affinity });
-    }
-  } catch (error) {
-    logger.error("生成文本时出错:", error);
-    res.status(500).json({ code: 500, message: "生成文本时出错" });
-  }
-});
+router.use('/chat', chatRouter);
+
+router.use('/llm', llmRouter);
 
 export default router;

@@ -0,0 +1,33 @@
+import { Router } from "express";
+import { getSiliconFlowModelList } from "../services/llm";
+import logger from "../utils/logger";
+
+const router = Router();
+
+router.post('/getSiliconFlowModelList', async (req, res) => {
+  const { apiKey } = req.body;
+
+  if (!apiKey) {
+    res.status(400).json({
+      code: 400,
+      message: 'apiKey 不能为空'
+    });
+    return;
+  }
+  try {
+    const models = await getSiliconFlowModelList(apiKey);
+
+    res.json({
+      code: 200,
+      data: models
+    });
+  } catch (error) {
+    logger.error(error);
+    res.status(500).json({
+      code: 500,
+      message: '获取模型列表失败'
+    });
+  }
+});
+
+export default router;

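A hypothetical call against the route above, assuming it is mounted at /llm as the index router change shows and that the server listens on localhost:3000 (an assumption). The response envelope mirrors the code/data/message fields used in the handler.

// Hypothetical usage sketch (not part of the commit).
async function fetchSiliconFlowModels(apiKey: string) {
  const response = await fetch("http://localhost:3000/llm/getSiliconFlowModelList", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ apiKey }),
  });

  const body = await response.json();
  if (body.code !== 200) {
    // 400 when apiKey is missing, 500 when the upstream request fails
    throw new Error(body.message);
  }
  // body.data holds the SiliconFlowModelListItem[] defined in the typings diff below
  return body.data;
}
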
@@ -1,9 +1,27 @@
 import OpenAI from "openai";
 import { config } from 'dotenv';
+import axios from "../utils/axios";
+import { SiliconFlowModelListItem } from "../typings/llm";
 
 config();
 
-export default new OpenAI({
+const openai = new OpenAI({
   apiKey: process.env.DEEPSEEK_API_KEY,
   baseURL: 'https://api.deepseek.com',
 });
+
+export default openai;
+
+export const getSiliconFlowModelList = async (apiKey: string) => {
+  const response = await axios.get('https://api.siliconflow.cn/v1/models?type=text&sub_type=chat', {
+    headers: {
+      'Authorization': `Bearer ${apiKey}`
+    }
+  }).then(res => res.data);
+
+  if (typeof response === 'string') {
+    throw new Error(response);
+  }
+
+  return response.data as SiliconFlowModelListItem[];
+};

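Note that this commit only adds model-list retrieval for SiliconFlow; chat completions still go through the DeepSeek client. If a SiliconFlow model were later used for chat as well, a client could presumably be built the same way against SiliconFlow's OpenAI-compatible endpoint. This is only a sketch, not part of the commit: the base URL is inferred from the models URL above, and the SILICONFLOW_API_KEY variable name is an assumption.

// Hypothetical sketch (not in this commit): an OpenAI-SDK client pointed at
// SiliconFlow, mirroring the DeepSeek client above. Base URL inferred from
// the models endpoint used in getSiliconFlowModelList; env variable name is made up.
import OpenAI from "openai";

export const createSiliconFlowClient = (apiKey = process.env.SILICONFLOW_API_KEY) =>
  new OpenAI({
    apiKey,
    baseURL: "https://api.siliconflow.cn/v1",
  });
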
@@ -0,0 +1,6 @@
+export interface SiliconFlowModelListItem {
+  id: string;
+  object: 'model';
+  created: number;
+  owned_by: string;
+}

@@ -0,0 +1,16 @@
+import axios from "axios";
+import { config } from 'dotenv';
+
+config();
+
+if (process.env.HTTP_PROXY_HOST && process.env.HTTP_PROXY_PORT && process.env.HTTP_PROXY_PROTOCOL) {
+  axios.defaults.proxy = {
+    host: process.env.HTTP_PROXY_HOST,
+    port: parseInt(process.env.HTTP_PROXY_PORT),
+    protocol: process.env.HTTP_PROXY_PROTOCOL
+  };
+}
+
+const instance = axios.create();
+
+export default instance;

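A short usage note on the utility above: because axios.create() is called after the library-level defaults are mutated, requests made through the exported instance should inherit the proxy settings. A hypothetical caller (the example proxy values and the API-key variable name are made up):

// Hypothetical usage (not part of the commit).
import http from "../utils/axios";

async function listModels() {
  // With HTTP_PROXY_HOST=127.0.0.1, HTTP_PROXY_PORT=7890, HTTP_PROXY_PROTOCOL=http
  // in .env, this request is expected to be routed through http://127.0.0.1:7890.
  const { data } = await http.get("https://api.siliconflow.cn/v1/models?type=text&sub_type=chat", {
    headers: { Authorization: `Bearer ${process.env.SILICONFLOW_API_KEY ?? ""}` },
  });
  return data;
}
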
@@ -11,28 +11,28 @@
   },
   "dependencies": {
     "@radix-ui/react-slot": "^1.1.2",
-    "@tailwindcss/vite": "^4.0.9",
+    "@tailwindcss/vite": "^4.0.12",
     "class-variance-authority": "^0.7.1",
     "clsx": "^2.1.1",
     "lucide-react": "^0.477.0",
     "react": "^19.0.0",
     "react-dom": "^19.0.0",
     "tailwind-merge": "^3.0.2",
-    "tailwindcss": "^4.0.9",
+    "tailwindcss": "^4.0.12",
     "tailwindcss-animate": "^1.0.7"
   },
   "devDependencies": {
-    "@eslint/js": "^9.21.0",
-    "@types/node": "^22.13.9",
+    "@eslint/js": "^9.22.0",
+    "@types/node": "^22.13.10",
     "@types/react": "^19.0.10",
     "@types/react-dom": "^19.0.4",
     "@vitejs/plugin-react-swc": "^3.8.0",
-    "eslint": "^9.21.0",
-    "eslint-plugin-react-hooks": "^5.1.0",
+    "eslint": "^9.22.0",
+    "eslint-plugin-react-hooks": "^5.2.0",
     "eslint-plugin-react-refresh": "^0.4.19",
     "globals": "^15.15.0",
-    "typescript": "~5.7.2",
-    "typescript-eslint": "^8.24.1",
-    "vite": "^6.2.0"
+    "typescript": "~5.7.3",
+    "typescript-eslint": "^8.26.0",
+    "vite": "^6.2.1"
   }
 }

File diff suppressed because it is too large