🔥 refactor: remove langchain

This commit is contained in:
arvinxx
2023-07-23 14:31:13 +08:00
parent f5e8d7c765
commit 7b0f96cdce
7 changed files with 1 addition and 169 deletions

View File

@@ -8,9 +8,7 @@
<h1>Lobe Chat</h1>
Lobe Chat is an open-source chatbot client using LangChain
Typescript and Next.js
Lobe Chat is an open-source chatbot client using OpenAI and Next.js
[Changelog](./CHANGELOG.md) · [Report Bug][issues-url] · [Request Feature][issues-url]

View File

@@ -76,7 +76,6 @@
"gpt-tokenizer": "^2",
"i18next": "^23",
"immer": "^10",
"langchain": "latest",
"lodash-es": "^4",
"lucide-react": "latest",
"nanoid": "^4",

View File

@@ -1,95 +0,0 @@
import { LLMChain } from 'langchain/chains';
import { ChatOpenAI } from 'langchain/chat_models/openai';
import {
AIMessagePromptTemplate,
ChatPromptTemplate,
HumanMessagePromptTemplate,
SystemMessagePromptTemplate,
} from 'langchain/prompts';
import { LangChainParams } from '@/types/langchain';
// True when running under local development (NODE_ENV=development).
const isDev = process.env.NODE_ENV === 'development';
// Optional proxy endpoint for the OpenAI API, read from the environment.
const { OPENAI_PROXY_URL } = process.env;
/**
 * Runs an LLMChain against the given prompts/variables and exposes the
 * streamed completion tokens as a byte ReadableStream.
 *
 * @param payload prompts (chat history), template vars, and LLM options.
 * @returns a ReadableStream of UTF-8 encoded completion tokens.
 */
export function LangChainStream(payload: LangChainParams) {
  const { prompts, vars, llm } = payload;

  // Convert each chat message into the matching LangChain prompt-template
  // class; any unrecognised role is treated as a user message.
  const toTemplate = (m: (typeof prompts)[number]) => {
    if (m.role === 'system') return SystemMessagePromptTemplate.fromTemplate(m.content);
    if (m.role === 'assistant') return AIMessagePromptTemplate.fromTemplate(m.content);
    return HumanMessagePromptTemplate.fromTemplate(m.content);
  };
  const chatPrompt = ChatPromptTemplate.fromPromptMessages(prompts.map(toTemplate));

  // Encoder used to turn token strings into bytes for the stream.
  const encoder = new TextEncoder();

  return new ReadableStream({
    async start(controller) {
      // Number of tokens forwarded so far; used to swallow leading newlines.
      let tokensSent = 0;

      const chat = new ChatOpenAI(
        {
          streaming: true,
          ...llm,
          callbacks: [
            {
              handleLLMNewToken(token) {
                // Drop any '\n' arriving before two tokens have been sent.
                if (tokensSent < 2 && token === '\n') {
                  return;
                }
                // Encode the token and push it into the stream.
                controller.enqueue(encoder.encode(token));
                tokensSent++;
              },
            },
          ],
          // No retries for now; revisit if retry support becomes necessary.
          maxRetries: 0,
        },
        isDev && OPENAI_PROXY_URL ? { basePath: OPENAI_PROXY_URL } : undefined,
      );

      const chain = new LLMChain({
        callbacks: [
          {
            handleChainError(err: Error): Promise<void> | void {
              console.log(err.message);
            },
          },
        ],
        llm: chat,
        prompt: chatPrompt,
        verbose: true,
      });

      try {
        // Invoke the chain with the template variables; tokens flow out
        // through the handleLLMNewToken callback above.
        await chain.call(vars);
        // Streaming finished — close the stream.
        controller.close();
      } catch (error) {
        // Propagate any failure to the stream's consumer.
        controller.error(error);
      }
    },
  });
}

View File

@@ -1,17 +0,0 @@
import { LangChainParams } from '@/types/langchain';
import { LangChainStream } from './LangChainStream';
// Fail fast at module load: this route is unusable without an OpenAI key.
const apiKey = process.env.OPENAI_API_KEY;
if (!apiKey) throw new Error('Missing env var from OpenAI');

// Serve this API route from the Edge runtime.
export const config = { runtime: 'edge' };
/**
 * Edge API route: parses the request body as LangChainParams and streams
 * the chain's completion back to the client.
 */
export default async function handler(request: Request) {
  const payload: LangChainParams = await request.json();
  return new Response(LangChainStream(payload));
}

View File

@@ -1,18 +0,0 @@
import { URLS } from '@/services/url';
import { LangChainParams } from '@/types/langchain';
import { fetchAIFactory } from '@/utils/fetch';
/**
* 专门用于 FlowChain 的 fetch
*/
/**
 * Fetch helper dedicated to FlowChain requests: POSTs the params as JSON
 * to the chain endpoint, wired through fetchAIFactory.
 */
export const fetchLangChain = fetchAIFactory(
  (params: LangChainParams, signal?: AbortSignal | undefined) => {
    const body = JSON.stringify(params);
    return fetch(URLS.chain, {
      body,
      headers: {
        'Content-Type': 'application/json',
      },
      method: 'POST',
      signal,
    });
  },
);

View File

@@ -3,6 +3,5 @@ const isDev = process.env.NODE_ENV === 'development';
// Dev endpoints get a "-dev" suffix so they are distinguishable from prod.
const prefix = isDev ? '-dev' : '';

// Central table of API routes used by the client.
export const URLS = {
  chain: `/api/chain${prefix}`,
  openai: `/api/openai${prefix}`,
};

View File

@@ -1,34 +0,0 @@
import { ChatMessage } from '@lobehub/ui';
/**
 * Payload accepted by the LangChain streaming endpoint.
 */
export interface LangChainParams {
  /** Options forwarded to the underlying LLM. */
  llm: {
    /**
     * Penalty coefficient applied to the generated text to reduce repetition
     */
    frequency_penalty?: number;
    /**
     * Maximum length of the generated text
     */
    max_tokens?: number;
    // Model identifier, e.g. an OpenAI model name — TODO confirm accepted values
    model: string;
    /**
     * Penalty coefficient applied to the generated text to reduce topic changes
     */
    presence_penalty?: number;
    /**
     * Sampling randomness of the generated text, controlling its creativity and diversity
     * @default 0.6
     */
    temperature: number;
    /**
     * Controls selection among the highest-probability tokens in the generated text
     */
    top_p?: number;
  };
  /**
   * List of chat messages
   */
  prompts: ChatMessage[];
  // Template variables substituted into the prompts when the chain is invoked.
  vars: Record<string, string>;
}