feat: support new mcbbs

このコミットが含まれているのは:
xiang 2023-06-03 14:45:28 +08:00
コミット 3b9719d506
4個のファイルの変更131行の追加6行の削除

ファイルの表示

@ -15,6 +15,7 @@ Have implemented models here:
If you do not want your website to appear here, please raise an issue and I will remove it immediately.
|model|support|status|active time|
|--|--|--|--|
|[ai.mcbbs.gq](ai.mcbbs.gq)|gpt3.5|![Active](https://img.shields.io/badge/Active-brightgreen)|after 2023-06-03|
|[forefront.ai](forefront.ai)|GPT-4/gpt3.5|![Active](https://img.shields.io/badge/Active-brightgreen)|after 2023-05-12|
|[aidream](http://aidream.cloud)|GPT-3.5|![Active](https://img.shields.io/badge/Active-brightgreen)|after 2023-05-12|
|[you.com](you.com)|GPT-3.5|![Active](https://img.shields.io/badge/Active-brightgreen)|after 2023-05-12
@ -67,7 +68,23 @@ docker-compose up --build -d
prompt: string; // required
```
aidread options
#### mcbbs options
```typescript
interface Message {
role: string;
content: string;
}
interface options {
parse: string;
messages: string; // attention: messages is a Message[] JSON string
temperature: number;
}
```
#### aidream options
```typescript
interface options {
@ -79,7 +96,7 @@ interface options {
}
```
forefront options
#### forefront options
```
chatId?: string;
@ -97,9 +114,14 @@ resignup?: number; // default 0 if set 1, auto sign up when gpt4 times use up
### test now!
common request
use curl or open the URL in a browser
```shell
# test default model aidream
# test default model mcbbs
curl '127.0.0.1:3000/ask/stream?messages=[{"role":"system","content":"IMPORTANT: You are a virtual assistant powered by the gpt-3.5-turbo model, now time is 2023/6/3 13:42:27}"},{"role":"user","content":"你好\n"},{"role":"assistant","content":"你好!有什么我可以帮助你的吗?"},{"role":"user","content":"写个冒泡排序\n"}]&prompt=test&model=mcbbs&parse=false'
# test aidream
curl "http://127.0.0.1:3000/ask?prompt=hello&model=aidream"
# test default model chat.forefront.at

ファイルの表示

@ -28,7 +28,7 @@ interface AskReq {
}
router.get('/ask', async (ctx) => {
const {prompt, model = Model.Forefront, ...options} = ctx.query as unknown as AskReq;
const {prompt, model = Model.Mcbbs, ...options} = ctx.query as unknown as AskReq;
if (!prompt) {
ctx.body = 'please input prompt';
return;
@ -39,11 +39,11 @@ router.get('/ask', async (ctx) => {
return;
}
const res = await chat.ask({prompt: prompt as string, options});
ctx.body = res;
ctx.body = res.text;
});
router.get('/ask/stream', async (ctx) => {
const {prompt, model = Model.Forefront, ...options} = ctx.query as unknown as AskReq;
const {prompt, model = Model.Mcbbs, ...options} = ctx.query as unknown as AskReq;
if (!prompt) {
ctx.body = 'please input prompt';
return;

ファイルの表示

@ -3,6 +3,7 @@ import {You} from "./you";
import {AiDream} from "./aidream";
import {Phind} from "./phind";
import {Forefrontnew} from "./forefront";
import {Mcbbs} from "./mcbbs";
export enum Model {
// define new model here
@ -10,6 +11,7 @@ export enum Model {
Forefront = 'forefront',
AiDream = 'aidream',
Phind = 'phind',
Mcbbs = 'mcbbs',
}
export class ChatModelFactory {
@ -28,6 +30,7 @@ export class ChatModelFactory {
this.modelMap.set(Model.Forefront, new Forefrontnew(this.options))
this.modelMap.set(Model.AiDream, new AiDream(this.options))
this.modelMap.set(Model.Phind, new Phind(this.options))
this.modelMap.set(Model.Mcbbs, new Mcbbs(this.options))
}
get(model: Model): Chat | undefined {

100
model/mcbbs/index.ts ノーマルファイル
ファイルの表示

@ -0,0 +1,100 @@
import {Chat, ChatOptions, Request, Response, ResponseStream} from "../base";
import {AxiosInstance, AxiosRequestConfig, CreateAxiosDefaults} from "axios";
import {CreateAxiosProxy} from "../../utils/proxyAgent";
import es from "event-stream";
import {parseJSON} from "../../utils";
import {Stream} from "stream";
// A single chat message in the OpenAI chat-completions wire format.
interface Message {
    role: string;    // sender role — presumably "system" | "user" | "assistant"; not constrained here
    content: string; // message text
}
// Request body actually POSTed to the upstream /openai/v1/chat/completions endpoint.
interface RealReq {
    messages: Message[];      // full conversation history, decoded from the query string
    stream: boolean;          // always true here — the response arrives as an SSE stream
    model: string;            // upstream model id; hard-coded to 'gpt-3.5-turbo' in askStream
    temperature: number;      // sampling temperature forwarded from the caller
    presence_penalty: number; // hard-coded to 2 in askStream
}
// Options accepted by the mcbbs model. Fields arrive as URL query-string
// values (see the /ask routes), hence the string-typed `parse` and `messages`.
export interface McbbsReq extends Request {
    options: {
        parse: string;    // 'false' disables SSE parsing and returns the raw upstream stream
        messages: string; // JSON-encoded Message[]; decoded with JSON.parse in askStream
        temperature: number;
    }
}
/**
 * Chat backend for ai.mcbbs.gq, an OpenAI-compatible chat-completions
 * endpoint that streams responses as server-sent events (SSE).
 */
export class Mcbbs extends Chat {
    private client: AxiosInstance;

    constructor(options?: ChatOptions) {
        super(options);
        // Proxied axios client; the accept header asks the upstream for an SSE stream.
        this.client = CreateAxiosProxy({
            baseURL: 'https://ai.mcbbs.gq/api',
            headers: {
                'Content-Type': 'application/json',
                "accept": "text/event-stream",
                "Cache-Control": "no-cache",
                "Proxy-Connection": "keep-alive"
            }
        } as CreateAxiosDefaults);
    }

    /**
     * Non-streaming ask: drains the stream produced by askStream and
     * resolves with the concatenated text once the stream finishes.
     */
    public async ask(req: McbbsReq): Promise<Response> {
        const res = await this.askStream(req);
        const result: Response = {
            text: '', other: {}
        };
        return new Promise(resolve => {
            let settled = false;
            const finish = () => {
                if (!settled) {
                    settled = true;
                    resolve(result);
                }
            };
            res.text.on('data', (data) => {
                result.text += data;
            })
                // Bug fix: the parsed stream is a piped event-stream transform,
                // which reliably emits 'end' but not necessarily 'close'.
                // Listening only to 'close' (as before) could leave this
                // promise unresolved forever. Resolve once on any terminal event;
                // on 'error' we resolve best-effort with the text received so far.
                .on('end', finish)
                .on('close', finish)
                .on('error', finish);
        });
    }

    /**
     * Streaming ask: POSTs the conversation to the upstream chat-completions
     * endpoint and returns the response stream. When options.parse !== 'false'
     * the raw SSE stream is converted into a plain text-delta stream.
     */
    public async askStream(req: McbbsReq): Promise<ResponseStream> {
        const {
            messages,
            temperature = 1,
            parse = 'true'
        } = req.options;
        const data: RealReq = {
            stream: true,
            messages: JSON.parse(messages),
            temperature,
            presence_penalty: 2,
            model: 'gpt-3.5-turbo'
        };
        const res = await this.client.post('/openai/v1/chat/completions', data, {
            responseType: 'stream',
        } as AxiosRequestConfig);
        if (parse === 'false') {
            // Caller asked for the untouched upstream SSE stream.
            return {text: res.data};
        }
        return {
            text: this.parseData(res.data)
        };
    }

    /**
     * Converts an SSE stream ("data: {json}\n\n" events) into a stream of
     * plain text deltas, dropping the terminator and unparseable chunks.
     */
    parseData(v: Stream): Stream {
        return v.pipe(es.split(/\r?\n\r?\n/)).pipe(es.map(async (chunk: any, cb: any) => {
            const dataStr = chunk.replace('data: ', '');
            // Bug fix: the OpenAI-compatible stream terminator is "[DONE]"
            // (upper case). The original compared against '[Done]', which never
            // matched and only worked by accident because the JSON fallback
            // below produced an empty object with no choices.
            if (dataStr.trim().toUpperCase() === '[DONE]') {
                cb(null, '');
                return;
            }
            const data = parseJSON(dataStr, {});
            if (!data?.choices) {
                // Keep-alive lines / malformed chunks: emit nothing.
                cb(null, '');
                return;
            }
            // Role-only delta chunks carry no content; default to ''.
            const [{delta: {content = ""}}] = data.choices;
            cb(null, content);
        }))
    }
}