import {Chat, ChatOptions, ChatRequest, ChatResponse, ModelType} from "../base";
import {AxiosInstance, AxiosRequestConfig, CreateAxiosDefaults} from "axios";
import {CreateAxiosProxy} from "../../utils/proxyAgent";
import es from "event-stream";
import {ErrorData, Event, EventStream, MessageData, parseJSON} from "../../utils";
// One chat turn in OpenAI chat-completions wire format.
interface Message {
    // Speaker role — presumably 'system' | 'user' | 'assistant';
    // only 'user' is actually sent from this file.
    role: string;
    // Plain-text content of the message.
    content: string;
}
const modelMap = {
|
|
|
|
[ModelType.GPT3p5_16k]: 'gpt-3.5-turbo-16k',
|
|
|
|
[ModelType.GPT4]: 'gpt-4',
|
|
|
|
[ModelType.GPT3p5Turbo]: 'gpt-3.5-turbo'
|
|
|
|
} as Record<ModelType, string>
// Request payload for the proxy's OpenAI-compatible /chat/completions endpoint.
interface RealReq {
    // Conversation history; this client always sends a single user message.
    messages: Message[];
    // Sampling temperature (fixed at 1.0 by askStream below).
    temperature: number;
    // Always true here — the response arrives as a server-sent-event stream.
    stream: boolean;
    // Upstream model name, resolved through modelMap.
    model: string;
}
export class Better extends Chat {
|
|
|
|
private client: AxiosInstance;
|
|
|
|
|
|
|
|
constructor(options?: ChatOptions) {
|
|
|
|
super(options);
|
|
|
|
this.client = CreateAxiosProxy({
|
|
|
|
baseURL: 'https://openai-proxy-api.vercel.app/v1/',
|
|
|
|
headers: {
|
2023-07-02 13:08:14 +09:00
|
|
|
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36 Edg/114.0.1823.58',
|
|
|
|
'Referer': 'https://chat.ylokh.xyz/',
|
|
|
|
'Origin': 'https://chat.ylokh.xyz',
|
|
|
|
'Content-Type': 'application/json'
|
2023-06-29 18:24:08 +09:00
|
|
|
}
|
|
|
|
} as CreateAxiosDefaults);
|
|
|
|
}
|
|
|
|
|
|
|
|
support(model: ModelType): number {
|
|
|
|
switch (model) {
|
|
|
|
case ModelType.GPT3p5_16k:
|
|
|
|
return 15000;
|
2023-07-02 13:08:14 +09:00
|
|
|
case ModelType.GPT3p5Turbo:
|
|
|
|
return 4000;
|
2023-06-29 18:24:08 +09:00
|
|
|
default:
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
public async ask(req: ChatRequest): Promise<ChatResponse> {
|
|
|
|
const stream = new EventStream();
|
|
|
|
const res = await this.askStream(req, stream);
|
|
|
|
const result: ChatResponse = {
|
|
|
|
content: '',
|
|
|
|
}
|
|
|
|
return new Promise(resolve => {
|
|
|
|
stream.read((event, data) => {
|
|
|
|
switch (event) {
|
|
|
|
case Event.done:
|
|
|
|
break;
|
|
|
|
case Event.message:
|
|
|
|
result.content += (data as MessageData).content || '';
|
|
|
|
break;
|
|
|
|
case Event.error:
|
|
|
|
result.error = (data as ErrorData).error;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}, () => {
|
|
|
|
resolve(result);
|
|
|
|
})
|
|
|
|
})
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
public async askStream(req: ChatRequest, stream: EventStream) {
|
|
|
|
const data: RealReq = {
|
|
|
|
messages: [{role: 'user', content: req.prompt}],
|
|
|
|
temperature: 1.0,
|
2023-07-02 13:08:14 +09:00
|
|
|
model: modelMap[req.model],
|
2023-06-29 18:24:08 +09:00
|
|
|
stream: true
|
|
|
|
};
|
|
|
|
try {
|
|
|
|
const res = await this.client.post('/chat/completions', data, {
|
|
|
|
responseType: 'stream',
|
|
|
|
} as AxiosRequestConfig);
|
|
|
|
res.data.pipe(es.split(/\r?\n\r?\n/)).pipe(es.map(async (chunk: any, cb: any) => {
|
|
|
|
const dataStr = chunk.replace('data: ', '');
|
|
|
|
if (!dataStr) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
const data = parseJSON(dataStr, {} as any);
|
|
|
|
if (!data?.choices) {
|
|
|
|
stream.write(Event.error, {error: 'not found data.choices'})
|
|
|
|
stream.end();
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
const [{delta: {content = ""}, finish_reason}] = data.choices;
|
|
|
|
if (finish_reason === 'stop') {
|
2023-07-02 13:08:14 +09:00
|
|
|
stream.write(Event.done, {content: ''})
|
|
|
|
stream.end();
|
2023-06-29 18:24:08 +09:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
stream.write(Event.message, {content});
|
|
|
|
}))
|
|
|
|
} catch (e: any) {
|
|
|
|
console.error(e);
|
|
|
|
stream.write(Event.error, {error: e.message})
|
|
|
|
stream.end();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}