openai-node
38 lines · 1.2 KB
1import OpenAI from 'openai';
2import type { NextApiRequest, NextApiResponse } from 'next';
3
// This file demonstrates how to stream from a Next.js server as
// a newline-separated JSON-encoded stream. This file cannot be run
// without Next.js scaffolding.
7
// Opt into the Next.js Edge runtime — the handler below relies on the
// Fetch-API request shape (`req.text()`) that the Edge runtime provides.
export const runtime = 'edge';
9
10// This endpoint can be called with:
11//
12// curl 127.0.0.1:3000 -N -X POST -H 'Content-Type: text/plain' \
13// --data 'Can you explain why dogs are better than cats?'
14//
15// Or consumed with fetch:
16//
17// fetch('http://localhost:3000', {
18// method: 'POST',
19// body: 'Tell me why dogs are better than cats',
20// }).then(async res => {
21// const runner = ChatCompletionStreamingRunner.fromReadableStream(res)
22// })
23//
24// See examples/stream-to-client-browser.ts for a more complete example.
25export default async function handler(req: NextApiRequest, res: NextApiResponse) {
26const openai = new OpenAI();
27
28const stream = openai.beta.chat.completions.stream({
29model: 'gpt-3.5-turbo',
30stream: true,
31// @ts-ignore
32messages: [{ role: 'user', content: await req.text() }],
33});
34
35return res.send(stream.toReadableStream());
36// @ts-ignore -- Or, for the app router:
37return new Response(stream.toReadableStream());
38}
39