1.0.2 • Published 1 month ago
zod-stream v1.0.2
zod-stream
Installation
with pnpm
$ pnpm add zod-stream zod openai
with npm
$ npm install zod-stream zod openai
with bun
$ bun add zod-stream zod openai
Basic Usage
Creating an endpoint that calls OpenAI with a defined response model. (Next.js app router, route handler example)
/api/get-stream
import { OAIStream } from "zod-stream/OAIStream"
import { withResponseModel } from "zod-stream/response-model"
import OpenAI from "openai"
import { z } from "zod"
// OpenAI client; credentials are read from the environment at startup.
const oai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY ?? undefined,
  organization: process.env.OPENAI_ORG_ID ?? undefined
})
/**
 * POST /api/get-stream
 * Streams a structured OpenAI chat completion whose JSON output is
 * constrained to the zod response model defined below.
 */
export async function POST(request: Request) {
  const { messages } = await request.json()

  // Attach the zod schema to the completion params so the model is
  // instructed (via TOOLS mode) to produce matching JSON.
  const params = withResponseModel({
    response_model: {
      schema: z.object({ content: z.string() }),
      name: "Content response"
    },
    params: { messages, model: "gpt-4" },
    mode: "TOOLS"
  })

  const completion = await oai.chat.completions.create({
    ...params,
    stream: true
  })

  // OAIStream adapts the OpenAI SDK stream into a web ReadableStream
  // suitable for a Response body.
  return new Response(OAIStream({ res: completion }))
}
Consuming the structured stream elsewhere, e.g. in the browser.
// Consume the structured stream produced by /api/get-stream.
const client = new ZodStream()

const stream = await client.create({
  completionPromise: async () => {
    // FIX: fetch() returns a Promise<Response> — it must be awaited
    // before reading `response.body` (otherwise `.body` is undefined).
    const response = await fetch("/api/get-stream", {
      body: JSON.stringify({ messages: [] }),
      method: "POST"
    })
    return response.body
  },
  response_model: { // should match model expected to be returned by the completion.
    schema: z.object({
      content: z.string()
    })
  }
})

// FIX: iterate the `stream` created above — `extractionStream` is a
// server-side variable from the route handler and is not in scope here.
for await (const chunk of stream) {
  console.log(chunk) // safe to parse partial json
}
1.0.2
1 month ago
1.0.1
2 months ago
1.0.0
3 months ago
0.0.8
3 months ago
0.0.7
3 months ago
0.0.6
3 months ago
0.0.5
3 months ago
0.0.3
4 months ago
0.0.4
4 months ago
0.0.2
4 months ago
0.0.1
4 months ago
0.0.1--alpha.3
4 months ago
0.0.1--alpha.4
4 months ago
0.0.1--alpha.1
4 months ago
0.0.1--alpha.2
4 months ago
0.0.1--alpha.7
4 months ago
0.0.1--alpha.8
4 months ago
0.0.1--alpha.5
4 months ago
0.0.1--alpha.6
4 months ago
0.0.1--alpha.0
4 months ago