0.0.3 • Published 1 year ago
@thinkableai/prompt v0.0.3
Thinkableai prompt
by @thinkableai
Introduction
- ⚡️ Build and execute advanced prompts.
- 😍 Easy to extend, with configurable initial LLM settings.
Quick Installation
npm install @thinkableai/prompt
# or
yarn add @thinkableai/prompt
# or
pnpm add @thinkableai/prompt
Usage
- Import the module into your NestJS application.
import { Module } from "@nestjs/common";
import { AppController } from "./app.controller";
import { AppService } from "./app.service";
import { PromptModule } from "@thinkableai/prompt";
// Root application module. Registering PromptModule here makes
// PromptService available for injection throughout the app.
@Module({
  imports: [PromptModule], // pulls in the prompt feature module
  controllers: [AppController],
  providers: [AppService],
})
export class AppModule {}
- Call the service to build and execute a prompt.
import { Controller, Post } from "@nestjs/common";
import {
ExecutePromptInput,
FormatContentTypeEnum,
LLMProviderEnum,
LLMSettingsModel,
PromptService,
} from "@thinkableai/prompt";
@Controller()
export class AppController {
  constructor(private readonly promptService: PromptService) {}

  /**
   * Builds a prompt from a template plus variable values and executes it
   * against the configured LLM provider, returning the generated content.
   */
  @Post()
  async run() {
    // Model kind + provider for the LLM call.
    // NOTE(review): "CHAT" is a raw string literal — if the library exposes
    // an enum for the model kind, prefer it over the literal; confirm
    // against the @thinkableai/prompt API.
    const llmSettings = new LLMSettingsModel("CHAT", LLMProviderEnum.OPENAI);

    // Fixed: original read "I act you as a JSON generator" (garbled English),
    // which would be sent verbatim to the model as the system instruction.
    const systemPrompt = `Act as a JSON generator`;

    // Template placeholders {VAR_A}/{VAR_B} are filled from `values` below.
    const prompt = `{VAR_A} is value of a prompt and {VAR_B} is value of chat, make sure to not use a highlight`;
    const values = {
      VAR_A: "my_value",
      VAR_B: "hello",
    };

    const input: ExecutePromptInput = {
      prompt,
      values,
      systemPrompt,
      llmSettings,
      formatContentType: FormatContentTypeEnum.JSON, // force a clean JSON response
    };

    const content = await this.promptService.executePrompt(input);
    return { content };
  }
}