// agent-setup-lib v1.0.12 • Published 3 months ago
import { TavilySearchResults } from "@langchain/community/tools/tavily_search";
import { CheerioWebBaseLoader } from "@langchain/community/document_loaders/web/cheerio";
import { tool } from "@langchain/core/tools";
import { z } from "zod";
// Web-search tool backed by the Tavily API.
// The key is read from the TAVILY_API_KEY environment variable; the
// placeholder fallback preserves the original demo behavior when unset.
// NOTE(review): never commit a real API key to source control.
export const tavilyTool = new TavilySearchResults({
  apiKey: process.env.TAVILY_API_KEY ?? "tavily api key",
});
/**
 * Tool that scrapes a webpage and returns its contents wrapped in
 * `<Document name="...">` tags, one per document the loader yields.
 *
 * Input schema: `{ url: string }` — validated as a well-formed URL.
 */
export const webLoader = tool(
  async (input) => {
    const loader = new CheerioWebBaseLoader(input.url);
    const docs = await loader.load();
    // Fall back to the URL when the page exposes no title metadata,
    // so the tag never reads name="undefined".
    const formattedDocs = docs.map(
      (doc) =>
        `<Document name="${doc.metadata?.title ?? input.url}">\n${doc.pageContent}\n</Document>`,
    );
    return formattedDocs.join("\n\n");
  },
  {
    name: "webpage_loader",
    description: "Scrape the contents of a webpage.",
    schema: z.object({
      // Reject malformed input before attempting a network fetch.
      url: z.string().url().describe("The URL of the webpage to scrape"),
    }),
  },
);
//////////////////////////////////////////////////////////////////////////////////////////////
// demo.ts
import Llm from "../src/utils/llm";
import { MongoHandler } from "../src/utils/memory";
import { AgentSetup, AgentState } from "../src/agent.setup";
import { ChainCreator, Member } from "../src/index";
import { HumanMessage } from "@langchain/core/messages";
import { createReactAgent } from "@langchain/langgraph/prebuilt";
import { webLoader, tavilyTool } from "./tools/tools";
// Replace these configuration values with your actual keys and endpoints.
// LLM provider configuration. Each value is read from the environment
// when the matching variable is exported, falling back to the original
// placeholder — replace the placeholders (or set the env vars) with your
// actual keys and endpoints.
const llmConfig = {
  OLLAMA_BASE_URL: process.env.OLLAMA_BASE_URL ?? "http://localhost:11434",
  OPENAI_CHAT_API_KEY:
    process.env.OPENAI_CHAT_API_KEY ?? "your-openai-api-key",
  OPENAI_EMBEDDINGS_API_KEY:
    process.env.OPENAI_EMBEDDINGS_API_KEY ?? "your-openai-api-key",
  ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY ?? "your-anthropic-api-key",
};
/**
 * End-to-end demo: wires an LLM, a MongoDB-backed memory/checkpointer, a
 * web-search agent, and a team supervisor into a multi-agent chain, then
 * streams one run through it and prints each intermediate node output.
 *
 * Errors are caught and logged rather than rethrown; the process exit
 * code is set to 1 so shells/CI can still detect the failure.
 */
async function demo() {
  try {
    // 1. Initialize the LLM wrapper plus chat and embeddings instances.
    const llm = new Llm(llmConfig);
    const chatLlm = llm.getChatLlm({
      provider: "openai",
      model: "gpt-4",
      temperature: 0.7,
    });
    const embeddings = llm.getEmbeddings({
      provider: "openai",
      model: "text-embedding-ada-002",
    });
    console.log("LLM and embeddings instances created.");
    // 2. Setup the MongoDB memory handler (vector store configuration).
    const mongoHandler = new MongoHandler(
      embeddings.instance,
      "mongodb://localhost:27017",
      {
        dbName: "multiAgentDB",
        collectionName: "vectorStore",
        indexName: "defaultIndex",
        textKey: "pageContent",
        embeddingKey: "embedding",
      }
    );
    console.log("MongoDB memory handler initialized.");
    // 3. Create an initial agent state; the supervisor is the entry node.
    const initialState = {
      messages: [
        new HumanMessage({
          content: "Hello, starting multi-agent workflow.",
        }),
      ],
      team_members: [],
      next: "supervisor",
      instructions: "Select the correct team member for the task",
      stateOption: "",
    };
    // 4. Initialize AgentSetup with the agent state and the chat LLM instance.
    const agentSetup = new AgentSetup(initialState, chatLlm.instance);
    console.log("Agent setup created.");
    // Graph node that runs a ReAct agent armed with the Tavily search tool.
    async function webSearchNode(state: typeof AgentState.State) {
      const stateModifier = agentSetup.agentStateModifier(
        "You are a web search agent",
        [tavilyTool],
        state.team_members ?? ["Web_Search"],
      );
      const agent = createReactAgent({
        llm: chatLlm.instance,
        tools: [tavilyTool],
        messageModifier: stateModifier(state),
      });
      return agentSetup.runAgentNode({ state, agent, name: "Web_Search" });
    }
    // 5. Define team members with their node functions and the task
    //    classifications the supervisor routes on.
    const teamMembers: Member[] = [
      {
        name: "Web_Search",
        func: webSearchNode,
        // Classification text is fed to the supervisor LLM; fixed the
        // "intresting" typo so routing prompts read correctly.
        classification: [
          "Search the web",
          "look for interesting facts about a topic",
        ],
      },
    ];
    // 6. Create a team supervisor agent using AgentSetup.
    //    The supervisor uses the team-member list to decide the next step.
    const supervisorAgent = await agentSetup.createTeamSupervisor(teamMembers);
    console.log("Team supervisor created.");
    // 7. Initialize a checkpointer (saver) using MongoDB.
    const saver = await mongoHandler.initSaver("multiAgentDB", "checkpoints");
    console.log("Checkpointer initialized.");
    // 8. Create the agent chain using ChainCreator and stream one run.
    const chainCreator = new ChainCreator(saver, supervisorAgent, teamMembers);
    const chain = chainCreator.createChain();
    console.log("Chain created successfully:");
    // `stream` yields intermediate node outputs; an `__end__` key marks
    // the terminal output, which we skip when printing.
    const outputStream = await chain.stream(
      {
        messages: [new HumanMessage("return my last request")],
      },
      {
        recursionLimit: 100,
        configurable: { thread_id: "1" },
      }
    );
    for await (const output of outputStream) {
      if (!output?.__end__) {
        console.log(output);
        console.log("----");
      }
    }
  } catch (error) {
    console.error("Error in demo:", error);
    process.exitCode = 1; // surface the failure to the shell/CI
  }
}
// Run the demo. `demo` catches its own errors, so this promise never
// rejects; `void` marks the fire-and-forget call as intentional.
void demo();