- import { Injectable, Inject, forwardRef } from '@nestjs/common';
- import { ChatOpenAI } from '@langchain/openai';
- import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
- import { BaseChatModel } from "@langchain/core/language_models/chat_models";
- import { StateGraph, START, END } from "@langchain/langgraph";
- import { BaseMessage, HumanMessage } from "@langchain/core/messages";
- import { FFBVectorService } from './ffb-vector.service';
- import { FFBGateway } from '../ffb.gateway';
- // Config & Utils
- import { AgentState } from './config/agent-state';
- import { SessionManager } from './utils/session-manager';
- // Nodes
- import { entryNode } from './nodes/entry.node';
- import { routerNode } from './nodes/router.node';
- import { guidanceNode } from './nodes/guidance.node';
- import { metaNode } from './nodes/meta.node';
- import { refusalNode } from './nodes/refusal.node';
- import { vectorSearchNode } from './nodes/vector-search.node';
- import { aggregationNode } from './nodes/aggregation.node';
- import { synthesisNode } from './nodes/synthesis.node';
- @Injectable()
- export class FFBLangChainService {
- private openaiModel: BaseChatModel;
- private geminiModel: BaseChatModel;
- private graph: any;
- private sessionManager: SessionManager;
- constructor(
- private readonly vectorService: FFBVectorService,
- @Inject(forwardRef(() => FFBGateway))
- private readonly gateway: FFBGateway
- ) {
- this.openaiModel = new ChatOpenAI({
- modelName: 'gpt-4o-mini',
- apiKey: process.env.OPENAI_API_KEY,
- temperature: 0
- });
- this.geminiModel = new ChatGoogleGenerativeAI({
- model: 'gemini-2.5-flash',
- apiKey: process.env.GOOGLE_API_KEY,
- temperature: 0
- });
- this.sessionManager = new SessionManager();
- this.initGraph();
- }
- private getModel(socketId: string): BaseChatModel {
- const provider = this.sessionManager.getModelProvider(socketId);
- return provider === 'gemini' ? this.geminiModel : this.openaiModel;
- }
- switchModel(socketId: string, provider: 'openai' | 'gemini') {
- this.sessionManager.setModelProvider(socketId, provider);
- }
- getCurrentModel(socketId: string) {
- const provider = this.sessionManager.getModelProvider(socketId);
- return {
- provider: provider,
- modelName: provider === 'gemini' ? 'gemini-2.5-flash' : 'gpt-4o-mini'
- };
- }
- private initGraph() {
- const graph = new StateGraph(AgentState)
- .addNode("entry_node", (state) => entryNode(state, this.getModel(state.socketId), this.gateway))
- .addNode("guidance_node", (state) => guidanceNode(state))
- .addNode("meta_node", (state) => {
- const socketId = state.socketId;
- const provider = this.sessionManager.getModelProvider(socketId);
- const providerName = provider === 'gemini' ? 'Google Gemini' : 'OpenAI';
- return metaNode(state, this.getModel(socketId), providerName, this.vectorService);
- })
- .addNode("refusal_node", (state) => refusalNode(state))
- .addNode("router_node", (state) => routerNode(state, this.getModel(state.socketId), this.gateway))
- .addNode("vector_search_node", (state) => vectorSearchNode(state, this.vectorService, this.gateway))
- .addNode("aggregation_node", (state) => aggregationNode(state, this.getModel(state.socketId), this.vectorService, this.gateway))
- .addNode("synthesis_node", (state) => synthesisNode(state, this.getModel(state.socketId), this.gateway));
- // Add Edges
- graph.addEdge(START, "entry_node");
- graph.addConditionalEdges(
- "entry_node",
- (state) => state.entryCategory,
- {
- "InScope-Actionable": "router_node",
- "InScope-NeedsGuidance": "guidance_node",
- "InScope-Meta": "meta_node",
- "OutOfScope": "refusal_node"
- }
- );
- graph.addConditionalEdges(
- "router_node",
- (state) => state.activeIntent,
- {
- Semantic: "vector_search_node",
- Aggregate: "aggregation_node"
- }
- );
- graph.addEdge("guidance_node", END);
- graph.addEdge("meta_node", END);
- graph.addEdge("refusal_node", END);
- graph.addEdge("vector_search_node", "synthesis_node");
- graph.addEdge("aggregation_node", "synthesis_node");
- graph.addEdge("synthesis_node", END);
- this.graph = graph.compile();
- }
- // --- MAIN ENTRY POINT ---
- createSession(socketId: string) {
- this.sessionManager.createSession(socketId);
- }
- deleteSession(socketId: string) {
- this.sessionManager.deleteSession(socketId);
- }
- async chat(socketId: string, message: string): Promise<string> {
- try {
- // Get session & filter valid history
- const session = this.sessionManager.getSession(socketId);
- const validHistory = this.sessionManager.getValidHistory(socketId);
- const userMsg = new HumanMessage(message);
- const inputs = {
- messages: [...validHistory, userMsg],
- entityStore: session.entityStore,
- socketId: socketId
- };
- const result = await this.graph.invoke(inputs);
- const allMessages = result.messages as BaseMessage[];
- const updatedEntityStore = result.entityStore as Record<string, any>;
- const classification = result.entryCategory as string;
- // Get the AI response (last message)
- const agentMessages = allMessages.filter((m: BaseMessage) => m._getType() === 'ai');
- const lastResponse = agentMessages[agentMessages.length - 1];
- // Update Session Storage
- this.sessionManager.updateSession(
- socketId,
- userMsg,
- lastResponse,
- classification,
- updatedEntityStore
- );
- return lastResponse?.content as string || "I'm sorry, I encountered an error.";
- } catch (error) {
- console.error('Error calling LangGraph:', error);
- throw error;
- }
- }
- }