|
|
@@ -20,6 +20,7 @@ import { refusalNode } from './nodes/refusal.node';
|
|
|
import { vectorSearchNode } from './nodes/vector-search.node';
|
|
|
import { aggregationNode } from './nodes/aggregation.node';
|
|
|
import { synthesisNode } from './nodes/synthesis.node';
|
|
|
+import { generationNode } from './nodes/generation.node';
|
|
|
|
|
|
@Injectable()
|
|
|
export class FFBLangChainService {
|
|
|
@@ -49,32 +50,38 @@ export class FFBLangChainService {
|
|
|
this.initGraph();
|
|
|
}
|
|
|
|
|
|
+ private getProvider(socketId: string): 'openai' | 'gemini' {
|
|
|
+ if (socketId?.startsWith('stateless:')) {
|
|
|
+ return (socketId.split(':')[1] as 'openai' | 'gemini') || 'openai';
|
|
|
+ }
|
|
|
+ return this.sessionManager.getModelProvider(socketId);
|
|
|
+ }
|
|
|
+
|
|
|
/**
 * Return the chat model instance matching the provider bound to this
 * socket (or encoded in a stateless pseudo-id).
 */
private getModel(socketId: string): BaseChatModel {
  if (this.getProvider(socketId) === 'gemini') {
    return this.geminiModel;
  }
  return this.openaiModel;
}
|
|
|
|
|
|
/**
 * Persist the caller's provider choice for subsequent turns.
 *
 * Stateless pseudo-sessions carry their provider inside the socket id
 * and have no stored session, so the request is a no-op for them.
 */
switchModel(socketId: string, provider: 'openai' | 'gemini') {
  const isStateless = socketId?.startsWith('stateless:');
  if (!isStateless) {
    this.sessionManager.setModelProvider(socketId, provider);
  }
}
|
|
|
|
|
|
/**
 * Report the provider and concrete model name currently active for a
 * socket (or stateless pseudo-id).
 */
getCurrentModel(socketId: string) {
  const provider = this.getProvider(socketId);
  const modelName = provider === 'gemini' ? 'gemini-2.5-flash' : 'gpt-4o-mini';
  return { provider, modelName };
}
|
|
|
|
|
|
-
|
|
|
-
|
|
|
private initGraph() {
|
|
|
const graph = new StateGraph(AgentState)
|
|
|
.addNode("entry_node", (state) => entryNode(state, this.getModel(state.socketId), this.gateway))
|
|
|
.addNode("guidance_node", (state) => guidanceNode(state))
|
|
|
.addNode("meta_node", (state) => {
|
|
|
const socketId = state.socketId;
|
|
|
- const provider = this.sessionManager.getModelProvider(socketId);
|
|
|
+ const provider = this.getProvider(socketId);
|
|
|
const providerName = provider === 'gemini' ? 'Google Gemini' : 'OpenAI';
|
|
|
return metaNode(state, this.getModel(socketId), providerName, this.vectorService);
|
|
|
})
|
|
|
@@ -82,7 +89,8 @@ export class FFBLangChainService {
|
|
|
.addNode("router_node", (state) => routerNode(state, this.getModel(state.socketId), this.gateway))
|
|
|
.addNode("vector_search_node", (state) => vectorSearchNode(state, this.vectorService, this.gateway))
|
|
|
.addNode("aggregation_node", (state) => aggregationNode(state, this.getModel(state.socketId), this.vectorService, this.gateway))
|
|
|
- .addNode("synthesis_node", (state) => synthesisNode(state, this.getModel(state.socketId), this.gateway));
|
|
|
+ .addNode("synthesis_node", (state) => synthesisNode(state, this.getModel(state.socketId), this.gateway))
|
|
|
+ .addNode("generation_node", (state) => generationNode(state, this.getModel(state.socketId), this.gateway));
|
|
|
|
|
|
// Add Edges
|
|
|
graph.addEdge(START, "entry_node");
|
|
|
@@ -94,6 +102,7 @@ export class FFBLangChainService {
|
|
|
"InScope-Actionable": "router_node",
|
|
|
"InScope-NeedsGuidance": "guidance_node",
|
|
|
"InScope-Meta": "meta_node",
|
|
|
+ "InScope-Generation": "generation_node",
|
|
|
"OutOfScope": "refusal_node"
|
|
|
}
|
|
|
);
|
|
|
@@ -109,6 +118,7 @@ export class FFBLangChainService {
|
|
|
|
|
|
graph.addEdge("guidance_node", END);
|
|
|
graph.addEdge("meta_node", END);
|
|
|
+ graph.addEdge("generation_node", END);
|
|
|
graph.addEdge("refusal_node", END);
|
|
|
graph.addEdge("vector_search_node", "synthesis_node");
|
|
|
graph.addEdge("aggregation_node", "synthesis_node");
|
|
|
@@ -166,4 +176,28 @@ export class FFBLangChainService {
|
|
|
throw error;
|
|
|
}
|
|
|
}
|
|
|
+
|
|
|
+ async chatStateless(message: string, provider: 'openai' | 'gemini' = 'openai'): Promise<string> {
|
|
|
+ try {
|
|
|
+ const userMsg = new HumanMessage(message);
|
|
|
+ const socketId = `stateless:${provider}`;
|
|
|
+
|
|
|
+ const inputs = {
|
|
|
+ messages: [userMsg],
|
|
|
+ entityStore: {},
|
|
|
+ socketId: socketId
|
|
|
+ };
|
|
|
+
|
|
|
+ const result = await this.graph.invoke(inputs);
|
|
|
+
|
|
|
+ const allMessages = result.messages as BaseMessage[];
|
|
|
+ const agentMessages = allMessages.filter((m: BaseMessage) => m._getType() === 'ai');
|
|
|
+ const lastResponse = agentMessages[agentMessages.length - 1];
|
|
|
+
|
|
|
+ return lastResponse?.content as string || "I'm sorry, I encountered an error.";
|
|
|
+ } catch (error) {
|
|
|
+ console.error('Error calling LangGraph (stateless):', error);
|
|
|
+ throw error;
|
|
|
+ }
|
|
|
+ }
|
|
|
}
|