Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion packages/components/nodes/agentflow/Agent/Agent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -686,6 +686,7 @@ class Agent_Agentflow implements INode {
if (!model) {
throw new Error('Model is required')
}
const modelName = modelConfig?.model ?? modelConfig?.modelName

// Extract tools
const tools = nodeData.inputs?.agentTools as ITool[]
Expand Down Expand Up @@ -1390,7 +1391,7 @@ class Agent_Agentflow implements INode {

// End analytics tracking
if (analyticHandlers && llmIds) {
await analyticHandlers.onLLMEnd(llmIds, finalResponse)
await analyticHandlers.onLLMEnd(llmIds, output, { model: modelName, provider: model })
}

// Send additional streaming events if needed
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import {
} from '../utils'
import { CONDITION_AGENT_SYSTEM_PROMPT, DEFAULT_SUMMARIZER_TEMPLATE } from '../prompt'
import { BaseChatModel } from '@langchain/core/language_models/chat_models'
import { findBestScenarioIndex } from './matchScenario'

class ConditionAgent_Agentflow implements INode {
label: string
Expand Down Expand Up @@ -259,6 +260,8 @@ class ConditionAgent_Agentflow implements INode {
if (!model) {
throw new Error('Model is required')
}
const modelName = modelConfig?.model ?? modelConfig?.modelName

const conditionAgentInput = nodeData.inputs?.conditionAgentInput as string
let input = conditionAgentInput || question
const conditionAgentInstructions = nodeData.inputs?.conditionAgentInstructions as string
Expand Down Expand Up @@ -376,12 +379,20 @@ class ConditionAgent_Agentflow implements INode {
const endTime = Date.now()
const timeDelta = endTime - startTime

// End analytics tracking
// End analytics tracking (pass structured output with usage metadata)
if (analyticHandlers && llmIds) {
await analyticHandlers.onLLMEnd(
llmIds,
typeof response.content === 'string' ? response.content : JSON.stringify(response.content)
)
const analyticsOutput: any = {
content: typeof response.content === 'string' ? response.content : JSON.stringify(response.content)
}
// Include usage metadata if available
if (response.usage_metadata) {
analyticsOutput.usageMetadata = response.usage_metadata
}
// Include response metadata (contains model name) if available
if (response.response_metadata) {
analyticsOutput.responseMetadata = response.response_metadata
}
await analyticHandlers.onLLMEnd(llmIds, analyticsOutput, { model: modelName, provider: model })
}

let calledOutputName: string
Expand All @@ -406,10 +417,7 @@ class ConditionAgent_Agentflow implements INode {
}
}

// Find the first exact match
const matchedScenarioIndex = _conditionAgentScenarios.findIndex(
(scenario) => calledOutputName.toLowerCase() === scenario.scenario.toLowerCase()
)
const matchedScenarioIndex = findBestScenarioIndex(_conditionAgentScenarios, calledOutputName)

const conditions = _conditionAgentScenarios.map((scenario, index) => {
return {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
import { findBestScenarioIndex } from './matchScenario'

describe('findBestScenarioIndex', () => {
const scenarios = [{ scenario: 'billing issue' }, { scenario: 'technical support' }, { scenario: 'other' }]

it('matches exact scenario (case-insensitive)', () => {
expect(findBestScenarioIndex(scenarios, 'Technical Support')).toBe(1)
})

it('matches exact scenario with surrounding whitespace', () => {
expect(findBestScenarioIndex(scenarios, ' billing issue ')).toBe(0)
})

it('matches abbreviated output using startsWith fallback', () => {
expect(findBestScenarioIndex(scenarios, 'tech')).toBe(1)
})

it('matches substring output in either direction', () => {
expect(findBestScenarioIndex(scenarios, 'need help with billing issue today')).toBe(0)
})

it('falls back to last scenario when no match is found', () => {
expect(findBestScenarioIndex(scenarios, 'completely unrelated')).toBe(2)
})

it('returns -1 for empty scenarios list', () => {
expect(findBestScenarioIndex([], 'anything')).toBe(-1)
})
})
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
export type ConditionScenario = { scenario: string }

/**
 * Resolves the scenario name returned by the LLM to an index in `scenarios`.
 *
 * Matching strategy, first hit wins:
 *   1. exact match (case-insensitive, output trimmed)
 *   2. prefix match — the LLM returned an abbreviation of a scenario name
 *   3. substring match in either direction
 *   4. fall back to the last scenario, treated as the "else" branch
 *
 * @param scenarios - condition scenarios to match against
 * @param calledOutputName - raw scenario name produced by the LLM
 * @returns the matched index, or -1 when `scenarios` is empty
 */
export const findBestScenarioIndex = (scenarios: ConditionScenario[], calledOutputName: string): number => {
    if (!Array.isArray(scenarios) || scenarios.length === 0) return -1

    // Coalesce a nullish input defensively before normalizing.
    const normalizedOutput = (calledOutputName ?? '').toLowerCase().trim()

    // Bug fix: an empty/whitespace-only output must NOT reach the fallbacks —
    // `s.startsWith('')` and `s.includes('')` are true for every string, so it
    // would always match index 0. Route it straight to the "else" branch.
    if (normalizedOutput === '') return scenarios.length - 1

    // 1. try exact match first
    let matchedScenarioIndex = scenarios.findIndex((scenario) => scenario.scenario.toLowerCase() === normalizedOutput)

    // 2. fallback: check if LLM returned a partial/abbreviated scenario name
    if (matchedScenarioIndex === -1) {
        matchedScenarioIndex = scenarios.findIndex((scenario) => scenario.scenario.toLowerCase().startsWith(normalizedOutput))
    }

    // 3. further fallback: substring match in either direction
    if (matchedScenarioIndex === -1) {
        matchedScenarioIndex = scenarios.findIndex(
            (scenario) =>
                scenario.scenario.toLowerCase().includes(normalizedOutput) || normalizedOutput.includes(scenario.scenario.toLowerCase())
        )
    }

    // 4. last resort: if still no match, use the last scenario as an "else" branch
    return matchedScenarioIndex === -1 ? scenarios.length - 1 : matchedScenarioIndex
}
3 changes: 2 additions & 1 deletion packages/components/nodes/agentflow/LLM/LLM.ts
Original file line number Diff line number Diff line change
Expand Up @@ -348,6 +348,7 @@ class LLM_Agentflow implements INode {
if (!model) {
throw new Error('Model is required')
}
const modelName = modelConfig?.model ?? modelConfig?.modelName

// Extract memory and configuration options
const enableMemory = nodeData.inputs?.llmEnableMemory as boolean
Expand Down Expand Up @@ -576,7 +577,7 @@ class LLM_Agentflow implements INode {

// End analytics tracking
if (analyticHandlers && llmIds) {
await analyticHandlers.onLLMEnd(llmIds, finalResponse)
await analyticHandlers.onLLMEnd(llmIds, output, { model: modelName, provider: model })
}

// Send additional streaming events if needed
Expand Down
2 changes: 1 addition & 1 deletion packages/components/src/Interface.ts
Original file line number Diff line number Diff line change
Expand Up @@ -458,7 +458,7 @@ export enum FollowUpPromptProvider {
}

export type FollowUpPromptProviderConfig = {
[key in FollowUpPromptProvider]: {
[_key in FollowUpPromptProvider]: {
credentialId: string
modelName: string
baseUrl: string
Expand Down
Loading