diff --git a/api/docs/nlp/README.md b/api/docs/nlp/README.md
new file mode 100644
index 000000000..89f1b46c5
--- /dev/null
+++ b/api/docs/nlp/README.md
@@ -0,0 +1,102 @@
+# NLP Block Scoring
+## Purpose
+
+**NLP Block Scoring** is a mechanism used to select the most relevant response block based on:
+
+- Matching patterns between user input and block definitions
+- Configurable weights assigned to each entity type
+- Confidence values provided by the NLU engine for detected entities
+
+It enables more intelligent and context-aware block selection in conversational flows.
+
+## Core Use Cases
+### Standard Matching
+
+A user input contains entities that directly match a block’s patterns.
+```bash
+Example: Input: intent = enquiry & subject = claim
+Block A: Patterns: intent: enquiry & subject: claim
+Block A will be selected.
+```
+
+### High Confidence, Partial Match
+
+A block may match only some patterns but have high-confidence input on those matched ones, making it a better candidate than others with full matches but low-confidence entities.
+**Note: Confidence is multiplied by a predefined weight for each entity type.**
+
+```bash
+Example:
+Input: intent = issue (confidence: 0.92) & subject = claim (confidence: 0.65)
+Block A: Pattern: intent: issue
+Block B: Pattern: subject: claim
+➤ Block A scores 0.92 × 1 = 0.92 while Block B scores 0.65 × 1 = 0.65, so Block A is selected (assuming both weights equal 1).
+```
+
+### Multiple Blocks with Similar Patterns
+
+```bash
+Input: intent = issue & subject = insurance
+Block A: intent = enquiry & subject = insurance
+Block B: subject = insurance
+➤ Block B is selected — Block A mismatches on intent.
+```
+
+### Exclusion Due to Extra Patterns
+
+If a block contains patterns that require entities not present in the user input, the block is excluded from scoring altogether. No penalties are applied — the block simply isn't considered a valid candidate.
+
+```bash
+Input: intent = issue & subject = insurance
+Block A: intent = enquiry & subject = insurance & location = office
+Block B: subject = insurance & time = morning
+➤ Neither block is selected due to unmatched required patterns (`location`, `time`)
+```
+
+### Tie-Breaking with Penalty Factors
+
+When multiple blocks receive similar scores, penalty factors can help break the tie — especially in cases where patterns are less specific (e.g., using `Any` as a value).
+
+```bash
+Input: intent = enquiry & subject = insurance
+
+Block A: intent = enquiry & subject = Any
+Block B: intent = enquiry & subject = insurance
+Block C: subject = insurance
+
+Scoring Summary:
+- Block A matches both patterns, but subject = Any is considered less specific.
+- Block B also matches both patterns, with fully specific values.
+- Block C matches only one pattern.
+
+➤ Block A and Block B have similar raw scores.
+➤ A penalty factor is applied to Block A due to its use of Any, reducing its final score.
+➤ Block B is selected.
+```
+
+## How Scoring Works
+### Matching and Confidence
+
+For each entity in the block's pattern:
+- If it matches an entity in the user input:
+  - the score is increased by: `confidence × weight`
+  - `Confidence` is a value between 0 and 1, returned by the NLU engine.
+  - `Weight` is a configured importance factor for that specific entity type.
+- If the match is a wildcard (i.e., the block accepts any value):
+  - A **penalty factor** is applied to slightly reduce its contribution: `confidence × weight × penaltyFactor`. This encourages more specific matches when available.
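+
+To make this rule concrete, below is a minimal TypeScript sketch of the per-pattern scoring step. It mirrors the `calculateBlockScore` logic in `block.service.ts`, but the names and types used here (`scorePatterns`, `ParsedEntity`, `EntityMeta`) are simplified, illustrative stand-ins rather than the actual project schemas:
+
+```typescript
+// Illustrative stand-ins for the NLU parse result and entity metadata (not the actual schemas).
+type NlpPattern =
+  | { entity: string; match: 'entity' }
+  | { entity: string; match: 'value'; value: string };
+type ParsedEntity = { entity: string; value: string; confidence: number };
+type EntityMeta = { weight: number; values: string[] };
+
+function scorePatterns(
+  patterns: NlpPattern[],
+  entities: ParsedEntity[],
+  entityMap: Map<string, EntityMeta>, // cached entity metadata (weight + known values)
+  penaltyFactor: number, // e.g. 0.95, the default NLU penalty factor setting
+): number {
+  return patterns.reduce((sum, pattern) => {
+    const meta = entityMap.get(pattern.entity);
+    if (!meta) return sum; // unknown entity type contributes nothing
+
+    // A pattern matches a detected entity of the same type whose value is known;
+    // "value" patterns additionally require an exact value match.
+    const matched = entities.find(
+      (e) =>
+        e.entity === pattern.entity &&
+        meta.values.includes(e.value) &&
+        (pattern.match !== 'value' || e.value === pattern.value),
+    );
+    if (!matched) return sum;
+
+    // Wildcard ("entity") matches are slightly penalized to favor specific values.
+    const penalty = pattern.match === 'entity' ? penaltyFactor : 1;
+    return sum + matched.confidence * meta.weight * penalty;
+  }, 0);
+}
+
+// Example: one specific pattern matched with confidence 0.92 and weight 1 => score 0.92
+const score = scorePatterns(
+  [{ entity: 'intent', match: 'value', value: 'enquiry' }],
+  [{ entity: 'intent', value: 'enquiry', confidence: 0.92 }],
+  new Map([['intent', { weight: 1, values: ['enquiry', 'issue'] }]]),
+  0.95,
+);
+```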
+ +### Scoring Formula Summary + +For each matched entity: + +```bash +score += confidence × weight × [optional penalty factor if wildcard] +``` + +The total block score is the sum of all matched patterns in that block. + +### Penalty Factor + +The **penalty factor** is a global multiplier (typically less than `1`, e.g., `0.8`) applied when the match type is less specific — such as wildcard or loose entity type matches. It allows the system to: +- Break ties in favor of more precise blocks +- Discourage overly generic blocks from being selected when better matches are available diff --git a/api/src/chat/chat.module.ts b/api/src/chat/chat.module.ts index 1452d2960..48bfcab58 100644 --- a/api/src/chat/chat.module.ts +++ b/api/src/chat/chat.module.ts @@ -16,6 +16,7 @@ import { AttachmentModel } from '@/attachment/schemas/attachment.schema'; import { AttachmentService } from '@/attachment/services/attachment.service'; import { ChannelModule } from '@/channel/channel.module'; import { CmsModule } from '@/cms/cms.module'; +import { NlpModule } from '@/nlp/nlp.module'; import { UserModule } from '@/user/user.module'; import { BlockController } from './controllers/block.controller'; @@ -68,6 +69,7 @@ import { SubscriberService } from './services/subscriber.service'; AttachmentModule, EventEmitter2, UserModule, + NlpModule, ], controllers: [ CategoryController, diff --git a/api/src/chat/controllers/block.controller.spec.ts b/api/src/chat/controllers/block.controller.spec.ts index 24945c3de..426a19b3c 100644 --- a/api/src/chat/controllers/block.controller.spec.ts +++ b/api/src/chat/controllers/block.controller.spec.ts @@ -20,6 +20,15 @@ import { LanguageRepository } from '@/i18n/repositories/language.repository'; import { LanguageModel } from '@/i18n/schemas/language.schema'; import { I18nService } from '@/i18n/services/i18n.service'; import { LanguageService } from '@/i18n/services/language.service'; +import { LoggerService } from '@/logger/logger.service'; +import { NlpEntityRepository } from '@/nlp/repositories/nlp-entity.repository'; +import { NlpSampleEntityRepository } from '@/nlp/repositories/nlp-sample-entity.repository'; +import { NlpValueRepository } from '@/nlp/repositories/nlp-value.repository'; +import { NlpEntityModel } from '@/nlp/schemas/nlp-entity.schema'; +import { NlpSampleEntityModel } from '@/nlp/schemas/nlp-sample-entity.schema'; +import { NlpValueModel } from '@/nlp/schemas/nlp-value.schema'; +import { NlpEntityService } from '@/nlp/services/nlp-entity.service'; +import { NlpValueService } from '@/nlp/services/nlp-value.service'; import { PluginService } from '@/plugins/plugins.service'; import { SettingService } from '@/setting/services/setting.service'; import { InvitationRepository } from '@/user/repositories/invitation.repository'; @@ -93,6 +102,9 @@ describe('BlockController', () => { RoleModel, PermissionModel, LanguageModel, + NlpEntityModel, + NlpSampleEntityModel, + NlpValueModel, ]), ], providers: [ @@ -116,6 +128,12 @@ describe('BlockController', () => { PermissionService, LanguageService, PluginService, + LoggerService, + NlpEntityService, + NlpEntityRepository, + NlpSampleEntityRepository, + NlpValueRepository, + NlpValueService, { provide: I18nService, useValue: { diff --git a/api/src/chat/schemas/types/pattern.ts b/api/src/chat/schemas/types/pattern.ts index 48df5efed..6b430f699 100644 --- a/api/src/chat/schemas/types/pattern.ts +++ b/api/src/chat/schemas/types/pattern.ts @@ -8,6 +8,8 @@ import { z } from 'zod'; +import { BlockFull } from '../block.schema'; + 
import { PayloadType } from './button'; export const payloadPatternSchema = z.object({ @@ -57,3 +59,19 @@ export const patternSchema = z.union([ ]); export type Pattern = z.infer; + +export type NlpPatternMatchResult = { + block: BlockFull; + matchedPattern: NlpPattern[]; +}; + +export function isNlpPattern(pattern: NlpPattern) { + return ( + (typeof pattern === 'object' && + pattern !== null && + 'entity' in pattern && + 'match' in pattern && + pattern.match === 'entity') || + pattern.match === 'value' + ); +} diff --git a/api/src/chat/services/block.service.spec.ts b/api/src/chat/services/block.service.spec.ts index 393d91603..0c94a74f1 100644 --- a/api/src/chat/services/block.service.spec.ts +++ b/api/src/chat/services/block.service.spec.ts @@ -31,6 +31,15 @@ import { LanguageRepository } from '@/i18n/repositories/language.repository'; import { LanguageModel } from '@/i18n/schemas/language.schema'; import { I18nService } from '@/i18n/services/i18n.service'; import { LanguageService } from '@/i18n/services/language.service'; +import { NlpEntityRepository } from '@/nlp/repositories/nlp-entity.repository'; +import { NlpSampleEntityRepository } from '@/nlp/repositories/nlp-sample-entity.repository'; +import { NlpValueRepository } from '@/nlp/repositories/nlp-value.repository'; +import { NlpEntityModel } from '@/nlp/schemas/nlp-entity.schema'; +import { NlpSampleEntityModel } from '@/nlp/schemas/nlp-sample-entity.schema'; +import { NlpValueModel } from '@/nlp/schemas/nlp-value.schema'; +import { NlpCacheMap } from '@/nlp/schemas/types'; +import { NlpEntityService } from '@/nlp/services/nlp-entity.service'; +import { NlpValueService } from '@/nlp/services/nlp-value.service'; import { PluginService } from '@/plugins/plugins.service'; import { SettingService } from '@/setting/services/setting.service'; import { @@ -43,12 +52,21 @@ import { blockGetStarted, blockProductListMock, blocks, + mockModifiedNlpBlock, + mockNlpBlock, + mockNlpPatternsSetOne, + mockNlpPatternsSetThree, + mockNlpPatternsSetTwo, } from '@/utils/test/mocks/block'; import { contextBlankInstance, subscriberContextBlankInstance, } from '@/utils/test/mocks/conversation'; -import { nlpEntitiesGreeting } from '@/utils/test/mocks/nlp'; +import { + mockNlpCacheMap, + mockNlpEntitiesSetOne, + nlpEntitiesGreeting, +} from '@/utils/test/mocks/nlp'; import { closeInMongodConnection, rootMongooseTestModule, @@ -61,6 +79,7 @@ import { Category, CategoryModel } from '../schemas/category.schema'; import { LabelModel } from '../schemas/label.schema'; import { FileType } from '../schemas/types/attachment'; import { StdOutgoingListMessage } from '../schemas/types/message'; +import { NlpPattern } from '../schemas/types/pattern'; import { CategoryRepository } from './../repositories/category.repository'; import { BlockService } from './block.service'; @@ -75,6 +94,7 @@ describe('BlockService', () => { let hasPreviousBlocks: Block; let contentService: ContentService; let contentTypeService: ContentTypeService; + let nlpEntityService: NlpEntityService; beforeAll(async () => { const { getMocks } = await buildTestingMocks({ @@ -91,6 +111,9 @@ describe('BlockService', () => { AttachmentModel, LabelModel, LanguageModel, + NlpEntityModel, + NlpSampleEntityModel, + NlpValueModel, ]), ], providers: [ @@ -106,6 +129,14 @@ describe('BlockService', () => { ContentService, AttachmentService, LanguageService, + NlpEntityRepository, + NlpValueRepository, + NlpSampleEntityRepository, + NlpEntityService, + { + provide: NlpValueService, + useValue: {}, + }, { 
provide: PluginService, useValue: {}, @@ -145,12 +176,14 @@ describe('BlockService', () => { contentTypeService, categoryRepository, blockRepository, + nlpEntityService, ] = await getMocks([ BlockService, ContentService, ContentTypeService, CategoryRepository, BlockRepository, + NlpEntityService, ]); category = (await categoryRepository.findOne({ label: 'default' }))!; hasPreviousBlocks = (await blockRepository.findOne({ @@ -320,6 +353,137 @@ describe('BlockService', () => { }); }); + describe('matchBestNLP', () => { + const nlpPenaltyFactor = 0.95; + it('should return the block with the highest NLP score', async () => { + jest + .spyOn(nlpEntityService, 'getNlpMap') + .mockResolvedValue(mockNlpCacheMap); + const blocks = [mockNlpBlock, blockGetStarted]; // You can add more blocks with different patterns and scores + const matchedPatterns = [mockNlpPatternsSetOne, mockNlpPatternsSetTwo]; + const nlp = mockNlpEntitiesSetOne; + // Spy on calculateBlockScore to check if it's called + const calculateBlockScoreSpy = jest.spyOn( + blockService, + 'calculateBlockScore', + ); + const bestBlock = await blockService.matchBestNLP( + blocks, + matchedPatterns, + nlp, + nlpPenaltyFactor, + ); + + // Ensure calculateBlockScore was called at least once for each block + expect(calculateBlockScoreSpy).toHaveBeenCalledTimes(2); // Called for each block + + // Restore the spy after the test + calculateBlockScoreSpy.mockRestore(); + // Assert that the block with the highest NLP score is selected + expect(bestBlock).toEqual(mockNlpBlock); + }); + + it('should return the block with the highest NLP score applying penalties', async () => { + jest + .spyOn(nlpEntityService, 'getNlpMap') + .mockResolvedValue(mockNlpCacheMap); + const blocks = [mockNlpBlock, mockModifiedNlpBlock]; // You can add more blocks with different patterns and scores + const matchedPatterns = [mockNlpPatternsSetOne, mockNlpPatternsSetThree]; + const nlp = mockNlpEntitiesSetOne; + // Spy on calculateBlockScore to check if it's called + const calculateBlockScoreSpy = jest.spyOn( + blockService, + 'calculateBlockScore', + ); + const bestBlock = await blockService.matchBestNLP( + blocks, + matchedPatterns, + nlp, + nlpPenaltyFactor, + ); + + // Ensure calculateBlockScore was called at least once for each block + expect(calculateBlockScoreSpy).toHaveBeenCalledTimes(2); // Called for each block + + // Restore the spy after the test + calculateBlockScoreSpy.mockRestore(); + // Assert that the block with the highest NLP score is selected + expect(bestBlock).toEqual(mockNlpBlock); + }); + + it('should return undefined if no blocks match or the list is empty', async () => { + jest + .spyOn(nlpEntityService, 'getNlpMap') + .mockResolvedValue(mockNlpCacheMap); + const blocks: Block[] = []; // Empty block array + const matchedPatterns: NlpPattern[][] = []; + const nlp = mockNlpEntitiesSetOne; + + const bestBlock = await blockService.matchBestNLP( + blocks, + matchedPatterns, + nlp, + nlpPenaltyFactor, + ); + + // Assert that undefined is returned when no blocks are available + expect(bestBlock).toBeUndefined(); + }); + }); + + describe('calculateBlockScore', () => { + const nlpPenaltyFactor = 0.9; + it('should calculate the correct NLP score for a block', async () => { + const score = blockService.calculateBlockScore( + mockNlpPatternsSetOne, + mockNlpEntitiesSetOne, + mockNlpCacheMap, + nlpPenaltyFactor, + ); + const score2 = blockService.calculateBlockScore( + mockNlpPatternsSetTwo, + mockNlpEntitiesSetOne, + mockNlpCacheMap, + nlpPenaltyFactor, + ); + 
+ expect(score).toBeGreaterThan(0); + expect(score2).toBe(0); + expect(score).toBeGreaterThan(score2); + }); + + it('should calculate the correct NLP score for a block and apply penalties ', async () => { + const score = blockService.calculateBlockScore( + mockNlpPatternsSetOne, + mockNlpEntitiesSetOne, + mockNlpCacheMap, + nlpPenaltyFactor, + ); + const score2 = blockService.calculateBlockScore( + mockNlpPatternsSetThree, + mockNlpEntitiesSetOne, + mockNlpCacheMap, + nlpPenaltyFactor, + ); + + expect(score).toBeGreaterThan(0); + expect(score2).toBeGreaterThan(0); + expect(score).toBeGreaterThan(score2); + }); + + it('should return 0 if no matching entities are found', async () => { + const nlpCacheMap: NlpCacheMap = new Map(); + const score = blockService.calculateBlockScore( + mockNlpPatternsSetTwo, + mockNlpEntitiesSetOne, + nlpCacheMap, + nlpPenaltyFactor, + ); + + expect(score).toBe(0); // No matching entity, so score should be 0 + }); + }); + describe('matchPayload', () => { it('should return undefined for empty payload', () => { const result = blockService.matchPayload('', blockGetStarted); diff --git a/api/src/chat/services/block.service.ts b/api/src/chat/services/block.service.ts index 5f0dbbb8b..d023c2412 100644 --- a/api/src/chat/services/block.service.ts +++ b/api/src/chat/services/block.service.ts @@ -16,9 +16,12 @@ import { CONSOLE_CHANNEL_NAME } from '@/extensions/channels/console/settings'; import { NLU } from '@/helper/types'; import { I18nService } from '@/i18n/services/i18n.service'; import { LanguageService } from '@/i18n/services/language.service'; +import { NlpCacheMap } from '@/nlp/schemas/types'; +import { NlpEntityService } from '@/nlp/services/nlp-entity.service'; import { PluginService } from '@/plugins/plugins.service'; import { PluginType } from '@/plugins/types'; import { SettingService } from '@/setting/services/setting.service'; +import { FALLBACK_DEFAULT_NLU_PENALTY_FACTOR } from '@/utils/constants/nlp'; import { BaseService } from '@/utils/generics/base-service'; import { getRandomElement } from '@/utils/helpers/safeRandom'; @@ -35,7 +38,12 @@ import { StdOutgoingEnvelope, StdOutgoingSystemEnvelope, } from '../schemas/types/message'; -import { NlpPattern, PayloadPattern } from '../schemas/types/pattern'; +import { + isNlpPattern, + NlpPattern, + NlpPatternMatchResult, + PayloadPattern, +} from '../schemas/types/pattern'; import { Payload, StdQuickReply } from '../schemas/types/quick-reply'; import { SubscriberContext } from '../schemas/types/subscriberContext'; @@ -53,6 +61,7 @@ export class BlockService extends BaseService< private readonly pluginService: PluginService, protected readonly i18n: I18nService, protected readonly languageService: LanguageService, + protected readonly entityService: NlpEntityService, ) { super(repository); } @@ -181,20 +190,48 @@ export class BlockService extends BaseService< .shift(); // Perform an NLP Match + if (!block && nlp) { - // Find block pattern having the best match of nlp entities - let nlpBest = 0; - filteredBlocks.forEach((b, index, self) => { - const nlpPattern = this.matchNLP(nlp, b); - if (nlpPattern && nlpPattern.length > nlpBest) { - nlpBest = nlpPattern.length; - block = self[index]; - } - }); + // Use the `reduce` function to iterate over `filteredBlocks` and accumulate a new array `matchesWithPatterns`. + // This approach combines the matching of NLP patterns and filtering of blocks with empty or invalid matches + // into a single operation. 
This avoids the need for a separate mapping and filtering step, improving performance.
+ // For each block in `filteredBlocks`, we call `matchNLP` to find patterns that match the NLP data.
+ // If `matchNLP` returns a non-empty list of matched patterns, the block and its matched patterns are added
+ // to the accumulator array `acc`, which is returned as the final result.
+ // This ensures that only blocks with valid matches are kept, and blocks with no matches are excluded,
+ // all while iterating through the list only once.
+
+ const matchesWithPatterns: NlpPatternMatchResult[] =
+ filteredBlocks.reduce((acc, b) => {
+ const matchedPattern = this.matchNLP(nlp, b);
+
+ if (matchedPattern && matchedPattern.length > 0) {
+ acc.push({ block: b, matchedPattern });
+ }
+ return acc;
+ }, []);
+
+ // Log the matched patterns
+ this.logger.debug(
+ `Matched patterns: ${JSON.stringify(matchesWithPatterns.map((p) => p.matchedPattern))}`,
+ );
+
+ // Retrieve the NLU penalty factor from global settings
+ const nluPenaltyFactor: number =
+ await this.getDefaultNluPenaltyFactor();
+
+ // Proceed with matching the best NLP block
+ if (matchesWithPatterns.length > 0) {
+ block = (await this.matchBestNLP(
+ matchesWithPatterns.map((m) => m.block),
+ matchesWithPatterns.map((p) => p.matchedPattern),
+ nlp,
+ nluPenaltyFactor,
+ )) as BlockFull | undefined;
+ }
}
}
- // Uknown event type => return false;
- // this.logger.error('Unable to recognize event type while matching', event);
+
return block;
}
@@ -313,12 +350,10 @@
const nlpPatterns = block.patterns?.filter((p) => {
return Array.isArray(p);
}) as NlpPattern[][];
- // No nlp patterns found
if (nlpPatterns.length === 0) {
return undefined;
}
- // Find NLP pattern match based on best guessed entities
return nlpPatterns.find((entities: NlpPattern[]) => {
return entities.every((ev: NlpPattern) => {
@@ -338,6 +373,139 @@
});
}
+
+ /**
+ * Selects the best-matching block based on NLP pattern scoring.
+ *
+ * This function evaluates each block by calculating a score derived from its matched NLP patterns,
+ * the parsed NLP entities, and a penalty factor. It compares the scores across all blocks and
+ * returns the one with the highest calculated score.
+ *
+ * @param blocks - An array of candidate blocks to evaluate.
+ * @param matchedPatterns - A two-dimensional array of matched NLP patterns corresponding to each block.
+ * @param nlp - The parsed NLP entities used for scoring.
+ * @param nlpPenaltyFactor - A numeric penalty factor applied during scoring to influence block selection.
+ * @returns The block with the highest NLP score, or undefined if no valid block is found.
+ */
+ async matchBestNLP(
+ blocks: (Block | BlockFull)[] | undefined,
+ matchedPatterns: NlpPattern[][],
+ nlp: NLU.ParseEntities,
+ nlpPenaltyFactor: number,
+ ): Promise<Block | BlockFull | undefined> {
+ if (!blocks || blocks.length === 0) return undefined;
+ if (blocks.length === 1) return blocks[0];
+
+ let bestBlock: Block | BlockFull | undefined;
+ let highestScore = 0;
+ const entityNames: string[] = blocks.flatMap((block) =>
+ block.patterns.flatMap((patternGroup) => {
+ if (Array.isArray(patternGroup)) {
+ return patternGroup.flatMap((pattern) =>
+ isNlpPattern(pattern) ? [pattern.entity] : [],
+ );
+ }
+ return []; // Skip non-array patternGroups
+ }),
+ );
+ const uniqueEntityNames: string[] = [...new Set(entityNames)];
+ const nlpCacheMap: NlpCacheMap =
+ await this.entityService.getNlpMap(uniqueEntityNames);
+ // Iterate through all blocks and calculate their NLP score
+ for (let i = 0; i < blocks.length; i++) {
+ const block = blocks[i];
+ const patterns = matchedPatterns[i];
+ // Calculate the NLP score for this block
+ const nlpScore: number = this.calculateBlockScore(
+ patterns,
+ nlp,
+ nlpCacheMap,
+ nlpPenaltyFactor,
+ );
+ if (nlpScore > highestScore) {
+ highestScore = nlpScore;
+ bestBlock = block;
+ }
+ }
+
+ this.logger.debug(`Best NLP score obtained: ${highestScore}`);
+ this.logger.debug(`Best block selected: ${JSON.stringify(bestBlock)}`);
+
+ return bestBlock;
+ }
+
+ /**
+ * Computes the NLP score for a given block using its matched NLP patterns and parsed NLP entities.
+ *
+ * Each pattern is evaluated against the parsed NLP entities to determine matches based on entity name,
+ * value, and confidence. A score is computed using the entity's weight and the confidence level of the match.
+ * A penalty factor is optionally applied for entity-level matches to adjust the scoring.
+ *
+ * The function uses a cache (`nlpCacheMap`) to avoid redundant database lookups for entity metadata.
+ *
+ * @param patterns - The NLP patterns associated with the block.
+ * @param nlp - The parsed NLP entities from the user input.
+ * @param nlpCacheMap - A cache to reuse fetched entity metadata (e.g., weights and valid values).
+ * @param nlpPenaltyFactor - A multiplier applied to scores when the pattern match type is 'entity'.
+ * @returns A numeric score representing how well the block matches the given NLP context.
+ */
+ calculateBlockScore(
+ patterns: NlpPattern[],
+ nlp: NLU.ParseEntities,
+ nlpCacheMap: NlpCacheMap,
+ nlpPenaltyFactor: number,
+ ): number {
+ // Compute individual pattern scores using the cache
+ const patternScores: number[] = patterns.map((pattern) => {
+ const entityData = nlpCacheMap.get(pattern.entity);
+ if (!entityData) return 0;
+
+ const matchedEntity: NLU.ParseEntity | undefined = nlp.entities.find(
+ (e) =>
+ e.entity === pattern.entity &&
+ entityData?.values.some((v) => v === e.value) &&
+ (pattern.match !== 'value' || e.value === pattern.value),
+ );
+
+ return matchedEntity?.confidence
+ ? matchedEntity.confidence *
+ entityData.weight *
+ (pattern.match === 'entity' ? nlpPenaltyFactor : 1)
+ : 0;
+ });
+
+ // Sum the scores
+ return patternScores.reduce((sum, score) => sum + score, 0);
+ }
+
+ /**
+ * Retrieves the default NLU penalty factor from chatbot settings.
+ *
+ * This factor is used in NLU-based block scoring to reduce the influence
+ * of generic or broad entity matches (e.g. patterns using "Any").
+ * It helps prioritize blocks with more specific and confident entity matches.
+ *
+ * @returns {Promise<number>} The configured default NLU penalty factor.
+ */
+ async getDefaultNluPenaltyFactor(): Promise<number> {
+ const settings: Settings = await this.settingService.getSettings();
+ const nluPenaltyFactor =
+ settings.chatbot_settings.default_nlu_penalty_factor;
+
+ if (typeof nluPenaltyFactor !== 'number') {
+ this.logger.error(
+ 'NLU Penalty Factor setting is missing or invalid. Using fallback...',
+ );
+ return FALLBACK_DEFAULT_NLU_PENALTY_FACTOR;
+ }
+ if (nluPenaltyFactor < 0 || nluPenaltyFactor > 1) {
+ this.logger.error(
+ 'NLU Penalty Factor must be between 0 and 1.
Using fallback...', + ); + return FALLBACK_DEFAULT_NLU_PENALTY_FACTOR; + } + return nluPenaltyFactor; + } + /** * Matches an outcome-based block from a list of available blocks * based on the outcome of a system message. diff --git a/api/src/chat/services/bot.service.spec.ts b/api/src/chat/services/bot.service.spec.ts index 820cff080..951c74e0b 100644 --- a/api/src/chat/services/bot.service.spec.ts +++ b/api/src/chat/services/bot.service.spec.ts @@ -33,6 +33,14 @@ import { LanguageRepository } from '@/i18n/repositories/language.repository'; import { LanguageModel } from '@/i18n/schemas/language.schema'; import { I18nService } from '@/i18n/services/i18n.service'; import { LanguageService } from '@/i18n/services/language.service'; +import { NlpEntityRepository } from '@/nlp/repositories/nlp-entity.repository'; +import { NlpSampleEntityRepository } from '@/nlp/repositories/nlp-sample-entity.repository'; +import { NlpValueRepository } from '@/nlp/repositories/nlp-value.repository'; +import { NlpEntityModel } from '@/nlp/schemas/nlp-entity.schema'; +import { NlpSampleEntityModel } from '@/nlp/schemas/nlp-sample-entity.schema'; +import { NlpValueModel } from '@/nlp/schemas/nlp-value.schema'; +import { NlpEntityService } from '@/nlp/services/nlp-entity.service'; +import { NlpValueService } from '@/nlp/services/nlp-value.service'; import { PluginService } from '@/plugins/plugins.service'; import { SettingService } from '@/setting/services/setting.service'; import { installBlockFixtures } from '@/utils/test/fixtures/block'; @@ -100,6 +108,9 @@ describe('BlockService', () => { MenuModel, ContextVarModel, LanguageModel, + NlpEntityModel, + NlpSampleEntityModel, + NlpValueModel, ]), JwtModule, ], @@ -131,6 +142,11 @@ describe('BlockService', () => { ContextVarService, ContextVarRepository, LanguageService, + NlpEntityService, + NlpEntityRepository, + NlpSampleEntityRepository, + NlpValueRepository, + NlpValueService, { provide: HelperService, useValue: {}, diff --git a/api/src/helper/lib/__test__/base-nlp-helper.spec.ts b/api/src/helper/lib/__test__/base-nlp-helper.spec.ts index 4f2f8ddb0..c9509b1d8 100644 --- a/api/src/helper/lib/__test__/base-nlp-helper.spec.ts +++ b/api/src/helper/lib/__test__/base-nlp-helper.spec.ts @@ -139,6 +139,7 @@ describe('BaseNlpHelper', () => { updatedAt: new Date(), builtin: false, lookups: [], + weight: 1, }, entity2: { id: new ObjectId().toString(), @@ -147,6 +148,7 @@ describe('BaseNlpHelper', () => { updatedAt: new Date(), builtin: false, lookups: [], + weight: 1, }, }); jest.spyOn(NlpValue, 'getValueMap').mockReturnValue({ @@ -207,6 +209,7 @@ describe('BaseNlpHelper', () => { updatedAt: new Date(), builtin: false, lookups: [], + weight: 1, }, }); diff --git a/api/src/i18n/controllers/translation.controller.spec.ts b/api/src/i18n/controllers/translation.controller.spec.ts index 06dabf288..e6b4d3ce5 100644 --- a/api/src/i18n/controllers/translation.controller.spec.ts +++ b/api/src/i18n/controllers/translation.controller.spec.ts @@ -30,6 +30,14 @@ import { MenuModel } from '@/cms/schemas/menu.schema'; import { ContentService } from '@/cms/services/content.service'; import { MenuService } from '@/cms/services/menu.service'; import { I18nService } from '@/i18n/services/i18n.service'; +import { NlpEntityRepository } from '@/nlp/repositories/nlp-entity.repository'; +import { NlpSampleEntityRepository } from '@/nlp/repositories/nlp-sample-entity.repository'; +import { NlpValueRepository } from '@/nlp/repositories/nlp-value.repository'; +import { NlpEntityModel } from 
'@/nlp/schemas/nlp-entity.schema'; +import { NlpSampleEntityModel } from '@/nlp/schemas/nlp-sample-entity.schema'; +import { NlpValueModel } from '@/nlp/schemas/nlp-value.schema'; +import { NlpEntityService } from '@/nlp/services/nlp-entity.service'; +import { NlpValueService } from '@/nlp/services/nlp-value.service'; import { NlpService } from '@/nlp/services/nlp.service'; import { PluginService } from '@/plugins/plugins.service'; import { SettingService } from '@/setting/services/setting.service'; @@ -75,6 +83,9 @@ describe('TranslationController', () => { BlockModel, ContentModel, LanguageModel, + NlpEntityModel, + NlpSampleEntityModel, + NlpValueModel, ]), ], providers: [ @@ -130,6 +141,11 @@ describe('TranslationController', () => { }, LanguageService, LanguageRepository, + NlpEntityRepository, + NlpEntityService, + NlpValueRepository, + NlpValueService, + NlpSampleEntityRepository, ], }); [translationService, translationController] = await getMocks([ diff --git a/api/src/migration/migrations/1735836154221-v-2-2-0.migration.ts b/api/src/migration/migrations/1735836154221-v-2-2-0.migration.ts index c77538578..aebde14f7 100644 --- a/api/src/migration/migrations/1735836154221-v-2-2-0.migration.ts +++ b/api/src/migration/migrations/1735836154221-v-2-2-0.migration.ts @@ -798,9 +798,9 @@ const addDefaultStorageHelper = async ({ logger }: MigrationServices) => { upsert: true, }, ); - logger.log('Successfuly added the default local storage helper setting'); + logger.log('Successfully added the default local storage helper setting'); } catch (err) { - logger.error('Unable to add the default local storage helper setting'); + logger.error('Unable to add the default local storage helper setting', err); } }; @@ -811,9 +811,12 @@ const removeDefaultStorageHelper = async ({ logger }: MigrationServices) => { group: 'chatbot_settings', label: 'default_storage_helper', }); - logger.log('Successfuly removed the default local storage helper setting'); + logger.log('Successfully removed the default local storage helper setting'); } catch (err) { - logger.error('Unable to remove the default local storage helper setting'); + logger.error( + 'Unable to remove the default local storage helper setting', + err, + ); } }; diff --git a/api/src/migration/migrations/1745594957327-v-2-2-6.migration.ts b/api/src/migration/migrations/1745594957327-v-2-2-6.migration.ts new file mode 100644 index 000000000..d348ac4da --- /dev/null +++ b/api/src/migration/migrations/1745594957327-v-2-2-6.migration.ts @@ -0,0 +1,71 @@ +/* + * Copyright © 2025 Hexastack. All rights reserved. + * + * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: + * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. + * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). 
+ */ + +import mongoose from 'mongoose'; + +import settingSchema, { Setting } from '@/setting/schemas/setting.schema'; +import { SettingType } from '@/setting/schemas/types'; + +import { MigrationServices } from '../types'; + +const addDefaultNluPenaltyFactor = async ({ logger }: MigrationServices) => { + const SettingModel = mongoose.model(Setting.name, settingSchema); + try { + await SettingModel.updateOne( + { + group: 'chatbot_settings', + label: 'default_nlu_penalty_factor', + }, + { + group: 'chatbot_settings', + label: 'default_nlu_penalty_factor', + value: 0.95, + type: SettingType.number, + config: { + min: 0, + max: 1, + step: 0.01, + }, + weight: 2, + }, + { + upsert: true, + }, + ); + logger.log('Successfully added the default NLU penalty factor setting'); + } catch (err) { + logger.error('Unable to add the default NLU penalty factor setting', err); + } +}; + +const removeDefaultNluPenaltyFactor = async ({ logger }: MigrationServices) => { + const SettingModel = mongoose.model(Setting.name, settingSchema); + try { + await SettingModel.deleteOne({ + group: 'chatbot_settings', + label: 'default_nlu_penalty_factor', + }); + logger.log('Successfully removed the default NLU penalty factor setting'); + } catch (err) { + logger.error( + 'Unable to remove the default NLU penalty factor setting', + err, + ); + } +}; + +module.exports = { + async up(services: MigrationServices) { + await addDefaultNluPenaltyFactor(services); + return true; + }, + async down(services: MigrationServices) { + await removeDefaultNluPenaltyFactor(services); + return true; + }, +}; diff --git a/api/src/nlp/controllers/nlp-entity.controller.spec.ts b/api/src/nlp/controllers/nlp-entity.controller.spec.ts index c1334ef93..8eadf3575 100644 --- a/api/src/nlp/controllers/nlp-entity.controller.spec.ts +++ b/api/src/nlp/controllers/nlp-entity.controller.spec.ts @@ -6,6 +6,7 @@ * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). 
*/ +import { CACHE_MANAGER } from '@nestjs/cache-manager'; import { BadRequestException, MethodNotAllowedException, @@ -67,6 +68,12 @@ describe('NlpEntityController', () => { NlpValueService, NlpSampleEntityRepository, NlpValueRepository, + { + provide: CACHE_MANAGER, + useValue: { + del: jest.fn(), + }, + }, ], }); [nlpEntityController, nlpValueService, nlpEntityService] = await getMocks([ @@ -109,6 +116,7 @@ describe('NlpEntityController', () => { ) as NlpEntityFull['values'], lookups: curr.lookups!, builtin: curr.builtin!, + weight: curr.weight!, }); return acc; }, @@ -163,6 +171,7 @@ describe('NlpEntityController', () => { name: 'sentiment', lookups: ['trait'], builtin: false, + weight: 1, }; const result = await nlpEntityController.create(sentimentEntity); expect(result).toEqualPayload(sentimentEntity); @@ -214,6 +223,7 @@ describe('NlpEntityController', () => { updatedAt: firstNameEntity!.updatedAt, lookups: firstNameEntity!.lookups, builtin: firstNameEntity!.builtin, + weight: firstNameEntity!.weight, }; const result = await nlpEntityController.findOne(firstNameEntity!.id, [ 'values', @@ -238,6 +248,7 @@ describe('NlpEntityController', () => { doc: '', lookups: ['trait'], builtin: false, + weight: 1, }; const result = await nlpEntityController.updateOne( firstNameEntity!.id, @@ -258,7 +269,7 @@ describe('NlpEntityController', () => { ).rejects.toThrow(NotFoundException); }); - it('should throw exception when nlp entity is builtin', async () => { + it('should throw an exception if entity is builtin but weight not provided', async () => { const updateNlpEntity: NlpEntityCreateDto = { name: 'updated', doc: '', @@ -269,6 +280,57 @@ describe('NlpEntityController', () => { nlpEntityController.updateOne(buitInEntityId!, updateNlpEntity), ).rejects.toThrow(MethodNotAllowedException); }); + + it('should update weight if entity is builtin and weight is provided', async () => { + const updatedNlpEntity: NlpEntityCreateDto = { + name: 'updated', + doc: '', + lookups: ['trait'], + builtin: false, + weight: 4, + }; + const findOneSpy = jest.spyOn(nlpEntityService, 'findOne'); + const updateWeightSpy = jest.spyOn(nlpEntityService, 'updateWeight'); + + const result = await nlpEntityController.updateOne( + buitInEntityId!, + updatedNlpEntity, + ); + + expect(findOneSpy).toHaveBeenCalledWith(buitInEntityId!); + expect(updateWeightSpy).toHaveBeenCalledWith( + buitInEntityId!, + updatedNlpEntity.weight, + ); + expect(result.weight).toBe(updatedNlpEntity.weight); + }); + + it('should update only the weight of the builtin entity', async () => { + const updatedNlpEntity: NlpEntityCreateDto = { + name: 'updated', + doc: '', + lookups: ['trait'], + builtin: false, + weight: 4, + }; + const originalEntity: NlpEntity | null = await nlpEntityService.findOne( + buitInEntityId!, + ); + + const result: NlpEntity = await nlpEntityController.updateOne( + buitInEntityId!, + updatedNlpEntity, + ); + + // Check weight is updated + expect(result.weight).toBe(updatedNlpEntity.weight); + + Object.entries(originalEntity!).forEach(([key, value]) => { + if (key !== 'weight' && key !== 'updatedAt') { + expect(result[key as keyof typeof result]).toEqual(value); + } + }); + }); }); describe('deleteMany', () => { it('should delete multiple nlp entities', async () => { diff --git a/api/src/nlp/controllers/nlp-entity.controller.ts b/api/src/nlp/controllers/nlp-entity.controller.ts index 0adbce591..a801ff150 100644 --- a/api/src/nlp/controllers/nlp-entity.controller.ts +++ b/api/src/nlp/controllers/nlp-entity.controller.ts @@ 
-157,10 +157,19 @@ export class NlpEntityController extends BaseController< this.logger.warn(`Unable to update NLP Entity by id ${id}`); throw new NotFoundException(`NLP Entity with ID ${id} not found`); } + if (nlpEntity.builtin) { - throw new MethodNotAllowedException( - `Cannot update builtin NLP Entity ${nlpEntity.name}`, - ); + // Only allow weight update for builtin entities + if (updateNlpEntityDto.weight) { + return await this.nlpEntityService.updateWeight( + id, + updateNlpEntityDto.weight, + ); + } else { + throw new MethodNotAllowedException( + `Cannot update builtin NLP Entity ${nlpEntity.name} except for weight`, + ); + } } return await this.nlpEntityService.updateOne(id, updateNlpEntityDto); diff --git a/api/src/nlp/controllers/nlp-sample.controller.spec.ts b/api/src/nlp/controllers/nlp-sample.controller.spec.ts index 4da14ed04..031c5dacf 100644 --- a/api/src/nlp/controllers/nlp-sample.controller.spec.ts +++ b/api/src/nlp/controllers/nlp-sample.controller.spec.ts @@ -372,6 +372,7 @@ describe('NlpSampleController', () => { lookups: ['trait'], doc: '', builtin: false, + weight: 1, }; const priceValueEntity = await nlpEntityService.findOne({ name: 'intent', diff --git a/api/src/nlp/controllers/nlp-value.controller.spec.ts b/api/src/nlp/controllers/nlp-value.controller.spec.ts index a72b85710..be6af2017 100644 --- a/api/src/nlp/controllers/nlp-value.controller.spec.ts +++ b/api/src/nlp/controllers/nlp-value.controller.spec.ts @@ -6,6 +6,7 @@ * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). */ +import { CACHE_MANAGER } from '@nestjs/cache-manager'; import { BadRequestException, NotFoundException } from '@nestjs/common'; import { MongooseModule } from '@nestjs/mongoose'; @@ -57,6 +58,12 @@ describe('NlpValueController', () => { NlpSampleEntityRepository, NlpEntityService, NlpEntityRepository, + { + provide: CACHE_MANAGER, + useValue: { + del: jest.fn(), + }, + }, ], }); [nlpValueController, nlpValueService, nlpEntityService] = await getMocks([ diff --git a/api/src/nlp/dto/nlp-entity.dto.ts b/api/src/nlp/dto/nlp-entity.dto.ts index 009b1841b..6efaa9992 100644 --- a/api/src/nlp/dto/nlp-entity.dto.ts +++ b/api/src/nlp/dto/nlp-entity.dto.ts @@ -1,5 +1,5 @@ /* - * Copyright © 2024 Hexastack. All rights reserved. + * Copyright © 2025 Hexastack. All rights reserved. * * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. @@ -12,6 +12,7 @@ import { IsBoolean, IsIn, IsNotEmpty, + IsNumber, IsOptional, IsString, Matches, @@ -47,6 +48,14 @@ export class NlpEntityCreateDto { @IsBoolean() @IsOptional() builtin?: boolean; + + @ApiPropertyOptional({ + description: 'Nlp entity associated weight for next block triggering', + type: Number, + }) + @IsNumber() + @IsOptional() + weight?: number; } export type NlpEntityDto = DtoConfig<{ diff --git a/api/src/nlp/nlp.module.ts b/api/src/nlp/nlp.module.ts index 342ffa1f5..4c783bd87 100644 --- a/api/src/nlp/nlp.module.ts +++ b/api/src/nlp/nlp.module.ts @@ -1,5 +1,5 @@ /* - * Copyright © 2024 Hexastack. All rights reserved. + * Copyright © 2025 Hexastack. All rights reserved. * * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: * 1. 
The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. diff --git a/api/src/nlp/schemas/nlp-entity.schema.ts b/api/src/nlp/schemas/nlp-entity.schema.ts index 86085d050..0c61c0c88 100644 --- a/api/src/nlp/schemas/nlp-entity.schema.ts +++ b/api/src/nlp/schemas/nlp-entity.schema.ts @@ -1,5 +1,5 @@ /* - * Copyright © 2024 Hexastack. All rights reserved. + * Copyright © 2025 Hexastack. All rights reserved. * * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. @@ -58,6 +58,12 @@ export class NlpEntityStub extends BaseSchema { @Prop({ type: Boolean, default: false }) builtin: boolean; + /** + * Entity's weight used to determine the next block to trigger in the conversational flow. + */ + @Prop({ type: Number, default: 1, min: 0 }) + weight: number; + /** * Returns a map object for entities * @param entities - Array of entities diff --git a/api/src/nlp/schemas/types.ts b/api/src/nlp/schemas/types.ts index 482fdf578..3d5634bff 100644 --- a/api/src/nlp/schemas/types.ts +++ b/api/src/nlp/schemas/types.ts @@ -1,5 +1,5 @@ /* - * Copyright © 2024 Hexastack. All rights reserved. + * Copyright © 2025 Hexastack. All rights reserved. * * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. @@ -25,3 +25,8 @@ export enum NlpSampleState { test = 'test', inbox = 'inbox', } + +export type NlpCacheMap = Map< + string, + { id: string; weight: number; values: string[] } +>; diff --git a/api/src/nlp/services/nlp-entity.service.spec.ts b/api/src/nlp/services/nlp-entity.service.spec.ts index 53118e95d..04b0fc770 100644 --- a/api/src/nlp/services/nlp-entity.service.spec.ts +++ b/api/src/nlp/services/nlp-entity.service.spec.ts @@ -6,6 +6,7 @@ * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). 
*/ +import { CACHE_MANAGER } from '@nestjs/cache-manager'; import { MongooseModule } from '@nestjs/mongoose'; import { nlpEntityFixtures } from '@/utils/test/fixtures/nlpentity'; @@ -20,7 +21,11 @@ import { buildTestingMocks } from '@/utils/test/utils'; import { NlpEntityRepository } from '../repositories/nlp-entity.repository'; import { NlpSampleEntityRepository } from '../repositories/nlp-sample-entity.repository'; import { NlpValueRepository } from '../repositories/nlp-value.repository'; -import { NlpEntity, NlpEntityModel } from '../schemas/nlp-entity.schema'; +import { + NlpEntity, + NlpEntityFull, + NlpEntityModel, +} from '../schemas/nlp-entity.schema'; import { NlpSampleEntityModel } from '../schemas/nlp-sample-entity.schema'; import { NlpValueModel } from '../schemas/nlp-value.schema'; @@ -48,6 +53,12 @@ describe('nlpEntityService', () => { NlpValueService, NlpValueRepository, NlpSampleEntityRepository, + { + provide: CACHE_MANAGER, + useValue: { + del: jest.fn(), + }, + }, ], }); [nlpEntityService, nlpEntityRepository, nlpValueRepository] = @@ -117,6 +128,77 @@ describe('nlpEntityService', () => { expect(result).toEqualPayload(entitiesWithValues); }); }); + describe('NlpEntityService - updateWeight', () => { + let createdEntity: NlpEntity; + beforeEach(async () => { + createdEntity = await nlpEntityRepository.create({ + name: 'testentity', + builtin: false, + weight: 3, + }); + }); + + it('should update the weight of an NLP entity', async () => { + const newWeight = 8; + + const updatedEntity = await nlpEntityService.updateWeight( + createdEntity.id, + newWeight, + ); + + expect(updatedEntity.weight).toBe(newWeight); + }); + + it('should handle updating weight of non-existent entity', async () => { + const nonExistentId = '507f1f77bcf86cd799439011'; // Example MongoDB ObjectId + + try { + await nlpEntityService.updateWeight(nonExistentId, 5); + fail('Expected error was not thrown'); + } catch (error) { + expect(error).toBeDefined(); + } + }); + + it('should use default weight of 1 when creating entity without weight', async () => { + const createdEntity = await nlpEntityRepository.create({ + name: 'entityWithoutWeight', + builtin: true, + // weight not specified + }); + + expect(createdEntity.weight).toBe(1); + }); + + it('should throw an error if weight is less than 1', async () => { + const invalidWeight = 0; + + await expect( + nlpEntityService.updateWeight(createdEntity.id, invalidWeight), + ).rejects.toThrow('Weight must be a positive integer'); + }); + + it('should throw an error if weight is a decimal', async () => { + const invalidWeight = 2.5; + + await expect( + nlpEntityService.updateWeight(createdEntity.id, invalidWeight), + ).rejects.toThrow('Weight must be a positive integer'); + }); + + it('should throw an error if weight is negative', async () => { + const invalidWeight = -3; + + await expect( + nlpEntityService.updateWeight(createdEntity.id, invalidWeight), + ).rejects.toThrow('Weight must be a positive integer'); + }); + + afterEach(async () => { + // Clean the collection after each test + await nlpEntityRepository.deleteOne(createdEntity.id); + }); + }); describe('storeNewEntities', () => { it('should store new entities', async () => { @@ -150,4 +232,58 @@ describe('nlpEntityService', () => { expect(result).toEqualPayload(storedEntites); }); }); + describe('getNlpMap', () => { + it('should return a NlpCacheMap with the correct structure', async () => { + // Arrange + const firstMockValues = { + id: '1', + weight: 1, + }; + const firstMockLookup = { + name: 
'intent', + ...firstMockValues, + values: [{ value: 'buy' }, { value: 'sell' }], + } as unknown as Partial; + const secondMockValues = { + id: '2', + weight: 5, + }; + const secondMockLook = { + name: 'subject', + ...secondMockValues, + values: [{ value: 'product' }], + } as unknown as Partial; + const mockLookups = [firstMockLookup, secondMockLook]; + + const entityNames = ['intent', 'subject']; + + // Mock findAndPopulate + jest + .spyOn(nlpEntityService, 'findAndPopulate') + .mockResolvedValue(mockLookups as unknown as NlpEntityFull[]); + + // Act + const result = await nlpEntityService.getNlpMap(entityNames); + + expect(result).toBeInstanceOf(Map); + expect(result.size).toBe(2); + expect(result.get('intent')).toEqual({ + ...firstMockValues, + values: ['buy', 'sell'], + }); + expect(result.get('subject')).toEqual({ + ...secondMockValues, + values: ['product'], + }); + }); + + it('should return an empty map if no lookups are found', async () => { + jest.spyOn(nlpEntityService, 'findAndPopulate').mockResolvedValue([]); + + const result = await nlpEntityService.getNlpMap(['nonexistent']); + + expect(result).toBeInstanceOf(Map); + expect(result.size).toBe(0); + }); + }); }); diff --git a/api/src/nlp/services/nlp-entity.service.ts b/api/src/nlp/services/nlp-entity.service.ts index e8531e959..39932ef7d 100644 --- a/api/src/nlp/services/nlp-entity.service.ts +++ b/api/src/nlp/services/nlp-entity.service.ts @@ -1,13 +1,18 @@ /* - * Copyright © 2024 Hexastack. All rights reserved. + * Copyright © 2025 Hexastack. All rights reserved. * * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). */ -import { Injectable } from '@nestjs/common'; +import { CACHE_MANAGER } from '@nestjs/cache-manager'; +import { Inject, Injectable } from '@nestjs/common'; +import { OnEvent } from '@nestjs/event-emitter'; +import { Cache } from 'cache-manager'; +import { NLP_MAP_CACHE_KEY } from '@/utils/constants/cache'; +import { Cacheable } from '@/utils/decorators/cacheable.decorator'; import { BaseService } from '@/utils/generics/base-service'; import { Lookup, NlpEntityDto } from '../dto/nlp-entity.dto'; @@ -17,7 +22,7 @@ import { NlpEntityFull, NlpEntityPopulate, } from '../schemas/nlp-entity.schema'; -import { NlpSampleEntityValue } from '../schemas/types'; +import { NlpCacheMap, NlpSampleEntityValue } from '../schemas/types'; import { NlpValueService } from './nlp-value.service'; @@ -30,6 +35,7 @@ export class NlpEntityService extends BaseService< > { constructor( readonly repository: NlpEntityRepository, + @Inject(CACHE_MANAGER) private readonly cacheManager: Cache, private readonly nlpValueService: NlpValueService, ) { super(repository); @@ -46,6 +52,28 @@ export class NlpEntityService extends BaseService< return await this.repository.deleteOne(id); } + /** + * Updates the `weight` field of a specific NLP entity by its ID. + * + * This method is part of the NLP-based blocks prioritization strategy. + * The weight influences the scoring of blocks when multiple blocks match a user's input. + * @param id - The unique identifier of the entity to update. + * @param updatedWeight - The new weight to assign. 
Must be a positive integer. + * @throws Error if the weight is not a positive integer. + * @returns A promise that resolves to the updated entity. + */ + async updateWeight(id: string, updatedWeight: number): Promise { + if (!Number.isInteger(updatedWeight) || updatedWeight < 1) { + throw new Error('Weight must be a positive integer'); + } + + return await this.repository.updateOne( + id, + { weight: updatedWeight }, + { new: true }, + ); + } + /** * Stores new entities based on the sample text and sample entities. * Deletes all values relative to this entity before deleting the entity itself. @@ -97,4 +125,58 @@ export class NlpEntityService extends BaseService< ); return Promise.all(findOrCreate); } + + /** + * Clears the NLP map cache + */ + async clearCache() { + this.cacheManager.del(NLP_MAP_CACHE_KEY); + } + + /** + * Event handler for Nlp Entity updates. Listens to 'hook:nlpEntity:*' events + * and invalidates the cache for nlp entities when triggered. + */ + @OnEvent('hook:nlpEntity:*') + async handleNlpEntityUpdateEvent() { + this.clearCache(); + } + + /** + * Event handler for Nlp Value updates. Listens to 'hook:nlpValue:*' events + * and invalidates the cache for nlp values when triggered. + */ + @OnEvent('hook:nlpValue:*') + async handleNlpValueUpdateEvent() { + this.clearCache(); + } + + /** + * Retrieves NLP entity lookup information for the given list of entity names. + * + * This method queries the database for lookups that match any of the provided + * entity names, transforms the result into a map structure where each key is + * the entity name and each value contains metadata (id, weight, and list of values), + * and caches the result using the configured cache key. + * + * @param entityNames - Array of entity names to retrieve lookup data for. + * @returns A Promise that resolves to a map of entity name to its corresponding lookup metadata. + */ + @Cacheable(NLP_MAP_CACHE_KEY) + async getNlpMap(entityNames: string[]): Promise { + const lookups = await this.findAndPopulate({ name: { $in: entityNames } }); + const map: NlpCacheMap = new Map(); + if (!lookups.length) { + return map; // Return empty map if no entities found + } + for (const lookup of lookups) { + map.set(lookup.name, { + id: lookup.id, + weight: lookup.weight, + values: lookup.values?.map((v) => v.value) ?? [], + }); + } + + return map; + } } diff --git a/api/src/nlp/services/nlp-sample-entity.service.spec.ts b/api/src/nlp/services/nlp-sample-entity.service.spec.ts index ed2be6f11..44c153bb3 100644 --- a/api/src/nlp/services/nlp-sample-entity.service.spec.ts +++ b/api/src/nlp/services/nlp-sample-entity.service.spec.ts @@ -6,6 +6,7 @@ * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). */ +import { CACHE_MANAGER } from '@nestjs/cache-manager'; import { MongooseModule } from '@nestjs/mongoose'; import { LanguageRepository } from '@/i18n/repositories/language.repository'; @@ -76,6 +77,12 @@ describe('NlpSampleEntityService', () => { NlpSampleEntityService, NlpEntityService, NlpValueService, + { + provide: CACHE_MANAGER, + useValue: { + del: jest.fn(), + }, + }, ], }); [ diff --git a/api/src/nlp/services/nlp-value.service.spec.ts b/api/src/nlp/services/nlp-value.service.spec.ts index 3f9991449..eacae6060 100644 --- a/api/src/nlp/services/nlp-value.service.spec.ts +++ b/api/src/nlp/services/nlp-value.service.spec.ts @@ -6,6 +6,7 @@ * 2. 
All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). */ +import { CACHE_MANAGER } from '@nestjs/cache-manager'; import { MongooseModule } from '@nestjs/mongoose'; import { BaseSchema } from '@/utils/generics/base-schema'; @@ -58,6 +59,12 @@ describe('NlpValueService', () => { NlpEntityRepository, NlpValueService, NlpEntityService, + { + provide: CACHE_MANAGER, + useValue: { + del: jest.fn(), + }, + }, ], }); [ diff --git a/api/src/setting/seeds/setting.seed-model.ts b/api/src/setting/seeds/setting.seed-model.ts index 27cf57852..7ca8569bf 100644 --- a/api/src/setting/seeds/setting.seed-model.ts +++ b/api/src/setting/seeds/setting.seed-model.ts @@ -38,6 +38,18 @@ export const DEFAULT_SETTINGS = [ }, weight: 2, }, + { + group: 'chatbot_settings', + label: 'default_nlu_penalty_factor', + value: 0.95, + type: SettingType.number, + config: { + min: 0, + max: 1, + step: 0.01, + }, + weight: 3, + }, { group: 'chatbot_settings', label: 'default_storage_helper', @@ -50,14 +62,14 @@ export const DEFAULT_SETTINGS = [ idKey: 'name', labelKey: 'name', }, - weight: 3, + weight: 4, }, { group: 'chatbot_settings', label: 'global_fallback', value: true, type: SettingType.checkbox, - weight: 4, + weight: 5, }, { group: 'chatbot_settings', @@ -72,7 +84,7 @@ export const DEFAULT_SETTINGS = [ idKey: 'id', labelKey: 'name', }, - weight: 5, + weight: 6, }, { group: 'chatbot_settings', @@ -82,7 +94,7 @@ export const DEFAULT_SETTINGS = [ "I'm really sorry but i don't quite understand what you are saying :(", ] as string[], type: SettingType.multiple_text, - weight: 6, + weight: 7, translatable: true, }, { diff --git a/api/src/utils/constants/cache.ts b/api/src/utils/constants/cache.ts index ccbf392a1..91dc30d04 100644 --- a/api/src/utils/constants/cache.ts +++ b/api/src/utils/constants/cache.ts @@ -18,3 +18,5 @@ export const LANGUAGES_CACHE_KEY = 'languages'; export const DEFAULT_LANGUAGE_CACHE_KEY = 'default_language'; export const ALLOWED_ORIGINS_CACHE_KEY = 'allowed_origins'; + +export const NLP_MAP_CACHE_KEY = 'nlp_map'; diff --git a/api/src/utils/constants/nlp.ts b/api/src/utils/constants/nlp.ts new file mode 100644 index 000000000..fb281d5d8 --- /dev/null +++ b/api/src/utils/constants/nlp.ts @@ -0,0 +1,9 @@ +/* + * Copyright © 2025 Hexastack. All rights reserved. + * + * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: + * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. + * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). + */ + +export const FALLBACK_DEFAULT_NLU_PENALTY_FACTOR = 0.95; diff --git a/api/src/utils/test/fixtures/nlpentity.ts b/api/src/utils/test/fixtures/nlpentity.ts index 16902dd88..deb44e97a 100644 --- a/api/src/utils/test/fixtures/nlpentity.ts +++ b/api/src/utils/test/fixtures/nlpentity.ts @@ -1,5 +1,5 @@ /* - * Copyright © 2024 Hexastack. All rights reserved. + * Copyright © 2025 Hexastack. All rights reserved. * * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: * 1. The name "Hexabot" is a trademark of Hexastack. 
You may not use this name in derivative works without express written permission. @@ -17,18 +17,21 @@ export const nlpEntityFixtures: NlpEntityCreateDto[] = [ lookups: ['trait'], doc: '', builtin: false, + weight: 1, }, { name: 'first_name', lookups: ['keywords'], doc: '', builtin: false, + weight: 1, }, { name: 'built_in', lookups: ['trait'], doc: '', builtin: true, + weight: 1, }, ]; diff --git a/api/src/utils/test/mocks/block.ts b/api/src/utils/test/mocks/block.ts index 48dcdd479..314d24d5e 100644 --- a/api/src/utils/test/mocks/block.ts +++ b/api/src/utils/test/mocks/block.ts @@ -16,7 +16,7 @@ import { ButtonType, PayloadType } from '@/chat/schemas/types/button'; import { CaptureVar } from '@/chat/schemas/types/capture-var'; import { OutgoingMessageFormat } from '@/chat/schemas/types/message'; import { BlockOptions, ContentOptions } from '@/chat/schemas/types/options'; -import { Pattern } from '@/chat/schemas/types/pattern'; +import { NlpPattern, Pattern } from '@/chat/schemas/types/pattern'; import { QuickReplyType } from '@/chat/schemas/types/quick-reply'; import { modelInstance } from './misc'; @@ -246,6 +246,84 @@ export const blockGetStarted = { message: ['Welcome! How are you ? '], } as unknown as BlockFull; +export const mockNlpPatternsSetOne: NlpPattern[] = [ + { + entity: 'intent', + match: 'value', + value: 'greeting', + }, + { + entity: 'firstname', + match: 'value', + value: 'jhon', + }, +]; + +export const mockNlpPatternsSetTwo: NlpPattern[] = [ + { + entity: 'intent', + match: 'value', + value: 'affirmation', + }, + { + entity: 'firstname', + match: 'value', + value: 'mark', + }, +]; + +export const mockNlpPatternsSetThree: NlpPattern[] = [ + { + entity: 'intent', + match: 'value', + value: 'greeting', + }, + { + entity: 'firstname', + match: 'entity', + }, +]; + +export const mockNlpBlock: BlockFull = { + ...baseBlockInstance, + name: 'Mock Nlp', + patterns: [ + 'Hello', + '/we*lcome/', + { label: 'Mock Nlp', value: 'MOCK_NLP' }, + [ + ...mockNlpPatternsSetOne, + [ + { + entity: 'intent', + match: 'value', + value: 'greeting', + }, + { + entity: 'firstname', + match: 'value', + value: 'doe', + }, + ], + ], + ], + trigger_labels: customerLabelsMock, + message: ['Good to see you again '], +} as unknown as BlockFull; + +export const mockModifiedNlpBlock: BlockFull = { + ...baseBlockInstance, + name: 'Modified Mock Nlp', + patterns: [ + 'Hello', + '/we*lcome/', + { label: 'Modified Mock Nlp', value: 'MODIFIED_MOCK_NLP' }, + [...mockNlpPatternsSetThree], + ], + trigger_labels: customerLabelsMock, + message: ['Hello there'], +} as unknown as BlockFull; + const patternsProduct: Pattern[] = [ 'produit', [ @@ -285,3 +363,5 @@ export const blockCarouselMock = { } as unknown as BlockFull; export const blocks: BlockFull[] = [blockGetStarted, blockEmpty]; + +export const nlpBlocks: BlockFull[] = [blockGetStarted, mockNlpBlock]; diff --git a/api/src/utils/test/mocks/nlp.ts b/api/src/utils/test/mocks/nlp.ts index 04a6e0bd8..a88b3bbd8 100644 --- a/api/src/utils/test/mocks/nlp.ts +++ b/api/src/utils/test/mocks/nlp.ts @@ -1,5 +1,5 @@ /* - * Copyright © 2024 Hexastack. All rights reserved. + * Copyright © 2025 Hexastack. All rights reserved. * * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. 
diff --git a/api/src/utils/test/mocks/nlp.ts b/api/src/utils/test/mocks/nlp.ts
index 04a6e0bd8..a88b3bbd8 100644
--- a/api/src/utils/test/mocks/nlp.ts
+++ b/api/src/utils/test/mocks/nlp.ts
@@ -1,5 +1,5 @@
/*
- * Copyright © 2024 Hexastack. All rights reserved.
+ * Copyright © 2025 Hexastack. All rights reserved.
 *
 * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
 * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
@@ -7,6 +7,7 @@
 */
import { NLU } from '@/helper/types';
+import { NlpCacheMap } from '@/nlp/schemas/types';
export const nlpEntitiesGreeting: NLU.ParseEntities = {
  entities: [
@@ -27,3 +28,52 @@
    },
  ],
};
+
+export const mockNlpEntitiesSetOne: NLU.ParseEntities = {
+  entities: [
+    {
+      entity: 'intent',
+      value: 'greeting',
+      confidence: 0.999,
+    },
+    {
+      entity: 'firstname',
+      value: 'jhon',
+      confidence: 0.5,
+    },
+  ],
+};
+
+export const mockNlpEntitiesSetTwo: NLU.ParseEntities = {
+  entities: [
+    {
+      entity: 'intent',
+      value: 'greeting',
+      confidence: 0.94,
+    },
+    {
+      entity: 'firstname',
+      value: 'doe',
+      confidence: 0.33,
+    },
+  ],
+};
+
+export const mockNlpCacheMap: NlpCacheMap = new Map([
+  [
+    'intent',
+    {
+      id: '67e3e41eff551ca5be70559c',
+      weight: 1,
+      values: ['greeting', 'affirmation'],
+    },
+  ],
+  [
+    'firstname',
+    {
+      id: '67e3e41eff551ca5be70559d',
+      weight: 1,
+      values: ['jhon', 'doe'],
+    },
+  ],
+]);
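Putting the fixtures together, here is a worked example of the intended scoring arithmetic, using the weights stored in `mockNlpCacheMap`, the confidences from `mockNlpEntitiesSetOne`, and the seeded default penalty factor of 0.95. The numbers are illustrative and not the output of the actual engine.

```ts
// Scoring mockNlpPatternsSetOne against mockNlpEntitiesSetOne.
// Both patterns are specific (match: 'value'), so no penalty applies:
//   intent = greeting  → 0.999 (confidence) × 1 (weight) = 0.999
//   firstname = jhon   → 0.5   (confidence) × 1 (weight) = 0.5
const specificScore = 0.999 * 1 + 0.5 * 1; // 1.499

// If `firstname` were a wildcard pattern (match: 'entity'), its contribution
// would be penalized: 0.5 × 1 × 0.95 = 0.475.
const wildcardScore = 0.999 * 1 + 0.5 * 1 * 0.95; // 1.474
```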
diff --git a/frontend/public/locales/en/chatbot_settings.json b/frontend/public/locales/en/chatbot_settings.json
index 170af9441..680e9f6c2 100644
--- a/frontend/public/locales/en/chatbot_settings.json
+++ b/frontend/public/locales/en/chatbot_settings.json
@@ -8,13 +8,15 @@
    "fallback_block": "Fallback Block",
    "default_nlu_helper": "Default NLU Helper",
    "default_llm_helper": "Default LLM Helper",
-    "default_storage_helper": "Default Storage Helper"
+    "default_storage_helper": "Default Storage Helper",
+    "default_nlu_penalty_factor": "Default NLU Penalty Factor"
  },
  "help": {
    "global_fallback": "Global fallback allows you to send custom messages when user entry does not match any of the block messages.",
    "fallback_message": "If no fallback block is selected, then one of these messages will be sent.",
    "default_nlu_helper": "The NLU helper is responsible for processing and understanding user inputs, including tasks like intent prediction, language detection, and entity recognition.",
    "default_llm_helper": "The LLM helper leverages advanced generative AI to perform tasks such as text generation, chat completion, and complex query responses.",
-    "default_storage_helper": "The storage helper defines where to store attachment files. By default, the default local storage helper stores them locally, but you can choose to use Minio or any other storage solution."
+    "default_storage_helper": "The storage helper defines where to store attachment files. By default, the default local storage helper stores them locally, but you can choose to use Minio or any other storage solution.",
+    "default_nlu_penalty_factor": "The NLU penalty factor is a coefficient (between 0 and 1) applied exclusively to NLU-based entity matching. It reduces the score contribution of patterns that match broadly (e.g. using wildcard values like Any) rather than specific entity values. This helps the engine prioritize blocks triggered by more precise NLU matches, without affecting other matching strategies such as text, regex, or interaction triggers."
  }
}
diff --git a/frontend/public/locales/en/translation.json b/frontend/public/locales/en/translation.json
index 5b204ff0a..1854a3655 100644
--- a/frontend/public/locales/en/translation.json
+++ b/frontend/public/locales/en/translation.json
@@ -121,7 +121,9 @@
    "file_error": "File not found",
    "audio_error": "Audio not found",
    "video_error": "Video not found",
-    "missing_fields_error": "Please make sure that all required fields are filled"
+    "missing_fields_error": "Please make sure that all required fields are filled",
+    "weight_required_error": "Weight is required or invalid",
+    "weight_positive_integer_error": "Weight must be a positive integer"
  },
  "menu": {
    "terms": "Terms of Use",
@@ -348,6 +350,7 @@
    "nlp_lookup_trait": "Trait",
    "doc": "Documentation",
    "builtin": "Built-in?",
+    "weight": "Weight",
    "dataset": "Dataset",
    "yes": "Yes",
    "no": "No",
diff --git a/frontend/public/locales/fr/chatbot_settings.json b/frontend/public/locales/fr/chatbot_settings.json
index 4b1ed4201..abefed82c 100644
--- a/frontend/public/locales/fr/chatbot_settings.json
+++ b/frontend/public/locales/fr/chatbot_settings.json
@@ -8,13 +8,15 @@
    "fallback_block": "Bloc de secours",
    "default_nlu_helper": "Utilitaire NLU par défaut",
    "default_llm_helper": "Utilitaire LLM par défaut",
-    "default_storage_helper": "Utilitaire de stockage par défaut"
+    "default_storage_helper": "Utilitaire de stockage par défaut",
+    "default_nlu_penalty_factor": "Facteur de pénalité NLU par défaut"
  },
  "help": {
    "global_fallback": "La réponse de secours globale vous permet d'envoyer des messages personnalisés lorsque l'entrée de l'utilisateur ne correspond à aucun des messages des blocs.",
    "fallback_message": "Si aucun bloc de secours n'est sélectionné, l'un de ces messages sera envoyé.",
    "default_nlu_helper": "Utilitaire du traitement et de la compréhension des entrées des utilisateurs, incluant des tâches telles que la prédiction d'intention, la détection de langue et la reconnaissance d'entités.",
    "default_llm_helper": "Utilitaire responsable de l'intelligence artificielle générative avancée pour effectuer des tâches telles que la génération de texte, la complétion de chat et les réponses à des requêtes complexes.",
-    "default_storage_helper": "Utilitaire de stockage définit l'emplacement où stocker les fichiers joints. Par défaut, le stockage local les conserve localement, mais vous pouvez choisir d'utiliser Minio ou toute autre solution de stockage."
+    "default_storage_helper": "Utilitaire de stockage définit l'emplacement où stocker les fichiers joints. Par défaut, le stockage local les conserve localement, mais vous pouvez choisir d'utiliser Minio ou toute autre solution de stockage.",
+    "default_nlu_penalty_factor": "Le facteur de pénalité NLU est un coefficient (entre 0 et 1) appliqué exclusivement aux correspondances d'entités basées sur NLU. Il réduit la contribution au score des motifs qui correspondent de manière générale (par exemple, en utilisant des valeurs génériques comme Any) plutôt que des valeurs d'entité spécifiques. Cela permet au chatbot de donner la priorité aux blocs déclenchés par des correspondances NLU plus précises, sans affecter d'autres stratégies de correspondance telles que le texte, les expressions regex ou les déclencheurs d'interaction."
  }
}
diff --git a/frontend/public/locales/fr/translation.json b/frontend/public/locales/fr/translation.json
index ce8c53528..8a80e4915 100644
--- a/frontend/public/locales/fr/translation.json
+++ b/frontend/public/locales/fr/translation.json
@@ -121,7 +121,9 @@
    "file_error": "Fichier introuvable",
    "audio_error": "Audio introuvable",
    "video_error": "Vidéo introuvable",
-    "missing_fields_error": "Veuillez vous assurer que tous les champs sont remplis correctement"
+    "missing_fields_error": "Veuillez vous assurer que tous les champs sont remplis correctement",
+    "weight_positive_integer_error": "Le poids doit être un nombre entier positif",
+    "weight_required_error": "Le poids est requis ou bien invalide"
  },
  "menu": {
    "terms": "Conditions d'utilisation",
@@ -347,6 +349,7 @@
    "nlp_lookup_trait": "Trait",
    "synonyms": "Synonymes",
    "doc": "Documentation",
+    "weight": "Poids",
    "builtin": "Intégré?",
    "dataset": "Données",
    "yes": "Oui",
diff --git a/frontend/src/app-components/tables/columns/getColumns.tsx b/frontend/src/app-components/tables/columns/getColumns.tsx
index 0c43fc869..46bd269ee 100644
--- a/frontend/src/app-components/tables/columns/getColumns.tsx
+++ b/frontend/src/app-components/tables/columns/getColumns.tsx
@@ -156,8 +156,7 @@ function StackComponent({
          disabled={
            (isDisabled && isDisabled(params.row)) ||
            (params.row.builtin &&
-              (requires.includes(PermissionAction.UPDATE) ||
-                requires.includes(PermissionAction.DELETE)))
+              requires.includes(PermissionAction.DELETE))
          }
          onClick={() => {
            action && action(params.row);
diff --git a/frontend/src/components/nlp/components/NlpEntity.tsx b/frontend/src/components/nlp/components/NlpEntity.tsx
index 43de506ca..48cd044d2 100644
--- a/frontend/src/components/nlp/components/NlpEntity.tsx
+++ b/frontend/src/components/nlp/components/NlpEntity.tsx
@@ -165,6 +165,16 @@ const NlpEntity = () => {
      resizable: false,
      renderHeader,
    },
+    {
+      maxWidth: 210,
+      field: "weight",
+      headerName: t("label.weight"),
+      renderCell: (val) => ,
+      sortable: true,
+      disableColumnMenu: true,
+      resizable: false,
+      renderHeader,
+    },
    {
      maxWidth: 90,
      field: "builtin",
diff --git a/frontend/src/components/nlp/components/NlpEntityForm.tsx b/frontend/src/components/nlp/components/NlpEntityForm.tsx
index e2fba34f9..2e7cfc369 100644
--- a/frontend/src/components/nlp/components/NlpEntityForm.tsx
+++ b/frontend/src/components/nlp/components/NlpEntityForm.tsx
@@ -60,6 +60,7 @@ export const NlpEntityVarForm: FC> = ({
      name: data?.name || "",
      doc: data?.doc || "",
      lookups: data?.lookups || ["keywords"],
+      weight: data?.weight || 1,
    },
  });
  const validationRules = {
@@ -82,6 +83,7 @@
      reset({
        name: data.name,
        doc: data.doc,
+        weight: data.weight,
      });
    } else {
      reset();
@@ -121,6 +123,7 @@
          required
          autoFocus
          helperText={errors.name ? errors.name.message : null}
+          disabled={data?.builtin}
        />
@@ -128,8 +131,35 @@
          label={t("label.doc")}
          {...register("doc")}
          multiline={true}
+          disabled={data?.builtin}
        />
+
+
+              value && Number.isInteger(value) && value! > 0
+                ? true
+                : t("message.weight_positive_integer_error"),
+          })}
+          type="number"
+          inputProps={{
+            min: 1,
+            step: 1,
+            inputMode: "numeric",
+            pattern: "[1-9][0-9]*",
+          }}
+          error={!!errors.weight}
+          helperText={errors.weight?.message}
+        />
+
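The weight field added to `NlpEntityVarForm` registers a `validate` rule. Extracted on its own it reads as below; only the validation expression and the translation keys come from this PR, while the standalone function name and the react-hook-form wiring shown in the comment are assumptions.

```ts
// Sketch of the positive-integer weight check used by the form field above.
const validateWeight = (value?: number): true | string =>
  value && Number.isInteger(value) && value > 0
    ? true
    : "Weight must be a positive integer"; // t("message.weight_positive_integer_error")

// Assumed wiring (illustrative):
// {...register("weight", { valueAsNumber: true, validate: validateWeight })}
```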
diff --git a/frontend/src/types/nlp-entity.types.ts b/frontend/src/types/nlp-entity.types.ts
index 2d0624e88..97c0ddce2 100644
--- a/frontend/src/types/nlp-entity.types.ts
+++ b/frontend/src/types/nlp-entity.types.ts
@@ -1,5 +1,5 @@
/*
- * Copyright © 2024 Hexastack. All rights reserved.
+ * Copyright © 2025 Hexastack. All rights reserved.
 *
 * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
 * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
@@ -19,6 +19,7 @@ export interface INlpEntityAttributes {
  lookups: Lookup[];
  doc?: string;
  builtin?: boolean;
+  weight?: number;
}

export enum NlpLookups {