mirror of
https://github.com/hexastack/hexabot
synced 2025-06-26 18:27:28 +00:00
Merge pull request #1000 from Hexastack/feat/implement-block-nlp-prioritization-strategy-v3
Feat/implement block nlp prioritization strategy v3
This commit is contained in:
@@ -13,6 +13,7 @@ import { MongooseModule } from '@nestjs/mongoose';
|
||||
import { AttachmentModule } from '@/attachment/attachment.module';
|
||||
import { ChannelModule } from '@/channel/channel.module';
|
||||
import { CmsModule } from '@/cms/cms.module';
|
||||
import { NlpModule } from '@/nlp/nlp.module';
|
||||
import { UserModule } from '@/user/user.module';
|
||||
|
||||
import { BlockController } from './controllers/block.controller';
|
||||
@@ -64,6 +65,7 @@ import { SubscriberService } from './services/subscriber.service';
|
||||
AttachmentModule,
|
||||
EventEmitter2,
|
||||
UserModule,
|
||||
NlpModule,
|
||||
],
|
||||
controllers: [
|
||||
CategoryController,
|
||||
|
||||
@@ -20,6 +20,7 @@ import { LanguageRepository } from '@/i18n/repositories/language.repository';
|
||||
import { LanguageModel } from '@/i18n/schemas/language.schema';
|
||||
import { I18nService } from '@/i18n/services/i18n.service';
|
||||
import { LanguageService } from '@/i18n/services/language.service';
|
||||
import { NlpService } from '@/nlp/services/nlp.service';
|
||||
import { PluginService } from '@/plugins/plugins.service';
|
||||
import { SettingService } from '@/setting/services/setting.service';
|
||||
import { InvitationRepository } from '@/user/repositories/invitation.repository';
|
||||
@@ -139,6 +140,10 @@ describe('BlockController', () => {
|
||||
set: jest.fn(),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: NlpService,
|
||||
useValue: {},
|
||||
},
|
||||
],
|
||||
});
|
||||
[blockController, blockService, categoryService] = await getMocks([
|
||||
|
||||
@@ -27,10 +27,24 @@ import WebChannelHandler from '@/extensions/channels/web/index.channel';
|
||||
import { WEB_CHANNEL_NAME } from '@/extensions/channels/web/settings';
|
||||
import { Web } from '@/extensions/channels/web/types';
|
||||
import WebEventWrapper from '@/extensions/channels/web/wrapper';
|
||||
import { HelperService } from '@/helper/helper.service';
|
||||
import { LanguageRepository } from '@/i18n/repositories/language.repository';
|
||||
import { LanguageModel } from '@/i18n/schemas/language.schema';
|
||||
import { I18nService } from '@/i18n/services/i18n.service';
|
||||
import { LanguageService } from '@/i18n/services/language.service';
|
||||
import { NlpEntityRepository } from '@/nlp/repositories/nlp-entity.repository';
|
||||
import { NlpSampleEntityRepository } from '@/nlp/repositories/nlp-sample-entity.repository';
|
||||
import { NlpSampleRepository } from '@/nlp/repositories/nlp-sample.repository';
|
||||
import { NlpValueRepository } from '@/nlp/repositories/nlp-value.repository';
|
||||
import { NlpEntityModel } from '@/nlp/schemas/nlp-entity.schema';
|
||||
import { NlpSampleEntityModel } from '@/nlp/schemas/nlp-sample-entity.schema';
|
||||
import { NlpSampleModel } from '@/nlp/schemas/nlp-sample.schema';
|
||||
import { NlpValueModel } from '@/nlp/schemas/nlp-value.schema';
|
||||
import { NlpEntityService } from '@/nlp/services/nlp-entity.service';
|
||||
import { NlpSampleEntityService } from '@/nlp/services/nlp-sample-entity.service';
|
||||
import { NlpSampleService } from '@/nlp/services/nlp-sample.service';
|
||||
import { NlpValueService } from '@/nlp/services/nlp-value.service';
|
||||
import { NlpService } from '@/nlp/services/nlp.service';
|
||||
import { PluginService } from '@/plugins/plugins.service';
|
||||
import { SettingService } from '@/setting/services/setting.service';
|
||||
import {
|
||||
@@ -38,17 +52,26 @@ import {
|
||||
installBlockFixtures,
|
||||
} from '@/utils/test/fixtures/block';
|
||||
import { installContentFixtures } from '@/utils/test/fixtures/content';
|
||||
import { installNlpValueFixtures } from '@/utils/test/fixtures/nlpvalue';
|
||||
import {
|
||||
blockEmpty,
|
||||
blockGetStarted,
|
||||
blockProductListMock,
|
||||
blocks,
|
||||
mockNlpAffirmationPatterns,
|
||||
mockNlpGreetingAnyNamePatterns,
|
||||
mockNlpGreetingNamePatterns,
|
||||
mockNlpGreetingPatterns,
|
||||
mockNlpGreetingWrongNamePatterns,
|
||||
} from '@/utils/test/mocks/block';
|
||||
import {
|
||||
contextBlankInstance,
|
||||
subscriberContextBlankInstance,
|
||||
} from '@/utils/test/mocks/conversation';
|
||||
import { nlpEntitiesGreeting } from '@/utils/test/mocks/nlp';
|
||||
import {
|
||||
mockNlpGreetingFullNameEntities,
|
||||
mockNlpGreetingNameEntities,
|
||||
} from '@/utils/test/mocks/nlp';
|
||||
import {
|
||||
closeInMongodConnection,
|
||||
rootMongooseTestModule,
|
||||
@@ -56,7 +79,7 @@ import {
|
||||
import { buildTestingMocks } from '@/utils/test/utils';
|
||||
|
||||
import { BlockRepository } from '../repositories/block.repository';
|
||||
import { Block, BlockModel } from '../schemas/block.schema';
|
||||
import { Block, BlockFull, BlockModel } from '../schemas/block.schema';
|
||||
import { Category, CategoryModel } from '../schemas/category.schema';
|
||||
import { LabelModel } from '../schemas/label.schema';
|
||||
import { FileType } from '../schemas/types/attachment';
|
||||
@@ -82,6 +105,7 @@ describe('BlockService', () => {
|
||||
rootMongooseTestModule(async () => {
|
||||
await installContentFixtures();
|
||||
await installBlockFixtures();
|
||||
await installNlpValueFixtures();
|
||||
}),
|
||||
MongooseModule.forFeature([
|
||||
BlockModel,
|
||||
@@ -91,6 +115,10 @@ describe('BlockService', () => {
|
||||
AttachmentModel,
|
||||
LabelModel,
|
||||
LanguageModel,
|
||||
NlpEntityModel,
|
||||
NlpSampleEntityModel,
|
||||
NlpValueModel,
|
||||
NlpSampleModel,
|
||||
]),
|
||||
],
|
||||
providers: [
|
||||
@@ -106,6 +134,16 @@ describe('BlockService', () => {
|
||||
ContentService,
|
||||
AttachmentService,
|
||||
LanguageService,
|
||||
NlpEntityRepository,
|
||||
NlpValueRepository,
|
||||
NlpSampleRepository,
|
||||
NlpSampleEntityRepository,
|
||||
NlpEntityService,
|
||||
NlpValueService,
|
||||
NlpSampleService,
|
||||
NlpSampleEntityService,
|
||||
NlpService,
|
||||
HelperService,
|
||||
{
|
||||
provide: PluginService,
|
||||
useValue: {},
|
||||
@@ -268,7 +306,7 @@ describe('BlockService', () => {
|
||||
|
||||
it('should match block with nlp', async () => {
|
||||
webEventGreeting.setSender(subscriberWithLabels);
|
||||
webEventGreeting.setNLP(nlpEntitiesGreeting);
|
||||
webEventGreeting.setNLP(mockNlpGreetingFullNameEntities);
|
||||
const result = await blockService.match(blocks, webEventGreeting);
|
||||
expect(result).toEqual(blockGetStarted);
|
||||
});
|
||||
@@ -276,47 +314,200 @@ describe('BlockService', () => {
|
||||
|
||||
describe('matchNLP', () => {
|
||||
it('should return undefined for match nlp against a block with no patterns', () => {
|
||||
const result = blockService.matchNLP(nlpEntitiesGreeting, blockEmpty);
|
||||
expect(result).toEqual(undefined);
|
||||
const result = blockService.getMatchingNluPatterns(
|
||||
mockNlpGreetingFullNameEntities,
|
||||
blockEmpty,
|
||||
);
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return undefined for match nlp when no nlp entities are provided', () => {
|
||||
const result = blockService.matchNLP({ entities: [] }, blockGetStarted);
|
||||
expect(result).toEqual(undefined);
|
||||
const result = blockService.getMatchingNluPatterns(
|
||||
{ entities: [] },
|
||||
blockGetStarted,
|
||||
);
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return match nlp patterns', () => {
|
||||
const result = blockService.matchNLP(
|
||||
nlpEntitiesGreeting,
|
||||
blockGetStarted,
|
||||
const result = blockService.getMatchingNluPatterns(
|
||||
mockNlpGreetingFullNameEntities,
|
||||
{
|
||||
...blockGetStarted,
|
||||
patterns: [...blockGetStarted.patterns, mockNlpGreetingNamePatterns],
|
||||
},
|
||||
);
|
||||
expect(result).toEqual([
|
||||
{
|
||||
entity: 'intent',
|
||||
match: 'value',
|
||||
value: 'greeting',
|
||||
},
|
||||
{
|
||||
entity: 'firstname',
|
||||
match: 'entity',
|
||||
},
|
||||
[
|
||||
{
|
||||
entity: 'intent',
|
||||
match: 'value',
|
||||
value: 'greeting',
|
||||
},
|
||||
{
|
||||
entity: 'firstname',
|
||||
match: 'value',
|
||||
value: 'jhon',
|
||||
},
|
||||
],
|
||||
]);
|
||||
});
|
||||
|
||||
it('should return undefined when it does not match nlp patterns', () => {
|
||||
const result = blockService.matchNLP(nlpEntitiesGreeting, {
|
||||
...blockGetStarted,
|
||||
patterns: [[{ entity: 'lastname', match: 'value', value: 'Belakhel' }]],
|
||||
});
|
||||
expect(result).toEqual(undefined);
|
||||
it('should return empty array when it does not match nlp patterns', () => {
|
||||
const result = blockService.getMatchingNluPatterns(
|
||||
mockNlpGreetingFullNameEntities,
|
||||
{
|
||||
...blockGetStarted,
|
||||
patterns: [
|
||||
[{ entity: 'lastname', match: 'value', value: 'Belakhel' }],
|
||||
],
|
||||
},
|
||||
);
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return undefined when unknown nlp patterns', () => {
|
||||
const result = blockService.matchNLP(nlpEntitiesGreeting, {
|
||||
it('should return empty array when unknown nlp patterns', () => {
|
||||
const result = blockService.getMatchingNluPatterns(
|
||||
mockNlpGreetingFullNameEntities,
|
||||
{
|
||||
...blockGetStarted,
|
||||
patterns: [[{ entity: 'product', match: 'value', value: 'pizza' }]],
|
||||
},
|
||||
);
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('matchBestNLP', () => {
|
||||
it('should return the block with the highest NLP score', async () => {
|
||||
const mockExpectedBlock: BlockFull = {
|
||||
...blockGetStarted,
|
||||
patterns: [[{ entity: 'product', match: 'value', value: 'pizza' }]],
|
||||
});
|
||||
expect(result).toEqual(undefined);
|
||||
patterns: [...blockGetStarted.patterns, mockNlpGreetingNamePatterns],
|
||||
};
|
||||
const blocks: BlockFull[] = [
|
||||
// no match
|
||||
blockGetStarted,
|
||||
// match
|
||||
mockExpectedBlock,
|
||||
// match
|
||||
{
|
||||
...blockGetStarted,
|
||||
patterns: [...blockGetStarted.patterns, mockNlpGreetingPatterns],
|
||||
},
|
||||
// no match
|
||||
{
|
||||
...blockGetStarted,
|
||||
patterns: [
|
||||
...blockGetStarted.patterns,
|
||||
mockNlpGreetingWrongNamePatterns,
|
||||
],
|
||||
},
|
||||
// no match
|
||||
{
|
||||
...blockGetStarted,
|
||||
patterns: [...blockGetStarted.patterns, mockNlpAffirmationPatterns],
|
||||
},
|
||||
// no match
|
||||
blockGetStarted,
|
||||
];
|
||||
|
||||
// Spy on calculateBlockScore to check if it's called
|
||||
const calculateBlockScoreSpy = jest.spyOn(
|
||||
blockService,
|
||||
'calculateNluPatternMatchScore',
|
||||
);
|
||||
const bestBlock = blockService.matchBestNLP(
|
||||
blocks,
|
||||
mockNlpGreetingNameEntities,
|
||||
);
|
||||
|
||||
// Ensure calculateBlockScore was called at least once for each block
|
||||
expect(calculateBlockScoreSpy).toHaveBeenCalledTimes(2); // Called for each block
|
||||
|
||||
// Assert that the block with the highest NLP score is selected
|
||||
expect(bestBlock).toEqual(mockExpectedBlock);
|
||||
});
|
||||
|
||||
it('should return the block with the highest NLP score applying penalties', async () => {
|
||||
const mockExpectedBlock: BlockFull = {
|
||||
...blockGetStarted,
|
||||
patterns: [...blockGetStarted.patterns, mockNlpGreetingNamePatterns],
|
||||
};
|
||||
const blocks: BlockFull[] = [
|
||||
// no match
|
||||
blockGetStarted,
|
||||
// match
|
||||
mockExpectedBlock,
|
||||
// match
|
||||
{
|
||||
...blockGetStarted,
|
||||
patterns: [...blockGetStarted.patterns, mockNlpGreetingPatterns],
|
||||
},
|
||||
// match
|
||||
{
|
||||
...blockGetStarted,
|
||||
patterns: [
|
||||
...blockGetStarted.patterns,
|
||||
mockNlpGreetingAnyNamePatterns,
|
||||
],
|
||||
},
|
||||
];
|
||||
const nlp = mockNlpGreetingNameEntities;
|
||||
// Spy on calculateBlockScore to check if it's called
|
||||
const calculateBlockScoreSpy = jest.spyOn(
|
||||
blockService,
|
||||
'calculateNluPatternMatchScore',
|
||||
);
|
||||
const bestBlock = blockService.matchBestNLP(blocks, nlp);
|
||||
|
||||
// Ensure calculateBlockScore was called at least once for each block
|
||||
expect(calculateBlockScoreSpy).toHaveBeenCalledTimes(3); // Called for each block
|
||||
|
||||
// Assert that the block with the highest NLP score is selected
|
||||
expect(bestBlock).toEqual(mockExpectedBlock);
|
||||
});
|
||||
|
||||
it('should return undefined if no blocks match or the list is empty', async () => {
|
||||
const blocks: BlockFull[] = [
|
||||
{
|
||||
...blockGetStarted,
|
||||
patterns: [...blockGetStarted.patterns, mockNlpAffirmationPatterns],
|
||||
},
|
||||
blockGetStarted,
|
||||
];
|
||||
|
||||
const bestBlock = blockService.matchBestNLP(
|
||||
blocks,
|
||||
mockNlpGreetingNameEntities,
|
||||
);
|
||||
|
||||
// Assert that undefined is returned when no blocks are available
|
||||
expect(bestBlock).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('calculateNluPatternMatchScore', () => {
|
||||
it('should calculate the correct NLP score for a block', async () => {
|
||||
const matchingScore = blockService.calculateNluPatternMatchScore(
|
||||
mockNlpGreetingNamePatterns,
|
||||
mockNlpGreetingNameEntities,
|
||||
);
|
||||
|
||||
expect(matchingScore).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it('should calculate the correct NLP score for a block and apply penalties ', async () => {
|
||||
const scoreWithoutPenalty = blockService.calculateNluPatternMatchScore(
|
||||
mockNlpGreetingNamePatterns,
|
||||
mockNlpGreetingNameEntities,
|
||||
);
|
||||
|
||||
const scoreWithPenalty = blockService.calculateNluPatternMatchScore(
|
||||
mockNlpGreetingAnyNamePatterns,
|
||||
mockNlpGreetingNameEntities,
|
||||
);
|
||||
|
||||
expect(scoreWithoutPenalty).toBeGreaterThan(scoreWithPenalty);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -16,6 +16,7 @@ import { CONSOLE_CHANNEL_NAME } from '@/extensions/channels/console/settings';
|
||||
import { NLU } from '@/helper/types';
|
||||
import { I18nService } from '@/i18n/services/i18n.service';
|
||||
import { LanguageService } from '@/i18n/services/language.service';
|
||||
import { NlpService } from '@/nlp/services/nlp.service';
|
||||
import { PluginService } from '@/plugins/plugins.service';
|
||||
import { PluginType } from '@/plugins/types';
|
||||
import { SettingService } from '@/setting/services/setting.service';
|
||||
@@ -25,7 +26,12 @@ import { getRandomElement } from '@/utils/helpers/safeRandom';
|
||||
import { BlockDto } from '../dto/block.dto';
|
||||
import { EnvelopeFactory } from '../helpers/envelope-factory';
|
||||
import { BlockRepository } from '../repositories/block.repository';
|
||||
import { Block, BlockFull, BlockPopulate } from '../schemas/block.schema';
|
||||
import {
|
||||
Block,
|
||||
BlockFull,
|
||||
BlockPopulate,
|
||||
BlockStub,
|
||||
} from '../schemas/block.schema';
|
||||
import { Label } from '../schemas/label.schema';
|
||||
import { Subscriber } from '../schemas/subscriber.schema';
|
||||
import { Context } from '../schemas/types/context';
|
||||
@@ -53,6 +59,7 @@ export class BlockService extends BaseService<
|
||||
private readonly pluginService: PluginService,
|
||||
protected readonly i18n: I18nService,
|
||||
protected readonly languageService: LanguageService,
|
||||
protected readonly nlpService: NlpService,
|
||||
) {
|
||||
super(repository);
|
||||
}
|
||||
@@ -161,18 +168,6 @@ export class BlockService extends BaseService<
|
||||
// Perform a text match (Text or Quick reply)
|
||||
const text = event.getText().trim();
|
||||
|
||||
// Check & catch user language through NLP
|
||||
const nlp = event.getNLP();
|
||||
if (nlp) {
|
||||
const languages = await this.languageService.getLanguages();
|
||||
const lang = nlp.entities.find((e) => e.entity === 'language');
|
||||
if (lang && Object.keys(languages).indexOf(lang.value) !== -1) {
|
||||
const profile = event.getSender();
|
||||
profile.language = lang.value;
|
||||
event.setSender(profile);
|
||||
}
|
||||
}
|
||||
|
||||
// Perform a text pattern match
|
||||
block = filteredBlocks
|
||||
.filter((b) => {
|
||||
@@ -181,20 +176,17 @@ export class BlockService extends BaseService<
|
||||
.shift();
|
||||
|
||||
// Perform an NLP Match
|
||||
const nlp = event.getNLP();
|
||||
if (!block && nlp) {
|
||||
// Find block pattern having the best match of nlp entities
|
||||
let nlpBest = 0;
|
||||
filteredBlocks.forEach((b, index, self) => {
|
||||
const nlpPattern = this.matchNLP(nlp, b);
|
||||
if (nlpPattern && nlpPattern.length > nlpBest) {
|
||||
nlpBest = nlpPattern.length;
|
||||
block = self[index];
|
||||
}
|
||||
});
|
||||
const scoredEntities =
|
||||
await this.nlpService.computePredictionScore(nlp);
|
||||
|
||||
if (scoredEntities.entities.length > 0) {
|
||||
block = this.matchBestNLP(filteredBlocks, scoredEntities);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Uknown event type => return false;
|
||||
// this.logger.error('Unable to recognize event type while matching', event);
|
||||
|
||||
return block;
|
||||
}
|
||||
|
||||
@@ -294,33 +286,33 @@ export class BlockService extends BaseService<
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs an NLP pattern match based on the best guessed entities and/or values
|
||||
* Performs an NLU pattern match based on the predicted entities and/or values
|
||||
*
|
||||
* @param nlp - Parsed NLP entities
|
||||
* @param block - The block to test
|
||||
*
|
||||
* @returns The NLP patterns that matches
|
||||
* @returns The NLU patterns that matches the predicted entities
|
||||
*/
|
||||
matchNLP(
|
||||
nlp: NLU.ParseEntities,
|
||||
block: Block | BlockFull,
|
||||
): NlpPattern[] | undefined {
|
||||
getMatchingNluPatterns<E extends NLU.ParseEntities, B extends BlockStub>(
|
||||
nlp: E,
|
||||
block: B,
|
||||
): NlpPattern[][] {
|
||||
// No nlp entities to check against
|
||||
if (nlp.entities.length === 0) {
|
||||
return undefined;
|
||||
return [];
|
||||
}
|
||||
|
||||
const nlpPatterns = block.patterns?.filter((p) => {
|
||||
const nlpPatterns = block.patterns.filter((p) => {
|
||||
return Array.isArray(p);
|
||||
}) as NlpPattern[][];
|
||||
|
||||
// No nlp patterns found
|
||||
if (nlpPatterns.length === 0) {
|
||||
return undefined;
|
||||
return [];
|
||||
}
|
||||
|
||||
// Find NLP pattern match based on best guessed entities
|
||||
return nlpPatterns.find((entities: NlpPattern[]) => {
|
||||
// Filter NLP patterns match based on best guessed entities
|
||||
return nlpPatterns.filter((entities: NlpPattern[]) => {
|
||||
return entities.every((ev: NlpPattern) => {
|
||||
if (ev.match === 'value') {
|
||||
return nlp.entities.find((e) => {
|
||||
@@ -338,6 +330,142 @@ export class BlockService extends BaseService<
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Finds and returns the block that best matches the given scored NLU entities.
 *
 * Each block is evaluated by matching its NLU patterns against the provided
 * `scoredEntities` (via `getMatchingNluPatterns`) and computing a confidence
 * score for each matching pattern group with `calculateNluPatternMatchScore`.
 * A block's score is that of its best-scoring pattern group; the block with
 * the highest score overall is returned.
 *
 * If no block yields a positive score, the function returns `undefined`.
 *
 * @param blocks - A list of blocks to evaluate, each potentially containing NLU patterns.
 * @param scoredEntities - The scored NLU entities to use for pattern matching.
 *
 * @returns The block with the highest NLU match score,
 * or `undefined` if no suitable match is found.
 */
matchBestNLP<B extends BlockStub>(
  blocks: B[],
  scoredEntities: NLU.ScoredEntities,
): B | undefined {
  // Explicit accumulator type: without it, the seed `{ block: undefined, ... }`
  // infers `block: undefined` and assigning a real block fails under strict mode.
  const bestMatch = blocks.reduce<{ block: B | undefined; score: number }>(
    (best, block) => {
      const matchedPatterns = this.getMatchingNluPatterns(
        scoredEntities,
        block,
      );

      // Compute the score (weighted sum = weight * confidence) for each of
      // the block's matching NLU pattern groups and keep the highest one
      const score = matchedPatterns.reduce((maxScore, patterns) => {
        const patternScore = this.calculateNluPatternMatchScore(
          patterns,
          scoredEntities,
        );
        return Math.max(maxScore, patternScore);
      }, 0);

      return score > best.score ? { block, score } : best;
    },
    { block: undefined, score: 0 },
  );

  return bestMatch.block;
}
|
||||
|
||||
/**
 * Aggregates the NLU pattern match score for a list of patterns by summing
 * the score of every pattern that matches one of the predicted entities.
 *
 * For each pattern, the first predicted entity that satisfies it (see
 * `matchesNluEntity`) contributes a score computed by `computePatternScore`,
 * which may apply a penalty for generic, entity-only matches. Patterns with
 * no matching entity contribute nothing.
 *
 * @param patterns - The patterns to evaluate against the NLU prediction.
 * @param prediction - The scored entities resulting from NLU inference.
 * @param [penaltyFactor=0.95] - Penalty factor applied to generic (entity-only) matches.
 *
 * @returns The total aggregated score across all matched patterns.
 */
calculateNluPatternMatchScore(
  patterns: NlpPattern[],
  prediction: NLU.ScoredEntities,
  penaltyFactor = 0.95,
): number {
  // Nothing to score when either side is empty
  if (!patterns.length || !prediction.entities.length) {
    return 0;
  }

  let totalScore = 0;
  for (const pattern of patterns) {
    const matchedEntity: NLU.ScoredEntity | undefined =
      prediction.entities.find((e) => this.matchesNluEntity(e, pattern));

    if (matchedEntity) {
      totalScore += this.computePatternScore(
        matchedEntity,
        pattern,
        penaltyFactor,
      );
    }
  }

  return totalScore;
}
|
||||
|
||||
/**
|
||||
* Checks if a given `ParseEntity` from the NLP model matches the specified pattern
|
||||
* and if its value exists within the values provided in the cache for the specified entity.
|
||||
*
|
||||
* @param e - The `ParseEntity` object from the NLP model, containing information about the entity and its value.
|
||||
* @param pattern - The `NlpPattern` object representing the entity and value pattern to be matched.
|
||||
* @param entityData - The `NlpCacheMapValues` object containing cached data, including entity values and weight, for the entity being matched.
|
||||
*
|
||||
* @returns A boolean indicating whether the `ParseEntity` matches the pattern and entity data from the cache.
|
||||
*
|
||||
* - The function compares the entity type between the `ParseEntity` and the `NlpPattern`.
|
||||
* - If the pattern's match type is not `'value'`, it checks if the entity's value is present in the cache's `values` array.
|
||||
* - If the pattern's match type is `'value'`, it further ensures that the entity's value matches the specified value in the pattern.
|
||||
* - Returns `true` if all conditions are met, otherwise `false`.
|
||||
*/
|
||||
private matchesNluEntity<E extends NLU.ParseEntity>(
|
||||
{ entity, value }: E,
|
||||
pattern: NlpPattern,
|
||||
): boolean {
|
||||
return (
|
||||
entity === pattern.entity &&
|
||||
(pattern.match !== 'value' || value === pattern.value)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Computes a pattern score by applying a penalty factor based on the matching rule of the pattern.
|
||||
*
|
||||
* This scoring mechanism allows prioritization of more specific patterns (entity + value) over
|
||||
* more generic ones (entity only).
|
||||
*
|
||||
* @param entity - The scored entity object containing the base score.
|
||||
* @param pattern - The pattern definition to match against the entity.
|
||||
* @param [penaltyFactor=0.95] - Optional penalty factor applied when the pattern only matches the entity (default is 0.95).
|
||||
*
|
||||
* @returns The final pattern score after applying any applicable penalty.
|
||||
*/
|
||||
private computePatternScore(
|
||||
entity: NLU.ScoredEntity,
|
||||
pattern: NlpPattern,
|
||||
penaltyFactor: number = 0.95,
|
||||
): number {
|
||||
if (!entity || !pattern) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// In case the pattern matches the entity regardless of the value (any)
|
||||
// we apply a penalty so that we prioritize other patterns where both entity and value matches
|
||||
const penalty = pattern.match === 'entity' ? penaltyFactor : 1;
|
||||
|
||||
return entity.score * penalty;
|
||||
}
|
||||
|
||||
/**
|
||||
* Matches an outcome-based block from a list of available blocks
|
||||
* based on the outcome of a system message.
|
||||
|
||||
@@ -33,6 +33,19 @@ import { LanguageRepository } from '@/i18n/repositories/language.repository';
|
||||
import { LanguageModel } from '@/i18n/schemas/language.schema';
|
||||
import { I18nService } from '@/i18n/services/i18n.service';
|
||||
import { LanguageService } from '@/i18n/services/language.service';
|
||||
import { NlpEntityRepository } from '@/nlp/repositories/nlp-entity.repository';
|
||||
import { NlpSampleEntityRepository } from '@/nlp/repositories/nlp-sample-entity.repository';
|
||||
import { NlpSampleRepository } from '@/nlp/repositories/nlp-sample.repository';
|
||||
import { NlpValueRepository } from '@/nlp/repositories/nlp-value.repository';
|
||||
import { NlpEntityModel } from '@/nlp/schemas/nlp-entity.schema';
|
||||
import { NlpSampleEntityModel } from '@/nlp/schemas/nlp-sample-entity.schema';
|
||||
import { NlpSampleModel } from '@/nlp/schemas/nlp-sample.schema';
|
||||
import { NlpValueModel } from '@/nlp/schemas/nlp-value.schema';
|
||||
import { NlpEntityService } from '@/nlp/services/nlp-entity.service';
|
||||
import { NlpSampleEntityService } from '@/nlp/services/nlp-sample-entity.service';
|
||||
import { NlpSampleService } from '@/nlp/services/nlp-sample.service';
|
||||
import { NlpValueService } from '@/nlp/services/nlp-value.service';
|
||||
import { NlpService } from '@/nlp/services/nlp.service';
|
||||
import { PluginService } from '@/plugins/plugins.service';
|
||||
import { SettingService } from '@/setting/services/setting.service';
|
||||
import { installBlockFixtures } from '@/utils/test/fixtures/block';
|
||||
@@ -100,6 +113,10 @@ describe('BlockService', () => {
|
||||
MenuModel,
|
||||
ContextVarModel,
|
||||
LanguageModel,
|
||||
NlpEntityModel,
|
||||
NlpSampleEntityModel,
|
||||
NlpValueModel,
|
||||
NlpSampleModel,
|
||||
]),
|
||||
JwtModule,
|
||||
],
|
||||
@@ -116,6 +133,11 @@ describe('BlockService', () => {
|
||||
MessageRepository,
|
||||
MenuRepository,
|
||||
LanguageRepository,
|
||||
ContextVarRepository,
|
||||
NlpEntityRepository,
|
||||
NlpSampleEntityRepository,
|
||||
NlpValueRepository,
|
||||
NlpSampleRepository,
|
||||
BlockService,
|
||||
CategoryService,
|
||||
ContentTypeService,
|
||||
@@ -129,8 +151,12 @@ describe('BlockService', () => {
|
||||
MenuService,
|
||||
WebChannelHandler,
|
||||
ContextVarService,
|
||||
ContextVarRepository,
|
||||
LanguageService,
|
||||
NlpEntityService,
|
||||
NlpValueService,
|
||||
NlpSampleService,
|
||||
NlpSampleEntityService,
|
||||
NlpService,
|
||||
{
|
||||
provide: HelperService,
|
||||
useValue: {},
|
||||
|
||||
@@ -21,6 +21,8 @@ import {
|
||||
import EventWrapper from '@/channel/lib/EventWrapper';
|
||||
import { config } from '@/config';
|
||||
import { HelperService } from '@/helper/helper.service';
|
||||
import { HelperType } from '@/helper/types';
|
||||
import { LanguageService } from '@/i18n/services/language.service';
|
||||
import { LoggerService } from '@/logger/logger.service';
|
||||
import { WebsocketGateway } from '@/websocket/websocket.gateway';
|
||||
|
||||
@@ -46,6 +48,7 @@ export class ChatService {
|
||||
private readonly websocketGateway: WebsocketGateway,
|
||||
private readonly helperService: HelperService,
|
||||
private readonly attachmentService: AttachmentService,
|
||||
private readonly languageService: LanguageService,
|
||||
) {}
|
||||
|
||||
/**
|
||||
@@ -330,15 +333,7 @@ export class ChatService {
|
||||
return;
|
||||
}
|
||||
|
||||
if (event.getText() && !event.getNLP()) {
|
||||
try {
|
||||
const helper = await this.helperService.getDefaultNluHelper();
|
||||
const nlp = await helper.predict(event.getText(), true);
|
||||
event.setNLP(nlp);
|
||||
} catch (err) {
|
||||
this.logger.error('Unable to perform NLP parse', err);
|
||||
}
|
||||
}
|
||||
await this.enrichEventWithNLU(event);
|
||||
|
||||
this.botService.handleMessageEvent(event);
|
||||
} catch (err) {
|
||||
@@ -346,6 +341,40 @@ export class ChatService {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Enriches an incoming event by performing NLP inference and updating the sender's language profile if detected.
|
||||
*
|
||||
* @param event - The incoming event object containing user input and metadata.
|
||||
* @returns Resolves when preprocessing is complete. Any errors are logged without throwing.
|
||||
*/
|
||||
async enrichEventWithNLU(event: EventWrapper<any, any>) {
|
||||
if (!event.getText() || event.getNLP()) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const helper = await this.helperService.getDefaultHelper(HelperType.NLU);
|
||||
const nlp = await helper.predict(event.getText(), true);
|
||||
|
||||
// Check & catch user language through NLP
|
||||
if (nlp) {
|
||||
const languages = await this.languageService.getLanguages();
|
||||
const spokenLanguage = nlp.entities.find(
|
||||
(e) => e.entity === 'language',
|
||||
);
|
||||
if (spokenLanguage && spokenLanguage.value in languages) {
|
||||
const profile = event.getSender();
|
||||
profile.language = spokenLanguage.value;
|
||||
event.setSender(profile);
|
||||
}
|
||||
}
|
||||
|
||||
event.setNLP(nlp);
|
||||
} catch (err) {
|
||||
this.logger.error('Unable to perform NLP parse', err);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle new subscriber and send notification the websocket
|
||||
*
|
||||
|
||||
@@ -139,6 +139,7 @@ describe('BaseNlpHelper', () => {
|
||||
updatedAt: new Date(),
|
||||
builtin: false,
|
||||
lookups: [],
|
||||
weight: 1,
|
||||
},
|
||||
entity2: {
|
||||
id: new ObjectId().toString(),
|
||||
@@ -147,6 +148,7 @@ describe('BaseNlpHelper', () => {
|
||||
updatedAt: new Date(),
|
||||
builtin: false,
|
||||
lookups: [],
|
||||
weight: 1,
|
||||
},
|
||||
});
|
||||
jest.spyOn(NlpValue, 'getValueMap').mockReturnValue({
|
||||
@@ -207,6 +209,7 @@ describe('BaseNlpHelper', () => {
|
||||
updatedAt: new Date(),
|
||||
builtin: false,
|
||||
lookups: [],
|
||||
weight: 1,
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -26,6 +26,14 @@ export namespace NLU {
|
||||
export interface ParseEntities {
|
||||
entities: ParseEntity[];
|
||||
}
|
||||
|
||||
export interface ScoredEntity extends ParseEntity {
|
||||
score: number; // Computed as confidence * weight
|
||||
}
|
||||
|
||||
export interface ScoredEntities extends ParseEntities {
|
||||
entities: ScoredEntity[];
|
||||
}
|
||||
}
|
||||
|
||||
export namespace LLM {
|
||||
|
||||
@@ -30,6 +30,14 @@ import { MenuModel } from '@/cms/schemas/menu.schema';
|
||||
import { ContentService } from '@/cms/services/content.service';
|
||||
import { MenuService } from '@/cms/services/menu.service';
|
||||
import { I18nService } from '@/i18n/services/i18n.service';
|
||||
import { NlpEntityRepository } from '@/nlp/repositories/nlp-entity.repository';
|
||||
import { NlpSampleEntityRepository } from '@/nlp/repositories/nlp-sample-entity.repository';
|
||||
import { NlpValueRepository } from '@/nlp/repositories/nlp-value.repository';
|
||||
import { NlpEntityModel } from '@/nlp/schemas/nlp-entity.schema';
|
||||
import { NlpSampleEntityModel } from '@/nlp/schemas/nlp-sample-entity.schema';
|
||||
import { NlpValueModel } from '@/nlp/schemas/nlp-value.schema';
|
||||
import { NlpEntityService } from '@/nlp/services/nlp-entity.service';
|
||||
import { NlpValueService } from '@/nlp/services/nlp-value.service';
|
||||
import { NlpService } from '@/nlp/services/nlp.service';
|
||||
import { PluginService } from '@/plugins/plugins.service';
|
||||
import { SettingService } from '@/setting/services/setting.service';
|
||||
@@ -75,6 +83,9 @@ describe('TranslationController', () => {
|
||||
BlockModel,
|
||||
ContentModel,
|
||||
LanguageModel,
|
||||
NlpEntityModel,
|
||||
NlpSampleEntityModel,
|
||||
NlpValueModel,
|
||||
]),
|
||||
],
|
||||
providers: [
|
||||
@@ -130,6 +141,11 @@ describe('TranslationController', () => {
|
||||
},
|
||||
LanguageService,
|
||||
LanguageRepository,
|
||||
NlpEntityRepository,
|
||||
NlpEntityService,
|
||||
NlpValueRepository,
|
||||
NlpValueService,
|
||||
NlpSampleEntityRepository,
|
||||
],
|
||||
});
|
||||
[translationService, translationController] = await getMocks([
|
||||
|
||||
@@ -47,7 +47,7 @@ export class LanguageService extends BaseService<
|
||||
* and the corresponding value is the `Language` object.
|
||||
*/
|
||||
@Cacheable(LANGUAGES_CACHE_KEY)
|
||||
async getLanguages() {
|
||||
async getLanguages(): Promise<Record<string, Language>> {
|
||||
const languages = await this.findAll();
|
||||
return languages.reduce((acc, curr) => {
|
||||
return {
|
||||
|
||||
@@ -6,8 +6,10 @@
|
||||
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
|
||||
*/
|
||||
|
||||
import { CACHE_MANAGER } from '@nestjs/cache-manager';
|
||||
import {
|
||||
BadRequestException,
|
||||
ConflictException,
|
||||
MethodNotAllowedException,
|
||||
NotFoundException,
|
||||
} from '@nestjs/common';
|
||||
@@ -27,7 +29,7 @@ import {
|
||||
import { TFixtures } from '@/utils/test/types';
|
||||
import { buildTestingMocks } from '@/utils/test/utils';
|
||||
|
||||
import { NlpEntityCreateDto } from '../dto/nlp-entity.dto';
|
||||
import { NlpEntityCreateDto, NlpEntityUpdateDto } from '../dto/nlp-entity.dto';
|
||||
import { NlpEntityRepository } from '../repositories/nlp-entity.repository';
|
||||
import { NlpSampleEntityRepository } from '../repositories/nlp-sample-entity.repository';
|
||||
import { NlpValueRepository } from '../repositories/nlp-value.repository';
|
||||
@@ -67,6 +69,12 @@ describe('NlpEntityController', () => {
|
||||
NlpValueService,
|
||||
NlpSampleEntityRepository,
|
||||
NlpValueRepository,
|
||||
{
|
||||
provide: CACHE_MANAGER,
|
||||
useValue: {
|
||||
del: jest.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
[nlpEntityController, nlpValueService, nlpEntityService] = await getMocks([
|
||||
@@ -109,6 +117,7 @@ describe('NlpEntityController', () => {
|
||||
) as NlpEntityFull['values'],
|
||||
lookups: curr.lookups!,
|
||||
builtin: curr.builtin!,
|
||||
weight: curr.weight!,
|
||||
});
|
||||
return acc;
|
||||
},
|
||||
@@ -163,6 +172,7 @@ describe('NlpEntityController', () => {
|
||||
name: 'sentiment',
|
||||
lookups: ['trait'],
|
||||
builtin: false,
|
||||
weight: 1,
|
||||
};
|
||||
const result = await nlpEntityController.create(sentimentEntity);
|
||||
expect(result).toEqualPayload(sentimentEntity);
|
||||
@@ -191,18 +201,18 @@ describe('NlpEntityController', () => {
|
||||
describe('findOne', () => {
|
||||
it('should find a nlp entity', async () => {
|
||||
const firstNameEntity = await nlpEntityService.findOne({
|
||||
name: 'first_name',
|
||||
name: 'firstname',
|
||||
});
|
||||
const result = await nlpEntityController.findOne(firstNameEntity!.id, []);
|
||||
|
||||
expect(result).toEqualPayload(
|
||||
nlpEntityFixtures.find(({ name }) => name === 'first_name')!,
|
||||
nlpEntityFixtures.find(({ name }) => name === 'firstname')!,
|
||||
);
|
||||
});
|
||||
|
||||
it('should find a nlp entity, and populate its values', async () => {
|
||||
const firstNameEntity = await nlpEntityService.findOne({
|
||||
name: 'first_name',
|
||||
name: 'firstname',
|
||||
});
|
||||
const firstNameValues = await nlpValueService.findOne({ value: 'jhon' });
|
||||
const firstNameWithValues: NlpEntityFull = {
|
||||
@@ -214,6 +224,7 @@ describe('NlpEntityController', () => {
|
||||
updatedAt: firstNameEntity!.updatedAt,
|
||||
lookups: firstNameEntity!.lookups,
|
||||
builtin: firstNameEntity!.builtin,
|
||||
weight: firstNameEntity!.weight,
|
||||
};
|
||||
const result = await nlpEntityController.findOne(firstNameEntity!.id, [
|
||||
'values',
|
||||
@@ -231,13 +242,14 @@ describe('NlpEntityController', () => {
|
||||
describe('updateOne', () => {
|
||||
it('should update a nlp entity', async () => {
|
||||
const firstNameEntity = await nlpEntityService.findOne({
|
||||
name: 'first_name',
|
||||
name: 'firstname',
|
||||
});
|
||||
const updatedNlpEntity: NlpEntityCreateDto = {
|
||||
name: 'updated',
|
||||
doc: '',
|
||||
lookups: ['trait'],
|
||||
builtin: false,
|
||||
weight: 1,
|
||||
};
|
||||
const result = await nlpEntityController.updateOne(
|
||||
firstNameEntity!.id,
|
||||
@@ -258,16 +270,58 @@ describe('NlpEntityController', () => {
|
||||
).rejects.toThrow(NotFoundException);
|
||||
});
|
||||
|
||||
it('should throw exception when nlp entity is builtin', async () => {
|
||||
const updateNlpEntity: NlpEntityCreateDto = {
|
||||
name: 'updated',
|
||||
doc: '',
|
||||
lookups: ['trait'],
|
||||
builtin: false,
|
||||
it('should update weight if entity is builtin and weight is provided', async () => {
|
||||
const updatedNlpEntity: NlpEntityUpdateDto = {
|
||||
weight: 4,
|
||||
};
|
||||
const findOneSpy = jest.spyOn(nlpEntityService, 'findOne');
|
||||
const updateWeightSpy = jest.spyOn(nlpEntityService, 'updateWeight');
|
||||
|
||||
const result = await nlpEntityController.updateOne(
|
||||
buitInEntityId!,
|
||||
updatedNlpEntity,
|
||||
);
|
||||
|
||||
expect(findOneSpy).toHaveBeenCalledWith(buitInEntityId!);
|
||||
expect(updateWeightSpy).toHaveBeenCalledWith(
|
||||
buitInEntityId!,
|
||||
updatedNlpEntity.weight,
|
||||
);
|
||||
expect(result.weight).toBe(updatedNlpEntity.weight);
|
||||
});
|
||||
|
||||
it('should throw an exception if entity is builtin but weight not provided', async () => {
|
||||
await expect(
|
||||
nlpEntityController.updateOne(buitInEntityId!, updateNlpEntity),
|
||||
).rejects.toThrow(MethodNotAllowedException);
|
||||
nlpEntityController.updateOne(buitInEntityId!, {
|
||||
name: 'updated',
|
||||
doc: '',
|
||||
lookups: ['trait'],
|
||||
builtin: false,
|
||||
} as any),
|
||||
).rejects.toThrow(ConflictException);
|
||||
});
|
||||
|
||||
it('should update only the weight of the builtin entity', async () => {
|
||||
const updatedNlpEntity: NlpEntityUpdateDto = {
|
||||
weight: 8,
|
||||
};
|
||||
const originalEntity: NlpEntity | null = await nlpEntityService.findOne(
|
||||
buitInEntityId!,
|
||||
);
|
||||
|
||||
const result: NlpEntity = await nlpEntityController.updateOne(
|
||||
buitInEntityId!,
|
||||
updatedNlpEntity,
|
||||
);
|
||||
|
||||
// Check weight is updated
|
||||
expect(result.weight).toBe(updatedNlpEntity.weight);
|
||||
|
||||
Object.entries(originalEntity!).forEach(([key, value]) => {
|
||||
if (key !== 'weight' && key !== 'updatedAt') {
|
||||
expect(result[key as keyof typeof result]).toEqual(value);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
describe('deleteMany', () => {
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
import {
|
||||
BadRequestException,
|
||||
Body,
|
||||
ConflictException,
|
||||
Controller,
|
||||
Delete,
|
||||
Get,
|
||||
@@ -33,7 +34,7 @@ import { PopulatePipe } from '@/utils/pipes/populate.pipe';
|
||||
import { SearchFilterPipe } from '@/utils/pipes/search-filter.pipe';
|
||||
import { TFilterQuery } from '@/utils/types/filter.types';
|
||||
|
||||
import { NlpEntityCreateDto } from '../dto/nlp-entity.dto';
|
||||
import { NlpEntityCreateDto, NlpEntityUpdateDto } from '../dto/nlp-entity.dto';
|
||||
import {
|
||||
NlpEntity,
|
||||
NlpEntityFull,
|
||||
@@ -142,7 +143,7 @@ export class NlpEntityController extends BaseController<
|
||||
* This endpoint allows updating an existing NLP entity. The entity must not be a built-in entity.
|
||||
*
|
||||
* @param id - The ID of the NLP entity to update.
|
||||
* @param updateNlpEntityDto - The new data for the NLP entity.
|
||||
* @param nlpEntityDto - The new data for the NLP entity.
|
||||
*
|
||||
* @returns The updated NLP entity.
|
||||
*/
|
||||
@@ -150,20 +151,29 @@ export class NlpEntityController extends BaseController<
|
||||
@Patch(':id')
|
||||
async updateOne(
|
||||
@Param('id') id: string,
|
||||
@Body() updateNlpEntityDto: NlpEntityCreateDto,
|
||||
@Body() nlpEntityDto: NlpEntityUpdateDto,
|
||||
): Promise<NlpEntity> {
|
||||
const nlpEntity = await this.nlpEntityService.findOne(id);
|
||||
if (!nlpEntity) {
|
||||
this.logger.warn(`Unable to update NLP Entity by id ${id}`);
|
||||
throw new NotFoundException(`NLP Entity with ID ${id} not found`);
|
||||
}
|
||||
|
||||
if (nlpEntity.builtin) {
|
||||
throw new MethodNotAllowedException(
|
||||
`Cannot update builtin NLP Entity ${nlpEntity.name}`,
|
||||
);
|
||||
if (nlpEntityDto.weight) {
|
||||
// Only allow weight update for builtin entities
|
||||
return await this.nlpEntityService.updateWeight(
|
||||
id,
|
||||
nlpEntityDto.weight,
|
||||
);
|
||||
} else {
|
||||
throw new ConflictException(
|
||||
`Cannot update builtin NLP Entity ${nlpEntity.name} except for weight`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return await this.nlpEntityService.updateOne(id, updateNlpEntityDto);
|
||||
return await this.nlpEntityService.updateOne(id, nlpEntityDto);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -372,6 +372,7 @@ describe('NlpSampleController', () => {
|
||||
lookups: ['trait'],
|
||||
doc: '',
|
||||
builtin: false,
|
||||
weight: 1,
|
||||
};
|
||||
const priceValueEntity = await nlpEntityService.findOne({
|
||||
name: 'intent',
|
||||
|
||||
@@ -6,6 +6,7 @@
|
||||
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
|
||||
*/
|
||||
|
||||
import { CACHE_MANAGER } from '@nestjs/cache-manager';
|
||||
import { BadRequestException, NotFoundException } from '@nestjs/common';
|
||||
import { MongooseModule } from '@nestjs/mongoose';
|
||||
|
||||
@@ -57,6 +58,12 @@ describe('NlpValueController', () => {
|
||||
NlpSampleEntityRepository,
|
||||
NlpEntityService,
|
||||
NlpEntityRepository,
|
||||
{
|
||||
provide: CACHE_MANAGER,
|
||||
useValue: {
|
||||
del: jest.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
[nlpValueController, nlpValueService, nlpEntityService] = await getMocks([
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright © 2024 Hexastack. All rights reserved.
|
||||
* Copyright © 2025 Hexastack. All rights reserved.
|
||||
*
|
||||
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
|
||||
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
|
||||
@@ -12,9 +12,11 @@ import {
|
||||
IsBoolean,
|
||||
IsIn,
|
||||
IsNotEmpty,
|
||||
IsNumber,
|
||||
IsOptional,
|
||||
IsString,
|
||||
Matches,
|
||||
Validate,
|
||||
} from 'class-validator';
|
||||
|
||||
import { DtoConfig } from '@/utils/types/dto.types';
|
||||
@@ -47,8 +49,38 @@ export class NlpEntityCreateDto {
|
||||
@IsBoolean()
|
||||
@IsOptional()
|
||||
builtin?: boolean;
|
||||
|
||||
@ApiPropertyOptional({
|
||||
description: 'Nlp entity associated weight for next block triggering',
|
||||
type: Number,
|
||||
})
|
||||
@IsOptional()
|
||||
@Validate((value: number) => value > 0, {
|
||||
message: 'Weight must be a strictly positive number',
|
||||
})
|
||||
@IsNumber({ allowNaN: false, allowInfinity: false })
|
||||
weight?: number;
|
||||
}
|
||||
|
||||
export class NlpEntityUpdateDto {
|
||||
@ApiPropertyOptional({ type: String })
|
||||
@IsString()
|
||||
@IsOptional()
|
||||
foreign_id?: string;
|
||||
|
||||
@ApiPropertyOptional({
|
||||
description: 'Nlp entity associated weight for next block triggering',
|
||||
type: Number,
|
||||
})
|
||||
@IsOptional()
|
||||
@Validate((value: number) => value > 0, {
|
||||
message: 'Weight must be a strictly positive number',
|
||||
})
|
||||
@IsNumber({ allowNaN: false, allowInfinity: false })
|
||||
weight?: number;
|
||||
}
|
||||
|
||||
export type NlpEntityDto = DtoConfig<{
|
||||
create: NlpEntityCreateDto;
|
||||
update: NlpEntityUpdateDto;
|
||||
}>;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright © 2024 Hexastack. All rights reserved.
|
||||
* Copyright © 2025 Hexastack. All rights reserved.
|
||||
*
|
||||
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
|
||||
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
|
||||
|
||||
@@ -51,7 +51,7 @@ describe('NlpEntityRepository', () => {
|
||||
NlpValueRepository,
|
||||
]);
|
||||
firstNameNlpEntity = await nlpEntityRepository.findOne({
|
||||
name: 'first_name',
|
||||
name: 'firstname',
|
||||
});
|
||||
});
|
||||
|
||||
@@ -91,7 +91,7 @@ describe('NlpEntityRepository', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('findPageAndPopulate', () => {
|
||||
describe('findAndPopulate', () => {
|
||||
it('should return all nlp entities with populate', async () => {
|
||||
const pageQuery = getPageQuery<NlpEntity>({
|
||||
sort: ['name', 'desc'],
|
||||
@@ -99,7 +99,7 @@ describe('NlpEntityRepository', () => {
|
||||
const firstNameValues = await nlpValueRepository.find({
|
||||
entity: firstNameNlpEntity!.id,
|
||||
});
|
||||
const result = await nlpEntityRepository.findPageAndPopulate(
|
||||
const result = await nlpEntityRepository.findAndPopulate(
|
||||
{ _id: firstNameNlpEntity!.id },
|
||||
pageQuery,
|
||||
);
|
||||
|
||||
@@ -71,18 +71,15 @@ describe('NlpValueRepository', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('findPageAndPopulate', () => {
|
||||
it('should return all nlp entities with populate', async () => {
|
||||
describe('findAndPopulate', () => {
|
||||
it('should return all nlp values with populate', async () => {
|
||||
const pageQuery = getPageQuery<NlpValue>({
|
||||
sort: ['value', 'desc'],
|
||||
sort: ['createdAt', 'asc'],
|
||||
});
|
||||
const result = await nlpValueRepository.findPageAndPopulate(
|
||||
{},
|
||||
pageQuery,
|
||||
);
|
||||
const result = await nlpValueRepository.findAndPopulate({}, pageQuery);
|
||||
const nlpValueFixturesWithEntities = nlpValueFixtures.reduce(
|
||||
(acc, curr) => {
|
||||
const ValueWithEntities = {
|
||||
const fullValue: NlpValueFull = {
|
||||
...curr,
|
||||
entity: nlpEntityFixtures[
|
||||
parseInt(curr.entity!)
|
||||
@@ -90,13 +87,21 @@ describe('NlpValueRepository', () => {
|
||||
builtin: curr.builtin!,
|
||||
expressions: curr.expressions!,
|
||||
metadata: curr.metadata!,
|
||||
id: '',
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
acc.push(ValueWithEntities);
|
||||
acc.push(fullValue);
|
||||
return acc;
|
||||
},
|
||||
[] as TFixtures<NlpValueFull>[],
|
||||
);
|
||||
expect(result).toEqualPayload(nlpValueFixturesWithEntities);
|
||||
expect(result).toEqualPayload(nlpValueFixturesWithEntities, [
|
||||
'id',
|
||||
'createdAt',
|
||||
'updatedAt',
|
||||
'metadata',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright © 2024 Hexastack. All rights reserved.
|
||||
* Copyright © 2025 Hexastack. All rights reserved.
|
||||
*
|
||||
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
|
||||
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
|
||||
@@ -58,6 +58,19 @@ export class NlpEntityStub extends BaseSchema {
|
||||
@Prop({ type: Boolean, default: false })
|
||||
builtin: boolean;
|
||||
|
||||
/**
|
||||
* Entity's weight used to determine the next block to trigger in the conversational flow.
|
||||
*/
|
||||
@Prop({
|
||||
type: Number,
|
||||
default: 1,
|
||||
validate: {
|
||||
validator: (value: number) => value > 0,
|
||||
message: 'Weight must be a strictly positive number',
|
||||
},
|
||||
})
|
||||
weight: number;
|
||||
|
||||
/**
|
||||
* Returns a map object for entities
|
||||
* @param entities - Array of entities
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
/*
|
||||
* Copyright © 2024 Hexastack. All rights reserved.
|
||||
* Copyright © 2025 Hexastack. All rights reserved.
|
||||
*
|
||||
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
|
||||
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
|
||||
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
|
||||
*/
|
||||
|
||||
import { NlpEntityStub } from './nlp-entity.schema';
|
||||
import { NlpEntityFull, NlpEntityStub } from './nlp-entity.schema';
|
||||
import { NlpValueStub } from './nlp-value.schema';
|
||||
|
||||
export interface NlpSampleEntityValue {
|
||||
@@ -25,3 +25,5 @@ export enum NlpSampleState {
|
||||
test = 'test',
|
||||
inbox = 'inbox',
|
||||
}
|
||||
|
||||
export type NlpCacheMap = Map<string, NlpEntityFull>;
|
||||
|
||||
@@ -6,8 +6,10 @@
|
||||
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
|
||||
*/
|
||||
|
||||
import { CACHE_MANAGER } from '@nestjs/cache-manager';
|
||||
import { MongooseModule } from '@nestjs/mongoose';
|
||||
|
||||
import { NOT_FOUND_ID } from '@/utils/constants/mock';
|
||||
import { nlpEntityFixtures } from '@/utils/test/fixtures/nlpentity';
|
||||
import { installNlpValueFixtures } from '@/utils/test/fixtures/nlpvalue';
|
||||
import { getPageQuery } from '@/utils/test/pagination';
|
||||
@@ -27,7 +29,7 @@ import { NlpValueModel } from '../schemas/nlp-value.schema';
|
||||
import { NlpEntityService } from './nlp-entity.service';
|
||||
import { NlpValueService } from './nlp-value.service';
|
||||
|
||||
describe('nlpEntityService', () => {
|
||||
describe('NlpEntityService', () => {
|
||||
let nlpEntityService: NlpEntityService;
|
||||
let nlpEntityRepository: NlpEntityRepository;
|
||||
let nlpValueRepository: NlpValueRepository;
|
||||
@@ -48,6 +50,14 @@ describe('nlpEntityService', () => {
|
||||
NlpValueService,
|
||||
NlpValueRepository,
|
||||
NlpSampleEntityRepository,
|
||||
{
|
||||
provide: CACHE_MANAGER,
|
||||
useValue: {
|
||||
del: jest.fn(),
|
||||
set: jest.fn(),
|
||||
get: jest.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
[nlpEntityService, nlpEntityRepository, nlpValueRepository] =
|
||||
@@ -77,7 +87,7 @@ describe('nlpEntityService', () => {
|
||||
describe('findOneAndPopulate', () => {
|
||||
it('should return a nlp entity with populate', async () => {
|
||||
const firstNameNlpEntity = await nlpEntityRepository.findOne({
|
||||
name: 'first_name',
|
||||
name: 'firstname',
|
||||
});
|
||||
const result = await nlpEntityService.findOneAndPopulate(
|
||||
firstNameNlpEntity!.id,
|
||||
@@ -98,7 +108,7 @@ describe('nlpEntityService', () => {
|
||||
it('should return all nlp entities with populate', async () => {
|
||||
const pageQuery = getPageQuery<NlpEntity>({ sort: ['name', 'desc'] });
|
||||
const firstNameNlpEntity = await nlpEntityRepository.findOne({
|
||||
name: 'first_name',
|
||||
name: 'firstname',
|
||||
});
|
||||
const result = await nlpEntityService.findPageAndPopulate(
|
||||
{ _id: firstNameNlpEntity!.id },
|
||||
@@ -117,6 +127,61 @@ describe('nlpEntityService', () => {
|
||||
expect(result).toEqualPayload(entitiesWithValues);
|
||||
});
|
||||
});
|
||||
describe('NlpEntityService - updateWeight', () => {
|
||||
let createdEntity: NlpEntity;
|
||||
beforeEach(async () => {
|
||||
createdEntity = await nlpEntityRepository.create({
|
||||
name: 'testentity',
|
||||
builtin: false,
|
||||
weight: 3,
|
||||
});
|
||||
});
|
||||
|
||||
it('should update the weight of an NLP entity', async () => {
|
||||
const newWeight = 8;
|
||||
|
||||
const updatedEntity = await nlpEntityService.updateWeight(
|
||||
createdEntity.id,
|
||||
newWeight,
|
||||
);
|
||||
|
||||
expect(updatedEntity.weight).toBe(newWeight);
|
||||
});
|
||||
|
||||
it('should handle updating weight of non-existent entity', async () => {
|
||||
const nonExistentId = NOT_FOUND_ID;
|
||||
|
||||
try {
|
||||
await nlpEntityService.updateWeight(nonExistentId, 5);
|
||||
fail('Expected error was not thrown');
|
||||
} catch (error) {
|
||||
expect(error).toBeDefined();
|
||||
}
|
||||
});
|
||||
|
||||
it('should use default weight of 1 when creating entity without weight', async () => {
|
||||
const createdEntity = await nlpEntityRepository.create({
|
||||
name: 'entityWithoutWeight',
|
||||
builtin: true,
|
||||
// weight not specified
|
||||
});
|
||||
|
||||
expect(createdEntity.weight).toBe(1);
|
||||
});
|
||||
|
||||
it('should throw an error if weight is negative', async () => {
|
||||
const invalidWeight = -3;
|
||||
|
||||
await expect(
|
||||
nlpEntityService.updateWeight(createdEntity.id, invalidWeight),
|
||||
).rejects.toThrow('Weight must be a strictly positive number');
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
// Clean the collection after each test
|
||||
await nlpEntityRepository.deleteOne(createdEntity.id);
|
||||
});
|
||||
});
|
||||
|
||||
describe('storeNewEntities', () => {
|
||||
it('should store new entities', async () => {
|
||||
@@ -150,4 +215,54 @@ describe('nlpEntityService', () => {
|
||||
expect(result).toEqualPayload(storedEntites);
|
||||
});
|
||||
});
|
||||
describe('getNlpMap', () => {
|
||||
it('should return a NlpCacheMap with the correct structure', async () => {
|
||||
// Act
|
||||
const result = await nlpEntityService.getNlpMap();
|
||||
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.get('firstname')).toEqualPayload(
|
||||
{
|
||||
name: 'firstname',
|
||||
lookups: ['keywords'],
|
||||
doc: '',
|
||||
builtin: false,
|
||||
weight: 0.85,
|
||||
values: [
|
||||
{
|
||||
value: 'jhon',
|
||||
expressions: ['john', 'joohn', 'jhonny'],
|
||||
builtin: true,
|
||||
doc: '',
|
||||
},
|
||||
],
|
||||
},
|
||||
['id', 'createdAt', 'updatedAt', 'metadata', 'entity'],
|
||||
);
|
||||
expect(result.get('subject')).toEqualPayload(
|
||||
{
|
||||
name: 'subject',
|
||||
lookups: ['trait'],
|
||||
doc: '',
|
||||
builtin: false,
|
||||
weight: 0.95,
|
||||
values: [
|
||||
{
|
||||
value: 'product',
|
||||
expressions: [],
|
||||
builtin: false,
|
||||
doc: '',
|
||||
},
|
||||
{
|
||||
value: 'claim',
|
||||
expressions: [],
|
||||
builtin: false,
|
||||
doc: '',
|
||||
},
|
||||
],
|
||||
},
|
||||
['id', 'createdAt', 'updatedAt', 'metadata', 'entity'],
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,13 +1,18 @@
|
||||
/*
|
||||
* Copyright © 2024 Hexastack. All rights reserved.
|
||||
* Copyright © 2025 Hexastack. All rights reserved.
|
||||
*
|
||||
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
|
||||
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
|
||||
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
|
||||
*/
|
||||
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { CACHE_MANAGER } from '@nestjs/cache-manager';
|
||||
import { BadRequestException, Inject, Injectable } from '@nestjs/common';
|
||||
import { OnEvent } from '@nestjs/event-emitter';
|
||||
import { Cache } from 'cache-manager';
|
||||
|
||||
import { NLP_MAP_CACHE_KEY } from '@/utils/constants/cache';
|
||||
import { Cacheable } from '@/utils/decorators/cacheable.decorator';
|
||||
import { BaseService } from '@/utils/generics/base-service';
|
||||
|
||||
import { Lookup, NlpEntityDto } from '../dto/nlp-entity.dto';
|
||||
@@ -17,7 +22,7 @@ import {
|
||||
NlpEntityFull,
|
||||
NlpEntityPopulate,
|
||||
} from '../schemas/nlp-entity.schema';
|
||||
import { NlpSampleEntityValue } from '../schemas/types';
|
||||
import { NlpCacheMap, NlpSampleEntityValue } from '../schemas/types';
|
||||
|
||||
import { NlpValueService } from './nlp-value.service';
|
||||
|
||||
@@ -30,6 +35,7 @@ export class NlpEntityService extends BaseService<
|
||||
> {
|
||||
constructor(
|
||||
readonly repository: NlpEntityRepository,
|
||||
@Inject(CACHE_MANAGER) private readonly cacheManager: Cache,
|
||||
private readonly nlpValueService: NlpValueService,
|
||||
) {
|
||||
super(repository);
|
||||
@@ -46,6 +52,26 @@ export class NlpEntityService extends BaseService<
|
||||
return await this.repository.deleteOne(id);
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the `weight` field of a specific NLP entity by its ID.
|
||||
*
|
||||
* This method is part of the NLP-based blocks prioritization strategy.
|
||||
* The weight influences the scoring of blocks when multiple blocks match a user's input.
|
||||
* @param id - The unique identifier of the entity to update.
|
||||
* @param updatedWeight - The new weight to assign. Must be a positive number.
|
||||
* @throws Error if the weight is not a positive number.
|
||||
* @returns A promise that resolves to the updated entity.
|
||||
*/
|
||||
async updateWeight(id: string, updatedWeight: number): Promise<NlpEntity> {
|
||||
if (updatedWeight <= 0) {
|
||||
throw new BadRequestException(
|
||||
'Weight must be a strictly positive number',
|
||||
);
|
||||
}
|
||||
|
||||
return await this.repository.updateOne(id, { weight: updatedWeight });
|
||||
}
|
||||
|
||||
/**
|
||||
* Stores new entities based on the sample text and sample entities.
|
||||
* Deletes all values relative to this entity before deleting the entity itself.
|
||||
@@ -97,4 +123,56 @@ export class NlpEntityService extends BaseService<
|
||||
);
|
||||
return Promise.all(findOrCreate);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clears the NLP map cache
|
||||
*/
|
||||
async clearCache() {
|
||||
await this.cacheManager.del(NLP_MAP_CACHE_KEY);
|
||||
}
|
||||
|
||||
/**
|
||||
* Event handler for Nlp Entity updates. Listens to 'hook:nlpEntity:*' events
|
||||
* and invalidates the cache for nlp entities when triggered.
|
||||
*/
|
||||
@OnEvent('hook:nlpEntity:*')
|
||||
async handleNlpEntityUpdateEvent() {
|
||||
try {
|
||||
await this.clearCache();
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to clear NLP entity cache', error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Event handler for Nlp Value updates. Listens to 'hook:nlpValue:*' events
|
||||
* and invalidates the cache for nlp values when triggered.
|
||||
*/
|
||||
@OnEvent('hook:nlpValue:*')
|
||||
async handleNlpValueUpdateEvent() {
|
||||
try {
|
||||
await this.clearCache();
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to clear NLP value cache', error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves NLP entity lookup information for the given list of entity names.
|
||||
*
|
||||
* This method queries the database for nlp entities,
|
||||
* transforms the result into a map structure where each key is
|
||||
* the entity name and each value contains metadata (id, weight, and list of values),
|
||||
* and caches the result using the configured cache key.
|
||||
*
|
||||
* @returns A Promise that resolves to a map of entity name to its corresponding lookup metadata.
|
||||
*/
|
||||
@Cacheable(NLP_MAP_CACHE_KEY)
|
||||
async getNlpMap(): Promise<NlpCacheMap> {
|
||||
const entities = await this.findAllAndPopulate();
|
||||
return entities.reduce((acc, curr) => {
|
||||
acc.set(curr.name, curr);
|
||||
return acc;
|
||||
}, new Map());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,6 +6,7 @@
|
||||
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
|
||||
*/
|
||||
|
||||
import { CACHE_MANAGER } from '@nestjs/cache-manager';
|
||||
import { MongooseModule } from '@nestjs/mongoose';
|
||||
|
||||
import { LanguageRepository } from '@/i18n/repositories/language.repository';
|
||||
@@ -76,6 +77,12 @@ describe('NlpSampleEntityService', () => {
|
||||
NlpSampleEntityService,
|
||||
NlpEntityService,
|
||||
NlpValueService,
|
||||
{
|
||||
provide: CACHE_MANAGER,
|
||||
useValue: {
|
||||
del: jest.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
[
|
||||
|
||||
@@ -217,7 +217,10 @@ describe('NlpSampleService', () => {
|
||||
.mockResolvedValue([{ name: 'intent' } as NlpEntity]);
|
||||
jest
|
||||
.spyOn(languageService, 'getLanguages')
|
||||
.mockResolvedValue({ en: { id: '1' } });
|
||||
.mockResolvedValue({ en: { id: '1' } } as unknown as Record<
|
||||
string,
|
||||
Language
|
||||
>);
|
||||
jest
|
||||
.spyOn(languageService, 'getDefaultLanguage')
|
||||
.mockResolvedValue({ code: 'en' } as Language);
|
||||
@@ -240,7 +243,10 @@ describe('NlpSampleService', () => {
|
||||
.mockResolvedValue([{ name: 'intent' } as NlpEntity]);
|
||||
jest
|
||||
.spyOn(languageService, 'getLanguages')
|
||||
.mockResolvedValue({ en: { id: '1' } });
|
||||
.mockResolvedValue({ en: { id: '1' } } as unknown as Record<
|
||||
string,
|
||||
Language
|
||||
>);
|
||||
jest
|
||||
.spyOn(languageService, 'getDefaultLanguage')
|
||||
.mockResolvedValue({ code: 'en' } as Language);
|
||||
@@ -258,7 +264,10 @@ describe('NlpSampleService', () => {
|
||||
|
||||
it('should successfully process and save valid dataset rows', async () => {
|
||||
const mockData = 'text,intent,language\nHi,greet,en\nBye,bye,en';
|
||||
const mockLanguages = { en: { id: '1' } };
|
||||
const mockLanguages = { en: { id: '1' } } as unknown as Record<
|
||||
string,
|
||||
Language
|
||||
>;
|
||||
|
||||
jest
|
||||
.spyOn(languageService, 'getLanguages')
|
||||
|
||||
@@ -6,6 +6,7 @@
|
||||
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
|
||||
*/
|
||||
|
||||
import { CACHE_MANAGER } from '@nestjs/cache-manager';
|
||||
import { MongooseModule } from '@nestjs/mongoose';
|
||||
|
||||
import { BaseSchema } from '@/utils/generics/base-schema';
|
||||
@@ -58,6 +59,14 @@ describe('NlpValueService', () => {
|
||||
NlpEntityRepository,
|
||||
NlpValueService,
|
||||
NlpEntityService,
|
||||
{
|
||||
provide: CACHE_MANAGER,
|
||||
useValue: {
|
||||
del: jest.fn(),
|
||||
set: jest.fn(),
|
||||
get: jest.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
[
|
||||
@@ -89,25 +98,33 @@ describe('NlpValueService', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('findPageAndPopulate', () => {
|
||||
it('should return all nlp entities with populate', async () => {
|
||||
const pageQuery = getPageQuery<NlpValue>({ sort: ['value', 'desc'] });
|
||||
const result = await nlpValueService.findPageAndPopulate({}, pageQuery);
|
||||
describe('findAndPopulate', () => {
|
||||
it('should return all nlp values with populate', async () => {
|
||||
const pageQuery = getPageQuery<NlpValue>({ sort: ['createdAt', 'asc'] });
|
||||
const result = await nlpValueService.findAndPopulate({}, pageQuery);
|
||||
const nlpValueFixturesWithEntities = nlpValueFixtures.reduce(
|
||||
(acc, curr) => {
|
||||
const ValueWithEntities = {
|
||||
const fullValue: NlpValueFull = {
|
||||
...curr,
|
||||
entity: nlpEntityFixtures[parseInt(curr.entity!)] as NlpEntity,
|
||||
expressions: curr.expressions!,
|
||||
metadata: curr.metadata!,
|
||||
builtin: curr.builtin!,
|
||||
metadata: {},
|
||||
id: '',
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
acc.push(ValueWithEntities);
|
||||
acc.push(fullValue);
|
||||
return acc;
|
||||
},
|
||||
[] as Omit<NlpValueFull, keyof BaseSchema>[],
|
||||
);
|
||||
expect(result).toEqualPayload(nlpValueFixturesWithEntities);
|
||||
expect(result).toEqualPayload(nlpValueFixturesWithEntities, [
|
||||
'id',
|
||||
'createdAt',
|
||||
'updatedAt',
|
||||
'metadata',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -125,7 +142,7 @@ describe('NlpValueService', () => {
|
||||
'Hello do you see me',
|
||||
[
|
||||
{ entity: 'intent', value: 'greeting' },
|
||||
{ entity: 'first_name', value: 'jhon' },
|
||||
{ entity: 'firstname', value: 'jhon' },
|
||||
],
|
||||
storedEntities,
|
||||
);
|
||||
@@ -133,7 +150,7 @@ describe('NlpValueService', () => {
|
||||
name: 'intent',
|
||||
});
|
||||
const firstNameEntity = await nlpEntityRepository.findOne({
|
||||
name: 'first_name',
|
||||
name: 'firstname',
|
||||
});
|
||||
const greetingValue = await nlpValueRepository.findOne({
|
||||
value: 'greeting',
|
||||
|
||||
134
api/src/nlp/services/nlp.service.spec.ts
Normal file
134
api/src/nlp/services/nlp.service.spec.ts
Normal file
@@ -0,0 +1,134 @@
|
||||
/*
|
||||
* Copyright © 2025 Hexastack. All rights reserved.
|
||||
*
|
||||
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
|
||||
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
|
||||
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
|
||||
*/
|
||||
|
||||
import { CACHE_MANAGER } from '@nestjs/cache-manager';
|
||||
import { MongooseModule } from '@nestjs/mongoose';
|
||||
|
||||
import { HelperService } from '@/helper/helper.service';
|
||||
import { LanguageRepository } from '@/i18n/repositories/language.repository';
|
||||
import { LanguageModel } from '@/i18n/schemas/language.schema';
|
||||
import { LanguageService } from '@/i18n/services/language.service';
|
||||
import { SettingRepository } from '@/setting/repositories/setting.repository';
|
||||
import { SettingModel } from '@/setting/schemas/setting.schema';
|
||||
import { SettingSeeder } from '@/setting/seeds/setting.seed';
|
||||
import { SettingService } from '@/setting/services/setting.service';
|
||||
import { installNlpValueFixtures } from '@/utils/test/fixtures/nlpvalue';
|
||||
import {
|
||||
closeInMongodConnection,
|
||||
rootMongooseTestModule,
|
||||
} from '@/utils/test/test';
|
||||
import { buildTestingMocks } from '@/utils/test/utils';
|
||||
|
||||
import { NlpEntityRepository } from '../repositories/nlp-entity.repository';
|
||||
import { NlpSampleEntityRepository } from '../repositories/nlp-sample-entity.repository';
|
||||
import { NlpSampleRepository } from '../repositories/nlp-sample.repository';
|
||||
import { NlpValueRepository } from '../repositories/nlp-value.repository';
|
||||
import { NlpEntityModel } from '../schemas/nlp-entity.schema';
|
||||
import { NlpSampleEntityModel } from '../schemas/nlp-sample-entity.schema';
|
||||
import { NlpSampleModel } from '../schemas/nlp-sample.schema';
|
||||
import { NlpValueModel } from '../schemas/nlp-value.schema';
|
||||
|
||||
import { NlpEntityService } from './nlp-entity.service';
|
||||
import { NlpSampleEntityService } from './nlp-sample-entity.service';
|
||||
import { NlpSampleService } from './nlp-sample.service';
|
||||
import { NlpValueService } from './nlp-value.service';
|
||||
import { NlpService } from './nlp.service';
|
||||
|
||||
describe('NlpService', () => {
|
||||
let nlpService: NlpService;
|
||||
|
||||
beforeAll(async () => {
|
||||
const { getMocks } = await buildTestingMocks({
|
||||
imports: [
|
||||
rootMongooseTestModule(installNlpValueFixtures),
|
||||
MongooseModule.forFeature([
|
||||
NlpEntityModel,
|
||||
NlpValueModel,
|
||||
NlpSampleEntityModel,
|
||||
NlpSampleModel,
|
||||
LanguageModel,
|
||||
SettingModel,
|
||||
]),
|
||||
],
|
||||
providers: [
|
||||
NlpService,
|
||||
NlpEntityService,
|
||||
NlpEntityRepository,
|
||||
NlpValueService,
|
||||
NlpSampleService,
|
||||
NlpSampleEntityService,
|
||||
HelperService,
|
||||
LanguageService,
|
||||
SettingService,
|
||||
NlpValueRepository,
|
||||
NlpSampleEntityRepository,
|
||||
NlpSampleRepository,
|
||||
SettingRepository,
|
||||
SettingSeeder,
|
||||
LanguageRepository,
|
||||
{
|
||||
provide: CACHE_MANAGER,
|
||||
useValue: {
|
||||
del: jest.fn(),
|
||||
set: jest.fn(),
|
||||
get: jest.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
[nlpService] = await getMocks([NlpService]);
|
||||
});
|
||||
|
||||
afterAll(closeInMongodConnection);
|
||||
|
||||
afterEach(jest.clearAllMocks);
|
||||
|
||||
describe('computePredictionScore()', () => {
|
||||
it('should compute score as confidence * weight for matched entities', async () => {
|
||||
const result = await nlpService.computePredictionScore({
|
||||
entities: [
|
||||
{ entity: 'intent', value: 'greeting', confidence: 0.98 },
|
||||
{ entity: 'subject', value: 'product', confidence: 0.9 },
|
||||
{ entity: 'firstname', value: 'Jhon', confidence: 0.78 },
|
||||
{ entity: 'irrelevant', value: 'test', confidence: 1 },
|
||||
],
|
||||
});
|
||||
|
||||
expect(result).toEqual({
|
||||
entities: [
|
||||
{
|
||||
entity: 'intent',
|
||||
value: 'greeting',
|
||||
confidence: 0.98,
|
||||
score: 0.98,
|
||||
},
|
||||
{
|
||||
entity: 'subject',
|
||||
value: 'product',
|
||||
confidence: 0.9,
|
||||
score: 0.855,
|
||||
},
|
||||
{
|
||||
entity: 'firstname',
|
||||
value: 'Jhon',
|
||||
confidence: 0.78,
|
||||
score: 0.663,
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
it('should return empty array if no entity matches', async () => {
|
||||
const result = await nlpService.computePredictionScore({
|
||||
entities: [{ entity: 'unknown', value: 'x', confidence: 1 }],
|
||||
});
|
||||
|
||||
expect(result).toEqual({ entities: [] });
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -10,6 +10,7 @@ import { Injectable, NotFoundException } from '@nestjs/common';
|
||||
import { OnEvent } from '@nestjs/event-emitter';
|
||||
|
||||
import { HelperService } from '@/helper/helper.service';
|
||||
import { HelperType, NLU } from '@/helper/types';
|
||||
import { LoggerService } from '@/logger/logger.service';
|
||||
|
||||
import { NlpEntity, NlpEntityDocument } from '../schemas/nlp-entity.schema';
|
||||
@@ -29,6 +30,36 @@ export class NlpService {
|
||||
protected readonly helperService: HelperService,
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Computes a prediction score for each parsed NLU entity based on its confidence and a predefined weight.
|
||||
*
|
||||
* `score = confidence * weight`
|
||||
*
|
||||
* If a weight is not defined for a given entity, a default of 1 is used.
|
||||
*
|
||||
* @param input - The input object containing parsed entities.
|
||||
* @param input.entities - The list of entities returned from NLU inference.
|
||||
*
|
||||
* @returns A promise that resolves to a list of scored entities.
|
||||
*/
|
||||
async computePredictionScore({
|
||||
entities,
|
||||
}: NLU.ParseEntities): Promise<NLU.ScoredEntities> {
|
||||
const nlpMap = await this.nlpEntityService.getNlpMap();
|
||||
|
||||
const scoredEntities = entities
|
||||
.filter(({ entity }) => nlpMap.has(entity))
|
||||
.map((e) => {
|
||||
const entity = nlpMap.get(e.entity)!;
|
||||
return {
|
||||
...e,
|
||||
score: e.confidence * (entity.weight || 1),
|
||||
};
|
||||
});
|
||||
|
||||
return { entities: scoredEntities };
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles the event triggered when a new NLP entity is created. Synchronizes the entity with the external NLP provider.
|
||||
*
|
||||
@@ -39,7 +70,7 @@ export class NlpService {
|
||||
async handleEntityCreate(entity: NlpEntityDocument) {
|
||||
// Synchonize new entity with NLP
|
||||
try {
|
||||
const helper = await this.helperService.getDefaultNluHelper();
|
||||
const helper = await this.helperService.getDefaultHelper(HelperType.NLU);
|
||||
const foreignId = await helper.addEntity(entity);
|
||||
this.logger.debug('New entity successfully synced!', foreignId);
|
||||
return await this.nlpEntityService.updateOne(
|
||||
|
||||
@@ -18,3 +18,5 @@ export const LANGUAGES_CACHE_KEY = 'languages';
|
||||
export const DEFAULT_LANGUAGE_CACHE_KEY = 'default_language';
|
||||
|
||||
export const ALLOWED_ORIGINS_CACHE_KEY = 'allowed_origins';
|
||||
|
||||
export const NLP_MAP_CACHE_KEY = 'nlp_map';
|
||||
|
||||
14
api/src/utils/test/fixtures/nlpentity.ts
vendored
14
api/src/utils/test/fixtures/nlpentity.ts
vendored
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright © 2024 Hexastack. All rights reserved.
|
||||
* Copyright © 2025 Hexastack. All rights reserved.
|
||||
*
|
||||
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
|
||||
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
|
||||
@@ -17,18 +17,28 @@ export const nlpEntityFixtures: NlpEntityCreateDto[] = [
|
||||
lookups: ['trait'],
|
||||
doc: '',
|
||||
builtin: false,
|
||||
weight: 1,
|
||||
},
|
||||
{
|
||||
name: 'first_name',
|
||||
name: 'firstname',
|
||||
lookups: ['keywords'],
|
||||
doc: '',
|
||||
builtin: false,
|
||||
weight: 0.85,
|
||||
},
|
||||
{
|
||||
name: 'built_in',
|
||||
lookups: ['trait'],
|
||||
doc: '',
|
||||
builtin: true,
|
||||
weight: 1,
|
||||
},
|
||||
{
|
||||
name: 'subject',
|
||||
lookups: ['trait'],
|
||||
doc: '',
|
||||
builtin: false,
|
||||
weight: 0.95,
|
||||
},
|
||||
];
|
||||
|
||||
|
||||
31
api/src/utils/test/fixtures/nlpvalue.ts
vendored
31
api/src/utils/test/fixtures/nlpvalue.ts
vendored
@@ -11,7 +11,7 @@ import mongoose from 'mongoose';
|
||||
import { NlpValueCreateDto } from '@/nlp/dto/nlp-value.dto';
|
||||
import { NlpValueModel } from '@/nlp/schemas/nlp-value.schema';
|
||||
|
||||
import { installNlpEntityFixtures } from './nlpentity';
|
||||
import { installNlpEntityFixtures, nlpEntityFixtures } from './nlpentity';
|
||||
|
||||
export const nlpValueFixtures: NlpValueCreateDto[] = [
|
||||
{
|
||||
@@ -49,16 +49,43 @@ export const nlpValueFixtures: NlpValueCreateDto[] = [
|
||||
builtin: true,
|
||||
doc: '',
|
||||
},
|
||||
{
|
||||
entity: '0',
|
||||
value: 'affirmation',
|
||||
expressions: ['yes', 'oui', 'yeah'],
|
||||
builtin: false,
|
||||
doc: '',
|
||||
},
|
||||
{
|
||||
entity: '3',
|
||||
value: 'product',
|
||||
expressions: [],
|
||||
builtin: false,
|
||||
doc: '',
|
||||
},
|
||||
{
|
||||
entity: '3',
|
||||
value: 'claim',
|
||||
expressions: [],
|
||||
builtin: false,
|
||||
doc: '',
|
||||
},
|
||||
];
|
||||
|
||||
export const installNlpValueFixtures = async () => {
|
||||
const nlpEntities = await installNlpEntityFixtures();
|
||||
|
||||
const NlpValue = mongoose.model(NlpValueModel.name, NlpValueModel.schema);
|
||||
|
||||
const nlpValues = await NlpValue.insertMany(
|
||||
nlpValueFixtures.map((v) => ({
|
||||
...v,
|
||||
entity: v?.entity ? nlpEntities[parseInt(v.entity)].id : null,
|
||||
entity: v?.entity
|
||||
? nlpEntities.find(
|
||||
(e) =>
|
||||
e.name === nlpEntityFixtures[parseInt(v.entity as string)].name,
|
||||
).id
|
||||
: null,
|
||||
})),
|
||||
);
|
||||
return { nlpEntities, nlpValues };
|
||||
|
||||
@@ -16,7 +16,7 @@ import { ButtonType, PayloadType } from '@/chat/schemas/types/button';
|
||||
import { CaptureVar } from '@/chat/schemas/types/capture-var';
|
||||
import { OutgoingMessageFormat } from '@/chat/schemas/types/message';
|
||||
import { BlockOptions, ContentOptions } from '@/chat/schemas/types/options';
|
||||
import { Pattern } from '@/chat/schemas/types/pattern';
|
||||
import { NlpPattern, Pattern } from '@/chat/schemas/types/pattern';
|
||||
import { QuickReplyType } from '@/chat/schemas/types/quick-reply';
|
||||
|
||||
import { modelInstance } from './misc';
|
||||
@@ -230,12 +230,92 @@ export const blockGetStarted = {
|
||||
value: 'Livre',
|
||||
type: PayloadType.attachments,
|
||||
},
|
||||
],
|
||||
trigger_labels: customerLabelsMock,
|
||||
message: ['Welcome! How are you ? '],
|
||||
} as unknown as BlockFull;
|
||||
|
||||
export const mockNlpGreetingPatterns: NlpPattern[] = [
|
||||
{
|
||||
entity: 'intent',
|
||||
match: 'value',
|
||||
value: 'greeting',
|
||||
},
|
||||
];
|
||||
|
||||
export const mockNlpGreetingNamePatterns: NlpPattern[] = [
|
||||
{
|
||||
entity: 'intent',
|
||||
match: 'value',
|
||||
value: 'greeting',
|
||||
},
|
||||
{
|
||||
entity: 'firstname',
|
||||
match: 'value',
|
||||
value: 'jhon',
|
||||
},
|
||||
];
|
||||
|
||||
export const mockNlpGreetingWrongNamePatterns: NlpPattern[] = [
|
||||
{
|
||||
entity: 'intent',
|
||||
match: 'value',
|
||||
value: 'greeting',
|
||||
},
|
||||
{
|
||||
entity: 'firstname',
|
||||
match: 'value',
|
||||
value: 'doe',
|
||||
},
|
||||
];
|
||||
|
||||
export const mockNlpAffirmationPatterns: NlpPattern[] = [
|
||||
{
|
||||
entity: 'intent',
|
||||
match: 'value',
|
||||
value: 'affirmation',
|
||||
},
|
||||
{
|
||||
entity: 'firstname',
|
||||
match: 'value',
|
||||
value: 'mark',
|
||||
},
|
||||
];
|
||||
|
||||
export const mockNlpGreetingAnyNamePatterns: NlpPattern[] = [
|
||||
{
|
||||
entity: 'intent',
|
||||
match: 'value',
|
||||
value: 'greeting',
|
||||
},
|
||||
{
|
||||
entity: 'firstname',
|
||||
match: 'entity',
|
||||
},
|
||||
];
|
||||
|
||||
export const mockModifiedNlpBlock: BlockFull = {
|
||||
...baseBlockInstance,
|
||||
name: 'Modified Mock Nlp',
|
||||
patterns: [
|
||||
'Hello',
|
||||
'/we*lcome/',
|
||||
{ label: 'Modified Mock Nlp', value: 'MODIFIED_MOCK_NLP' },
|
||||
mockNlpGreetingAnyNamePatterns,
|
||||
],
|
||||
trigger_labels: customerLabelsMock,
|
||||
message: ['Hello there'],
|
||||
} as unknown as BlockFull;
|
||||
|
||||
export const mockModifiedNlpBlockOne: BlockFull = {
|
||||
...baseBlockInstance,
|
||||
name: 'Modified Mock Nlp One',
|
||||
patterns: [
|
||||
'Hello',
|
||||
'/we*lcome/',
|
||||
{ label: 'Modified Mock Nlp One', value: 'MODIFIED_MOCK_NLP_ONE' },
|
||||
mockNlpAffirmationPatterns,
|
||||
[
|
||||
{
|
||||
entity: 'intent',
|
||||
match: 'value',
|
||||
value: 'greeting',
|
||||
},
|
||||
{
|
||||
entity: 'firstname',
|
||||
match: 'entity',
|
||||
@@ -243,9 +323,27 @@ export const blockGetStarted = {
|
||||
],
|
||||
],
|
||||
trigger_labels: customerLabelsMock,
|
||||
message: ['Welcome! How are you ? '],
|
||||
message: ['Hello Sir'],
|
||||
} as unknown as BlockFull;
|
||||
|
||||
export const mockModifiedNlpBlockTwo: BlockFull = {
|
||||
...baseBlockInstance,
|
||||
name: 'Modified Mock Nlp Two',
|
||||
patterns: [
|
||||
'Hello',
|
||||
'/we*lcome/',
|
||||
{ label: 'Modified Mock Nlp Two', value: 'MODIFIED_MOCK_NLP_TWO' },
|
||||
[
|
||||
{
|
||||
entity: 'firstname',
|
||||
match: 'entity',
|
||||
},
|
||||
],
|
||||
mockNlpGreetingAnyNamePatterns,
|
||||
],
|
||||
trigger_labels: customerLabelsMock,
|
||||
message: ['Hello Madam'],
|
||||
} as unknown as BlockFull;
|
||||
const patternsProduct: Pattern[] = [
|
||||
'produit',
|
||||
[
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright © 2024 Hexastack. All rights reserved.
|
||||
* Copyright © 2025 Hexastack. All rights reserved.
|
||||
*
|
||||
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
|
||||
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
|
||||
@@ -8,7 +8,24 @@
|
||||
|
||||
import { NLU } from '@/helper/types';
|
||||
|
||||
export const nlpEntitiesGreeting: NLU.ParseEntities = {
|
||||
export const mockNlpGreetingNameEntities: NLU.ScoredEntities = {
|
||||
entities: [
|
||||
{
|
||||
entity: 'intent',
|
||||
value: 'greeting',
|
||||
confidence: 0.999,
|
||||
score: 0.999,
|
||||
},
|
||||
{
|
||||
entity: 'firstname',
|
||||
value: 'jhon',
|
||||
confidence: 0.5,
|
||||
score: 0.425,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
export const mockNlpGreetingFullNameEntities: NLU.ParseEntities = {
|
||||
entities: [
|
||||
{
|
||||
entity: 'intent',
|
||||
|
||||
@@ -121,7 +121,9 @@
|
||||
"file_error": "File not found",
|
||||
"audio_error": "Audio not found",
|
||||
"video_error": "Video not found",
|
||||
"missing_fields_error": "Please make sure that all required fields are filled"
|
||||
"missing_fields_error": "Please make sure that all required fields are filled",
|
||||
"weight_required_error": "Weight is required or invalid",
|
||||
"weight_positive_number_error": "Weight must be a strictly positive number"
|
||||
},
|
||||
"menu": {
|
||||
"terms": "Terms of Use",
|
||||
@@ -348,6 +350,7 @@
|
||||
"nlp_lookup_trait": "Trait",
|
||||
"doc": "Documentation",
|
||||
"builtin": "Built-in?",
|
||||
"weight": "Weight",
|
||||
"dataset": "Dataset",
|
||||
"yes": "Yes",
|
||||
"no": "No",
|
||||
|
||||
@@ -121,7 +121,9 @@
|
||||
"file_error": "Fichier introuvable",
|
||||
"audio_error": "Audio introuvable",
|
||||
"video_error": "Vidéo introuvable",
|
||||
"missing_fields_error": "Veuillez vous assurer que tous les champs sont remplis correctement"
|
||||
"missing_fields_error": "Veuillez vous assurer que tous les champs sont remplis correctement",
|
||||
"weight_positive_number_error": "Le poids doit être un nombre strictement positif",
|
||||
"weight_required_error": "Le poids est requis ou bien invalide"
|
||||
},
|
||||
"menu": {
|
||||
"terms": "Conditions d'utilisation",
|
||||
@@ -347,6 +349,7 @@
|
||||
"nlp_lookup_trait": "Trait",
|
||||
"synonyms": "Synonymes",
|
||||
"doc": "Documentation",
|
||||
"weight": "Poids",
|
||||
"builtin": "Intégré?",
|
||||
"dataset": "Données",
|
||||
"yes": "Oui",
|
||||
|
||||
@@ -156,8 +156,7 @@ function StackComponent<T extends GridValidRowModel>({
|
||||
disabled={
|
||||
(isDisabled && isDisabled(params.row)) ||
|
||||
(params.row.builtin &&
|
||||
(requires.includes(PermissionAction.UPDATE) ||
|
||||
requires.includes(PermissionAction.DELETE)))
|
||||
requires.includes(PermissionAction.DELETE))
|
||||
}
|
||||
onClick={() => {
|
||||
action && action(params.row);
|
||||
|
||||
@@ -167,6 +167,16 @@ const NlpEntity = () => {
|
||||
resizable: false,
|
||||
renderHeader,
|
||||
},
|
||||
{
|
||||
maxWidth: 210,
|
||||
field: "weight",
|
||||
headerName: t("label.weight"),
|
||||
renderCell: (val) => <Chip label={val.value} variant="title" />,
|
||||
sortable: true,
|
||||
disableColumnMenu: true,
|
||||
resizable: false,
|
||||
renderHeader,
|
||||
},
|
||||
{
|
||||
maxWidth: 90,
|
||||
field: "builtin",
|
||||
|
||||
@@ -60,6 +60,7 @@ export const NlpEntityVarForm: FC<ComponentFormProps<INlpEntity>> = ({
|
||||
name: nlpEntity?.name || "",
|
||||
doc: nlpEntity?.doc || "",
|
||||
lookups: nlpEntity?.lookups || ["keywords"],
|
||||
weight: nlpEntity?.weight || 1,
|
||||
},
|
||||
});
|
||||
const validationRules = {
|
||||
@@ -82,6 +83,7 @@ export const NlpEntityVarForm: FC<ComponentFormProps<INlpEntity>> = ({
|
||||
reset({
|
||||
name: nlpEntity.name,
|
||||
doc: nlpEntity.doc,
|
||||
weight: nlpEntity.weight,
|
||||
});
|
||||
} else {
|
||||
reset();
|
||||
@@ -121,6 +123,7 @@ export const NlpEntityVarForm: FC<ComponentFormProps<INlpEntity>> = ({
|
||||
required
|
||||
autoFocus
|
||||
helperText={errors.name ? errors.name.message : null}
|
||||
disabled={nlpEntity?.builtin}
|
||||
/>
|
||||
</ContentItem>
|
||||
<ContentItem>
|
||||
@@ -128,8 +131,34 @@ export const NlpEntityVarForm: FC<ComponentFormProps<INlpEntity>> = ({
|
||||
label={t("label.doc")}
|
||||
{...register("doc")}
|
||||
multiline={true}
|
||||
disabled={nlpEntity?.builtin}
|
||||
/>
|
||||
</ContentItem>
|
||||
<ContentItem>
|
||||
<Input
|
||||
label={t("label.weight")}
|
||||
{...register("weight", {
|
||||
valueAsNumber: true,
|
||||
required: t("message.weight_required_error"),
|
||||
min: {
|
||||
value: 0.01,
|
||||
message: t("message.weight_positive_number_error"),
|
||||
},
|
||||
validate: (value) =>
|
||||
value && value > 0
|
||||
? true
|
||||
: t("message.weight_positive_number_error"),
|
||||
})}
|
||||
type="number"
|
||||
inputProps={{
|
||||
min: 0,
|
||||
step: 0.01,
|
||||
inputMode: "numeric",
|
||||
}}
|
||||
error={!!errors.weight}
|
||||
helperText={errors.weight?.message}
|
||||
/>
|
||||
</ContentItem>
|
||||
</ContentContainer>
|
||||
</form>
|
||||
</Wrapper>
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright © 2024 Hexastack. All rights reserved.
|
||||
* Copyright © 2025 Hexastack. All rights reserved.
|
||||
*
|
||||
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
|
||||
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
|
||||
@@ -19,6 +19,7 @@ export interface INlpEntityAttributes {
|
||||
lookups: Lookup[];
|
||||
doc?: string;
|
||||
builtin?: boolean;
|
||||
weight?: number;
|
||||
}
|
||||
|
||||
export enum NlpLookups {
|
||||
|
||||
Reference in New Issue
Block a user