feat: refactor helpers (nlu)

This commit is contained in:
Mohamed Marrouchi
2024-10-21 15:09:59 +01:00
parent b2c32fe27d
commit b7eef89981
53 changed files with 901 additions and 731 deletions

View File

@@ -16,7 +16,6 @@ import { SubscriberService } from '@/chat/services/subscriber.service';
import { MenuService } from '@/cms/services/menu.service';
import { I18nService } from '@/i18n/services/i18n.service';
import { LoggerService } from '@/logger/logger.service';
import { NlpService } from '@/nlp/services/nlp.service';
import { SettingService } from '@/setting/services/setting.service';
import { WebsocketGateway } from '@/websocket/websocket.gateway';
@@ -34,7 +33,6 @@ export default class LiveChatTesterHandler extends BaseWebChannelHandler<
constructor(
settingService: SettingService,
channelService: ChannelService,
nlpService: NlpService,
logger: LoggerService,
eventEmitter: EventEmitter2,
i18n: I18nService,
@@ -49,7 +47,6 @@ export default class LiveChatTesterHandler extends BaseWebChannelHandler<
DEFAULT_LIVE_CHAT_TEST_SETTINGS,
settingService,
channelService,
nlpService,
logger,
eventEmitter,
i18n,

View File

@@ -36,7 +36,6 @@ import { MenuModel } from '@/cms/schemas/menu.schema';
import { MenuService } from '@/cms/services/menu.service';
import { I18nService } from '@/i18n/services/i18n.service';
import { LoggerService } from '@/logger/logger.service';
import { NlpService } from '@/nlp/services/nlp.service';
import { SettingService } from '@/setting/services/setting.service';
import { UserModel } from '@/user/schemas/user.schema';
import { installMessageFixtures } from '@/utils/test/fixtures/message';
@@ -92,12 +91,6 @@ describe('Offline Handler', () => {
})),
},
},
{
provide: NlpService,
useValue: {
getNLP: jest.fn(() => undefined),
},
},
ChannelService,
WebsocketGateway,
SocketEventDispatcherService,

View File

@@ -53,7 +53,6 @@ import { MenuService } from '@/cms/services/menu.service';
import { config } from '@/config';
import { I18nService } from '@/i18n/services/i18n.service';
import { LoggerService } from '@/logger/logger.service';
import { NlpService } from '@/nlp/services/nlp.service';
import { SettingService } from '@/setting/services/setting.service';
import { SocketRequest } from '@/websocket/utils/socket-request';
import { SocketResponse } from '@/websocket/utils/socket-response';
@@ -72,7 +71,6 @@ export default class BaseWebChannelHandler<
settings: ChannelSetting<N>[],
settingService: SettingService,
channelService: ChannelService,
nlpService: NlpService,
logger: LoggerService,
protected readonly eventEmitter: EventEmitter2,
protected readonly i18n: I18nService,
@@ -82,7 +80,7 @@ export default class BaseWebChannelHandler<
protected readonly menuService: MenuService,
private readonly websocketGateway: WebsocketGateway,
) {
super(name, settings, settingService, channelService, nlpService, logger);
super(name, settings, settingService, channelService, logger);
}
/**

View File

@@ -16,7 +16,6 @@ import { SubscriberService } from '@/chat/services/subscriber.service';
import { MenuService } from '@/cms/services/menu.service';
import { I18nService } from '@/i18n/services/i18n.service';
import { LoggerService } from '@/logger/logger.service';
import { NlpService } from '@/nlp/services/nlp.service';
import { SettingService } from '@/setting/services/setting.service';
import { WebsocketGateway } from '@/websocket/websocket.gateway';
@@ -30,7 +29,6 @@ export default class OfflineHandler extends BaseWebChannelHandler<
constructor(
settingService: SettingService,
channelService: ChannelService,
nlpService: NlpService,
logger: LoggerService,
eventEmitter: EventEmitter2,
i18n: I18nService,
@@ -45,7 +43,6 @@ export default class OfflineHandler extends BaseWebChannelHandler<
DEFAULT_OFFLINE_SETTINGS,
settingService,
channelService,
nlpService,
logger,
eventEmitter,
i18n,

View File

@@ -6,11 +6,11 @@
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
*/
import { Nlp } from '@/nlp/lib/types';
import { Nlp } from '@/helper/types';
import { DatasetType, NlpParseResultType } from '../types';
import { NlpParseResultType, RasaNlu } from '../types';
export const nlpEmptyFormated: DatasetType = {
export const nlpEmptyFormated: RasaNlu.Dataset = {
common_examples: [],
regex_features: [],
lookup_tables: [
@@ -35,7 +35,7 @@ export const nlpEmptyFormated: DatasetType = {
],
};
export const nlpFormatted: DatasetType = {
export const nlpFormatted: RasaNlu.Dataset = {
common_examples: [
{
text: 'Hello',

View File

@@ -12,31 +12,19 @@ import { EventEmitter2 } from '@nestjs/event-emitter';
import { MongooseModule } from '@nestjs/mongoose';
import { Test, TestingModule } from '@nestjs/testing';
import { HelperService } from '@/helper/helper.service';
import { LanguageRepository } from '@/i18n/repositories/language.repository';
import { LanguageModel } from '@/i18n/schemas/language.schema';
import { LanguageService } from '@/i18n/services/language.service';
import { LoggerService } from '@/logger/logger.service';
import { NlpEntityRepository } from '@/nlp/repositories/nlp-entity.repository';
import { NlpSampleEntityRepository } from '@/nlp/repositories/nlp-sample-entity.repository';
import { NlpSampleRepository } from '@/nlp/repositories/nlp-sample.repository';
import { NlpValueRepository } from '@/nlp/repositories/nlp-value.repository';
import { NlpEntityModel } from '@/nlp/schemas/nlp-entity.schema';
import { NlpSampleEntityModel } from '@/nlp/schemas/nlp-sample-entity.schema';
import { NlpSampleModel } from '@/nlp/schemas/nlp-sample.schema';
import { NlpValueModel } from '@/nlp/schemas/nlp-value.schema';
import { NlpEntityService } from '@/nlp/services/nlp-entity.service';
import { NlpSampleEntityService } from '@/nlp/services/nlp-sample-entity.service';
import { NlpSampleService } from '@/nlp/services/nlp-sample.service';
import { NlpValueService } from '@/nlp/services/nlp-value.service';
import { NlpService } from '@/nlp/services/nlp.service';
import { SettingService } from '@/setting/services/setting.service';
import { installNlpSampleEntityFixtures } from '@/utils/test/fixtures/nlpsampleentity';
import { installLanguageFixtures } from '@/utils/test/fixtures/language';
import {
closeInMongodConnection,
rootMongooseTestModule,
} from '@/utils/test/test';
import DefaultNlpHelper from '../index.nlp.helper';
import CoreNluHelper from '../index.helper';
import { entitiesMock, samplesMock } from './__mock__/base.mock';
import {
@@ -46,45 +34,31 @@ import {
nlpParseResult,
} from './index.mock';
describe('NLP Default Helper', () => {
describe('Core NLU Helper', () => {
let settingService: SettingService;
let nlpService: NlpService;
let defaultNlpHelper: DefaultNlpHelper;
let defaultNlpHelper: CoreNluHelper;
beforeAll(async () => {
const module: TestingModule = await Test.createTestingModule({
imports: [
rootMongooseTestModule(installNlpSampleEntityFixtures),
MongooseModule.forFeature([
NlpEntityModel,
NlpValueModel,
NlpSampleModel,
NlpSampleEntityModel,
LanguageModel,
]),
rootMongooseTestModule(async () => {
await installLanguageFixtures();
}),
MongooseModule.forFeature([LanguageModel]),
HttpModule,
],
providers: [
NlpService,
NlpSampleService,
NlpSampleRepository,
NlpEntityService,
NlpEntityRepository,
NlpValueService,
NlpValueRepository,
NlpSampleEntityService,
NlpSampleEntityRepository,
LanguageService,
LanguageRepository,
EventEmitter2,
DefaultNlpHelper,
HelperService,
CoreNluHelper,
LoggerService,
{
provide: SettingService,
useValue: {
getSettings: jest.fn(() => ({
nlp_settings: {
provider: 'default',
core_nlu: {
endpoint: 'path',
token: 'token',
threshold: '0.5',
@@ -103,56 +77,51 @@ describe('NLP Default Helper', () => {
],
}).compile();
settingService = module.get<SettingService>(SettingService);
nlpService = module.get<NlpService>(NlpService);
defaultNlpHelper = module.get<DefaultNlpHelper>(DefaultNlpHelper);
nlpService.setHelper('default', defaultNlpHelper);
nlpService.initNLP();
defaultNlpHelper = module.get<CoreNluHelper>(CoreNluHelper);
});
afterAll(closeInMongodConnection);
it('should init() properly', () => {
const nlp = nlpService.getNLP();
expect(nlp).toBeDefined();
});
it('should format empty training set properly', async () => {
const nlp = nlpService.getNLP();
const results = await nlp.format([], entitiesMock);
const results = await defaultNlpHelper.format([], entitiesMock);
expect(results).toEqual(nlpEmptyFormated);
});
it('should format training set properly', async () => {
const nlp = nlpService.getNLP();
const results = await nlp.format(samplesMock, entitiesMock);
const results = await defaultNlpHelper.format(samplesMock, entitiesMock);
expect(results).toEqual(nlpFormatted);
});
it('should return best guess from empty parse results', () => {
const nlp = nlpService.getNLP();
const results = nlp.bestGuess(
it('should return best guess from empty parse results', async () => {
const results = await defaultNlpHelper.filterEntitiesByConfidence(
{
entities: [],
intent: {},
intent: { name: 'greeting', confidence: 0 },
intent_ranking: [],
text: 'test',
},
false,
);
expect(results).toEqual({ entities: [] });
expect(results).toEqual({
entities: [{ entity: 'intent', value: 'greeting', confidence: 0 }],
});
});
it('should return best guess from parse results', () => {
const nlp = nlpService.getNLP();
const results = nlp.bestGuess(nlpParseResult, false);
it('should return best guess from parse results', async () => {
const results = await defaultNlpHelper.filterEntitiesByConfidence(
nlpParseResult,
false,
);
expect(results).toEqual(nlpBestGuess);
});
it('should return best guess from parse results with threshold', async () => {
const nlp = nlpService.getNLP();
const results = nlp.bestGuess(nlpParseResult, true);
const results = await defaultNlpHelper.filterEntitiesByConfidence(
nlpParseResult,
true,
);
const settings = await settingService.getSettings();
const threshold = settings.nlp_settings.threshold;
const threshold = settings.core_nlu.threshold;
const thresholdGuess = {
entities: nlpBestGuess.entities.filter(
(g) =>

View File

@@ -0,0 +1,5 @@
{
"endpoint": "Enter the endpoint URL for the Core NLU API where requests will be sent.",
"token": "Provide the API token for authenticating requests to the Core NLU API.",
"threshold": "Set the minimum confidence score for predictions to be considered valid."
}

View File

@@ -0,0 +1,5 @@
{
"endpoint": "Core NLU API",
"token": "API Token",
"threshold": "Confidence Threshold"
}

View File

@@ -0,0 +1,3 @@
{
"core_nlu": "Core NLU Engine"
}

View File

@@ -0,0 +1,5 @@
{
"endpoint": "Entrez l'URL de point de terminaison pour l'API NLU Core où les requêtes seront envoyées.",
"token": "Fournissez le jeton d'API pour authentifier les requêtes à l'API NLU Core.",
"threshold": "Définissez le score de confiance minimum pour que les prédictions soient considérées comme valides."
}

View File

@@ -0,0 +1,5 @@
{
"endpoint": "API NLU Core",
"token": "Jeton d'API",
"threshold": "Seuil de Confiance"
}

View File

@@ -0,0 +1,3 @@
{
"core_nlu": "Core NLU Engine"
}

View File

@@ -0,0 +1,14 @@
import { CORE_NLU_HELPER_GROUP, CORE_NLU_HELPER_SETTINGS } from './settings';
// Ambient type augmentations that wire this helper's settings into the app.
declare global {
  // Merges the helper's setting definitions into the application-wide
  // `Settings` tree so `settingService.getSettings()` is typed for `core_nlu`.
  // NOTE(review): `SettingTree` is assumed to be a globally declared utility
  // type (no import here) — confirm it is exported as ambient.
  interface Settings extends SettingTree<typeof CORE_NLU_HELPER_SETTINGS> {}
}
declare module '@nestjs/event-emitter' {
  interface IHookExtensionsOperationMap {
    // Exposes typed setting-update hook events for the `core_nlu` group.
    // NOTE(review): `TDefinition`/`SettingMapByType` are also assumed ambient.
    [CORE_NLU_HELPER_GROUP]: TDefinition<
      object,
      SettingMapByType<typeof CORE_NLU_HELPER_SETTINGS>
    >;
  }
}

View File

@@ -0,0 +1,283 @@
/*
* Copyright © 2024 Hexastack. All rights reserved.
*
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
*/
import { HttpService } from '@nestjs/axios';
import { Injectable } from '@nestjs/common';
import { HelperService } from '@/helper/helper.service';
import BaseNlpHelper from '@/helper/lib/base-nlp-helper';
import { Nlp } from '@/helper/types';
import { LanguageService } from '@/i18n/services/language.service';
import { LoggerService } from '@/logger/logger.service';
import { NlpEntity, NlpEntityFull } from '@/nlp/schemas/nlp-entity.schema';
import { NlpSampleFull } from '@/nlp/schemas/nlp-sample.schema';
import { NlpValue } from '@/nlp/schemas/nlp-value.schema';
import { SettingService } from '@/setting/services/setting.service';
import { buildURL } from '@/utils/helpers/URL';
import { CORE_NLU_HELPER_NAME, CORE_NLU_HELPER_SETTINGS } from './settings';
import { NlpParseResultType, RasaNlu } from './types';
@Injectable()
export default class CoreNluHelper extends BaseNlpHelper<
  typeof CORE_NLU_HELPER_NAME
> {
  /**
   * Instantiates the Core NLU helper.
   *
   * @param settingService - Access to this helper's persisted settings.
   * @param helperService - Registry the base class registers this helper with.
   * @param logger - Application logger.
   * @param httpService - HTTP client used to call the Core NLU API.
   * @param languageService - Supplies the available languages for the dataset.
   */
  constructor(
    settingService: SettingService,
    helperService: HelperService,
    logger: LoggerService,
    private readonly httpService: HttpService,
    private readonly languageService: LanguageService,
  ) {
    super(
      CORE_NLU_HELPER_NAME,
      CORE_NLU_HELPER_SETTINGS,
      settingService,
      helperService,
      logger,
    );
  }

  /**
   * Formats a set of NLP samples into the Rasa NLU-compatible training dataset format.
   *
   * @param samples - The NLP samples to format.
   * @param entities - The NLP entities available in the dataset.
   *
   * @returns The formatted Rasa NLU training dataset.
   * @throws Error when a sample has annotated entities but no `intent` entity.
   */
  async format(
    samples: NlpSampleFull[],
    entities: NlpEntityFull[],
  ): Promise<RasaNlu.Dataset> {
    const entityMap = NlpEntity.getEntityMap(entities);
    const valueMap = NlpValue.getValueMap(
      NlpValue.getValuesFromEntities(entities),
    );
    const common_examples: RasaNlu.CommonExample[] = samples
      // Samples without any annotated entity cannot yield an intent; skip them.
      .filter((s) => s.entities.length > 0)
      .map((s) => {
        const intent = s.entities.find(
          (e) => entityMap[e.entity].name === 'intent',
        );
        if (!intent) {
          throw new Error('Unable to find the `intent` nlp entity.');
        }
        const sampleEntities: RasaNlu.ExampleEntity[] = s.entities
          .filter((e) => entityMap[<string>e.entity].name !== 'intent')
          .map((e) => {
            const res: RasaNlu.ExampleEntity = {
              entity: entityMap[<string>e.entity].name,
              value: valueMap[<string>e.value].value,
            };
            // Keep character offsets only when the annotation carries them.
            if ('start' in e && 'end' in e) {
              Object.assign(res, {
                start: e.start,
                end: e.end,
              });
            }
            return res;
          })
          // TODO : place language at the same level as the intent
          .concat({
            entity: 'language',
            value: s.language.code,
          });

        return {
          text: s.text,
          intent: valueMap[intent.value].value,
          entities: sampleEntities,
        };
      });

    const languages = await this.languageService.getLanguages();
    const lookup_tables: RasaNlu.LookupTable[] = entities
      .map((e) => {
        return {
          name: e.name,
          elements: e.values.map((v) => {
            return v.value;
          }),
        };
      })
      .concat({
        name: 'language',
        // NOTE(review): assumes getLanguages() returns a map keyed by
        // language code — confirm against LanguageService.
        elements: Object.keys(languages),
      });

    const entity_synonyms = entities
      .reduce((acc, e) => {
        const synonyms = e.values.map((v) => {
          return {
            value: v.value,
            synonyms: v.expressions,
          };
        });
        return acc.concat(synonyms);
      }, [] as RasaNlu.EntitySynonym[])
      // Only keep values that actually define synonym expressions.
      .filter((s) => {
        return s.synonyms.length > 0;
      });

    return {
      common_examples,
      regex_features: [],
      lookup_tables,
      entity_synonyms,
    };
  }

  /**
   * Perform a training request
   *
   * @param samples - Samples to train
   * @param entities - All available entities
   * @returns The training result
   */
  async train(
    samples: NlpSampleFull[],
    entities: NlpEntityFull[],
  ): Promise<any> {
    const nluData: RasaNlu.Dataset = await this.format(samples, entities);
    const settings = await this.getSettings();
    // Train samples
    return await this.httpService.axiosRef.post(
      buildURL(settings.endpoint, `/train`),
      nluData,
      {
        params: {
          token: settings.token,
        },
      },
    );
  }

  /**
   * Perform evaluation request
   *
   * @param samples - Samples to evaluate
   * @param entities - All available entities
   * @returns Evaluation results
   */
  async evaluate(
    samples: NlpSampleFull[],
    entities: NlpEntityFull[],
  ): Promise<any> {
    const settings = await this.getSettings();
    const nluTestData: RasaNlu.Dataset = await this.format(samples, entities);
    // Evaluate model with test samples
    return await this.httpService.axiosRef.post(
      buildURL(settings.endpoint, `/evaluate`),
      nluTestData,
      {
        params: {
          token: settings.token,
        },
      },
    );
  }

  /**
   * Returns only the entities that have strong confidence (> than the threshold), can return an empty result
   *
   * @param nlp - The nlp returned result
   * @param threshold - Whenever to apply threshold filter or not
   *
   * @returns The parsed entities
   */
  async filterEntitiesByConfidence(
    nlp: NlpParseResultType,
    threshold: boolean,
  ): Promise<Nlp.ParseEntities> {
    try {
      let minConfidence = 0;
      const guess: Nlp.ParseEntities = {
        entities: nlp.entities.slice(),
      };

      if (threshold) {
        const settings = await this.getSettings();
        // Fix: don't shadow the boolean `threshold` parameter; setting values
        // may be persisted as strings, so normalize to a number first.
        const rawThreshold = settings.threshold;
        minConfidence =
          typeof rawThreshold === 'string'
            ? Number.parseFloat(rawThreshold)
            : rawThreshold;
        guess.entities = guess.entities
          // Fix: copy each entity instead of mutating the caller's parse
          // result through the shallow slice() copy.
          .map((e) => ({
            ...e,
            confidence:
              typeof e.confidence === 'string'
                ? Number.parseFloat(e.confidence)
                : e.confidence,
          }))
          .filter((e) => e.confidence >= minConfidence);
        // Get past threshold and the highest confidence for the same entity
        // .filter((e, idx, self) => {
        //   const sameEntities = self.filter((s) => s.entity === e.entity);
        //   const max = Math.max.apply(Math, sameEntities.map((e) => { return e.confidence; }));
        //   return e.confidence === max;
        // });
      }

      // Promote the intent/language traits to entities when confident enough.
      ['intent', 'language'].forEach((trait) => {
        if (trait in nlp && (nlp as any)[trait].confidence >= minConfidence) {
          guess.entities.push({
            entity: trait,
            value: (nlp as any)[trait].name,
            confidence: (nlp as any)[trait].confidence,
          });
        }
      });
      return guess;
    } catch (e) {
      this.logger.error(
        'Core NLU Helper : Unable to parse nlp result to extract best guess!',
        e,
      );
      // Fail-safe: an unparsable result yields no entities instead of throwing.
      return {
        entities: [],
      };
    }
  }

  /**
   * Returns only the entities that have strong confidence (> than the threshold), can return an empty result
   *
   * @param text - The text to parse
   * @param threshold - Whenever to apply threshold filter or not
   * @param project - Whenever to request a specific model
   *
   * @returns The prediction
   */
  async predict(
    text: string,
    threshold: boolean,
    project: string = 'current',
  ): Promise<Nlp.ParseEntities> {
    try {
      const settings = await this.getSettings();
      const { data: nlp } =
        await this.httpService.axiosRef.post<NlpParseResultType>(
          buildURL(settings.endpoint, '/parse'),
          {
            q: text,
            project,
          },
          {
            params: {
              token: settings.token,
            },
          },
        );
      // Await so any rejection is logged/rethrown inside this try block.
      return await this.filterEntitiesByConfidence(nlp, threshold);
    } catch (err) {
      this.logger.error('Core NLU Helper : Unable to parse nlp', err);
      throw err;
    }
  }
}

View File

@@ -0,0 +1,8 @@
{
"name": "hexabot-core-nlu",
"version": "2.0.0",
"description": "The Core NLU Helper Extension for Hexabot Chatbot / Agent Builder to enable the Intent Classification and Language Detection",
"dependencies": {},
"author": "Hexastack",
"license": "AGPL-3.0-only"
}

View File

@@ -0,0 +1,32 @@
import { HelperSetting } from '@/helper/types';
import { SettingType } from '@/setting/schemas/types';
export const CORE_NLU_HELPER_NAME = 'core-nlu';
export const CORE_NLU_HELPER_GROUP = 'core_nlu';
export const CORE_NLU_HELPER_SETTINGS = [
{
group: CORE_NLU_HELPER_GROUP,
label: 'endpoint',
value: 'http://nlu-api:5000/',
type: SettingType.text,
},
{
group: CORE_NLU_HELPER_GROUP,
label: 'token',
value: 'token123',
type: SettingType.text,
},
{
group: CORE_NLU_HELPER_GROUP,
label: 'threshold',
value: 0.1,
type: SettingType.number,
config: {
min: 0,
max: 1,
step: 0.01,
},
},
] as const satisfies HelperSetting<typeof CORE_NLU_HELPER_NAME>[];

View File

@@ -6,34 +6,36 @@
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
*/
export interface ExampleEntity {
entity: string;
value: string;
start?: number;
end?: number;
}
export namespace RasaNlu {
export interface ExampleEntity {
entity: string;
value: string;
start?: number;
end?: number;
}
export interface CommonExample {
text: string;
intent: string;
entities: ExampleEntity[];
}
export interface CommonExample {
text: string;
intent: string;
entities: ExampleEntity[];
}
export interface LookupTable {
name: string;
elements: string[];
}
export interface LookupTable {
name: string;
elements: string[];
}
export interface EntitySynonym {
value: string;
synonyms: string[];
}
export interface EntitySynonym {
value: string;
synonyms: string[];
}
export interface DatasetType {
common_examples: CommonExample[];
regex_features: any[];
lookup_tables: LookupTable[];
entity_synonyms: EntitySynonym[];
export interface Dataset {
common_examples: CommonExample[];
regex_features: any[];
lookup_tables: LookupTable[];
entity_synonyms: EntitySynonym[];
}
}
export interface ParseEntity {

View File

@@ -1,215 +0,0 @@
/*
* Copyright © 2024 Hexastack. All rights reserved.
*
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
*/
import { HttpService } from '@nestjs/axios';
import { Injectable } from '@nestjs/common';
import { LoggerService } from '@/logger/logger.service';
import BaseNlpHelper from '@/nlp/lib/BaseNlpHelper';
import { Nlp } from '@/nlp/lib/types';
import { NlpEntityFull } from '@/nlp/schemas/nlp-entity.schema';
import { NlpSampleFull } from '@/nlp/schemas/nlp-sample.schema';
import { NlpEntityService } from '@/nlp/services/nlp-entity.service';
import { NlpSampleService } from '@/nlp/services/nlp-sample.service';
import { NlpService } from '@/nlp/services/nlp.service';
import { buildURL } from '@/utils/helpers/URL';
import { DatasetType, NlpParseResultType } from './types';
/**
 * Legacy "default" NLP helper backed by a Rasa-compatible NLU API.
 * This file is removed by this commit, superseded by the Core NLU helper
 * extension; documented here as the pre-refactor implementation.
 */
@Injectable()
export default class DefaultNlpHelper extends BaseNlpHelper {
  /**
   * Instantiate a nlp helper
   *
   * @param logger - Application logger.
   * @param nlpService - Registry this helper registers itself with on init.
   * @param nlpSampleService - Used to format samples and flag them as trained.
   * @param nlpEntityService - Entity access service required by the base class.
   * @param httpService - HTTP client used to reach the NLU API.
   */
  constructor(
    logger: LoggerService,
    nlpService: NlpService,
    nlpSampleService: NlpSampleService,
    nlpEntityService: NlpEntityService,
    protected readonly httpService: HttpService,
  ) {
    super(logger, nlpService, nlpSampleService, nlpEntityService);
  }
  // Registers this helper under its name once the Nest module is initialized.
  onModuleInit() {
    this.nlpService.setHelper(this.getName(), this);
  }
  // Registration key of this helper.
  getName() {
    return 'default';
  }
  /**
   * Return training dataset in compatible format
   *
   * @param samples - Sample to train
   * @param entities - All available entities
   *
   * @returns The formatted RASA training set
   */
  async format(
    samples: NlpSampleFull[],
    entities: NlpEntityFull[],
  ): Promise<DatasetType> {
    // Delegates the actual Rasa NLU formatting to the sample service.
    const nluData = await this.nlpSampleService.formatRasaNlu(
      samples,
      entities,
    );
    return nluData;
  }
  /**
   * Perform Rasa training request
   *
   * @param samples - Samples to train
   * @param entities - All available entities
   *
   * @returns Rasa training result
   */
  async train(
    samples: NlpSampleFull[],
    entities: NlpEntityFull[],
  ): Promise<any> {
    // NOTE(review): `self` is a redundant alias of `this` (legacy style).
    const self = this;
    const nluData: DatasetType = await self.format(samples, entities);
    // Train samples
    const result = await this.httpService.axiosRef.post(
      buildURL(this.settings.endpoint, `/train`),
      nluData,
      {
        params: {
          token: this.settings.token,
        },
      },
    );
    // Mark samples as trained
    await this.nlpSampleService.updateMany(
      { type: 'train' },
      { trained: true },
    );
    return result;
  }
  /**
   * Perform evaluation request
   *
   * @param samples - Samples to evaluate
   * @param entities - All available entities
   *
   * @returns Evaluation results
   */
  async evaluate(
    samples: NlpSampleFull[],
    entities: NlpEntityFull[],
  ): Promise<any> {
    // NOTE(review): `self` is a redundant alias of `this` (legacy style).
    const self = this;
    const nluTestData: DatasetType = await self.format(samples, entities);
    // Evaluate model with test samples
    return await this.httpService.axiosRef.post(
      buildURL(this.settings.endpoint, `/evaluate`),
      nluTestData,
      {
        params: {
          token: this.settings.token,
        },
      },
    );
  }
  /**
   * Returns only the entities that have strong confidence (> than the threshold), can return an empty result
   *
   * @param nlp - The nlp returned result
   * @param threshold - Whenever to apply threshold filter or not
   *
   * @returns The parsed entities (best guess)
   */
  bestGuess(nlp: NlpParseResultType, threshold: boolean): Nlp.ParseEntities {
    try {
      let minConfidence = 0;
      // Shallow copy: the entity objects themselves are still shared with `nlp`.
      const guess: Nlp.ParseEntities = {
        entities: nlp.entities.slice(),
      };
      if (threshold) {
        // NOTE(review): this local shadows the boolean `threshold` parameter;
        // it holds the setting value, which may be a string or a number.
        const threshold = this.settings.threshold;
        minConfidence =
          typeof threshold === 'string'
            ? Number.parseFloat(threshold)
            : threshold;
        guess.entities = guess.entities
          .map((e) => {
            // Normalize confidences that may have been serialized as strings.
            e.confidence =
              typeof e.confidence === 'string'
                ? Number.parseFloat(e.confidence)
                : e.confidence;
            return e;
          })
          .filter((e) => e.confidence >= minConfidence);
        // Get past threshold and the highest confidence for the same entity
        // .filter((e, idx, self) => {
        //   const sameEntities = self.filter((s) => s.entity === e.entity);
        //   const max = Math.max.apply(Math, sameEntities.map((e) => { return e.confidence; }));
        //   return e.confidence === max;
        // });
      }
      // Promote the intent/language traits to entities when confident enough.
      ['intent', 'language'].forEach((trait) => {
        if (trait in nlp && (nlp as any)[trait].confidence >= minConfidence) {
          guess.entities.push({
            entity: trait,
            value: (nlp as any)[trait].name,
            confidence: (nlp as any)[trait].confidence,
          });
        }
      });
      return guess;
    } catch (e) {
      this.logger.error(
        'NLP RasaAdapter : Unable to parse nlp result to extract best guess!',
        e,
      );
      // Fail-safe: an unparsable result yields no entities instead of throwing.
      return {
        entities: [],
      };
    }
  }
  /**
   * Returns only the entities that have strong confidence (> than the threshold), can return an empty result
   *
   * @param text - The text to parse
   * @param threshold - Whenever to apply threshold filter or not
   * @param project - Whenever to request a specific model
   *
   * @returns The parsed entities
   */
  async parse(
    text: string,
    threshold: boolean,
    project: string = 'current',
  ): Promise<Nlp.ParseEntities> {
    try {
      const { data: nlp } =
        await this.httpService.axiosRef.post<NlpParseResultType>(
          buildURL(this.settings.endpoint, '/parse'),
          {
            q: text,
            project,
          },
          {
            params: {
              token: this.settings.token,
            },
          },
        );
      return this.bestGuess(nlp, threshold);
    } catch (err) {
      this.logger.error('NLP RasaAdapter : Unable to parse nlp', err);
      throw err;
    }
  }
}