Mirror of https://github.com/hexastack/hexabot (synced 2025-06-26 18:27:28 +00:00)

feat: define language as attribute instead of entity

Parent: 432193cad9
Commit: 56c6a5306b
@@ -11,6 +11,7 @@ import { Injectable, Optional } from '@nestjs/common';
 import { InjectModel } from '@nestjs/mongoose';
 import { Model } from 'mongoose';
 
+import { LanguageService } from '@/i18n/services/language.service';
 import { LoggerService } from '@/logger/logger.service';
 import { NlpSampleCreateDto } from '@/nlp/dto/nlp-sample.dto';
 import { NlpSampleState } from '@/nlp/schemas/types';
@@ -36,10 +37,13 @@ export class MessageRepository extends BaseRepository<
 
   private readonly logger: LoggerService;
 
+  private readonly languageService: LanguageService;
+
   constructor(
     @InjectModel(Message.name) readonly model: Model<AnyMessage>,
     @Optional() nlpSampleService?: NlpSampleService,
     @Optional() logger?: LoggerService,
+    @Optional() languageService?: LanguageService,
   ) {
     super(
       model,
@@ -49,6 +53,7 @@ export class MessageRepository extends BaseRepository<
     );
     this.logger = logger;
     this.nlpSampleService = nlpSampleService;
+    this.languageService = languageService;
   }
 
   /**
@@ -72,10 +77,12 @@ export class MessageRepository extends BaseRepository<
       'message' in _doc &&
       'text' in _doc.message
     ) {
+      const defaultLang = await this.languageService?.getDefaultLanguage();
       const record: NlpSampleCreateDto = {
         text: _doc.message.text,
         type: NlpSampleState.inbox,
         trained: false,
+        language: defaultLang.id,
       };
       try {
         await this.nlpSampleService.findOneOrCreate(record, record);
@@ -29,6 +29,13 @@ export const baseNlpEntity = {
   builtin: true,
 };
 
+export const baseLanguage = {
+  ...modelInstance,
+  title: 'English',
+  code: 'en',
+  default: true,
+};
+
 export const entitiesMock: NlpEntityFull[] = [
   {
     ...baseNlpEntity,
@@ -89,6 +96,7 @@ export const samplesMock: NlpSampleFull[] = [
     ],
     trained: false,
     type: NlpSampleState.train,
+    language: baseLanguage,
   },
   {
     ...modelInstance,
@@ -112,5 +120,6 @@ export const samplesMock: NlpSampleFull[] = [
     ],
     trained: false,
     type: NlpSampleState.train,
+    language: baseLanguage,
   },
 ];
@@ -73,7 +73,7 @@ export class I18nModule extends NativeI18nModule {
         TranslationService,
         TranslationSeeder,
       ]),
-      exports: exports.concat(I18nService),
+      exports: exports.concat(I18nService, LanguageService),
     };
   }
 }
@@ -7,8 +7,15 @@
  * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited.
  */
 
-import { Injectable } from '@nestjs/common';
+import { CACHE_MANAGER } from '@nestjs/cache-manager';
+import { Inject, Injectable } from '@nestjs/common';
+import { Cache } from 'cache-manager';
 
+import {
+  DEFAULT_LANGUAGE_CACHE_KEY,
+  LANGUAGES_CACHE_KEY,
+} from '@/utils/constants/cache';
+import { Cacheable } from '@/utils/decorators/cacheable.decorator';
 import { BaseService } from '@/utils/generics/base-service';
 
 import { LanguageRepository } from '../repositories/language.repository';
@@ -16,7 +23,37 @@ import { Language } from '../schemas/language.schema';
 
 @Injectable()
 export class LanguageService extends BaseService<Language> {
-  constructor(readonly repository: LanguageRepository) {
+  constructor(
+    readonly repository: LanguageRepository,
+    @Inject(CACHE_MANAGER) private readonly cacheManager: Cache,
+  ) {
     super(repository);
   }
+
+  /**
+   * Retrieves all available languages from the repository.
+   *
+   * @returns A promise that resolves to an object where each key is a language code
+   * and the corresponding value is the `Language` object.
+   */
+  @Cacheable(LANGUAGES_CACHE_KEY)
+  async getLanguages() {
+    const languages = await this.findAll();
+    return languages.reduce((acc, curr) => {
+      return {
+        ...acc,
+        [curr.code]: curr,
+      };
+    }, {});
+  }
+
+  /**
+   * Retrieves the default language from the repository.
+   *
+   * @returns A promise that resolves to the default `Language` object.
+   */
+  @Cacheable(DEFAULT_LANGUAGE_CACHE_KEY)
+  async getDefaultLanguage() {
+    return await this.findOne({ default: true });
+  }
 }
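For reference, a minimal usage sketch of the two cached helpers above (illustration only, not part of the diff): it mirrors the code-or-default fallback that the CSV import path in NlpSampleController applies later in this commit.

import { Language } from '@/i18n/schemas/language.schema';
import { LanguageService } from '@/i18n/services/language.service';

// Resolve a language code to a Language document, falling back to the default.
// Both helpers are memoized by @Cacheable under LANGUAGES_CACHE_KEY and
// DEFAULT_LANGUAGE_CACHE_KEY, so repeated calls avoid extra MongoDB queries.
async function resolveLanguage(
  languageService: LanguageService,
  code?: string,
): Promise<Language> {
  const languages = (await languageService.getLanguages()) as Record<
    string,
    Language
  >;
  const defaultLanguage = await languageService.getDefaultLanguage();
  return code && code in languages ? languages[code] : defaultLanguage;
}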
@@ -18,7 +18,10 @@ import { Test, TestingModule } from '@nestjs/testing';
 import { AttachmentRepository } from '@/attachment/repositories/attachment.repository';
 import { AttachmentModel } from '@/attachment/schemas/attachment.schema';
 import { AttachmentService } from '@/attachment/services/attachment.service';
+import { LanguageRepository } from '@/i18n/repositories/language.repository';
+import { Language, LanguageModel } from '@/i18n/schemas/language.schema';
 import { I18nService } from '@/i18n/services/i18n.service';
+import { LanguageService } from '@/i18n/services/language.service';
 import { LoggerService } from '@/logger/logger.service';
 import { SettingRepository } from '@/setting/repositories/setting.repository';
 import { SettingModel } from '@/setting/schemas/setting.schema';
@@ -57,7 +60,9 @@ describe('NlpSampleController', () => {
   let nlpEntityService: NlpEntityService;
   let nlpValueService: NlpValueService;
   let attachmentService: AttachmentService;
+  let languageService: LanguageService;
   let byeJhonSampleId: string;
+  let languages: Language[];
 
   beforeAll(async () => {
     const module: TestingModule = await Test.createTestingModule({
@@ -74,6 +79,7 @@ describe('NlpSampleController', () => {
           NlpEntityModel,
           NlpValueModel,
           SettingModel,
+          LanguageModel,
         ]),
       ],
       providers: [
@@ -88,6 +94,8 @@ describe('NlpSampleController', () => {
         NlpValueRepository,
         NlpSampleService,
         NlpSampleEntityService,
+        LanguageRepository,
+        LanguageService,
         EventEmitter2,
         NlpService,
         SettingRepository,
@@ -122,6 +130,8 @@ describe('NlpSampleController', () => {
       })
     ).id;
     attachmentService = module.get<AttachmentService>(AttachmentService);
+    languageService = module.get<LanguageService>(LanguageService);
+    languages = await languageService.findAll();
   });
   afterAll(async () => {
     await closeInMongodConnection();
@@ -134,7 +144,7 @@ describe('NlpSampleController', () => {
      const pageQuery = getPageQuery<NlpSample>({ sort: ['text', 'desc'] });
      const result = await nlpSampleController.findPage(
        pageQuery,
-       ['entities'],
+       ['language', 'entities'],
        {},
      );
      const nlpSamples = await nlpSampleService.findAll();
@@ -146,6 +156,7 @@ describe('NlpSampleController', () => {
          entities: nlpSampleEntities.filter((currSampleEntity) => {
            return currSampleEntity.sample === currSample.id;
          }),
+         language: languages.find((lang) => lang.id === currSample.language),
        };
        acc.push(sampleWithEntities);
        return acc;
@@ -163,7 +174,12 @@ describe('NlpSampleController', () => {
        ['invalidCriteria'],
        {},
      );
-     expect(result).toEqualPayload(nlpSampleFixtures);
+     expect(result).toEqualPayload(
+       nlpSampleFixtures.map((sample) => ({
+         ...sample,
+         language: languages[sample.language].id,
+       })),
+     );
    });
  });
 
@@ -177,14 +193,19 @@ describe('NlpSampleController', () => {
 
  describe('create', () => {
    it('should create nlp sample', async () => {
+     const enLang = await languageService.findOne({ code: 'en' });
      const nlSample: NlpSampleDto = {
        text: 'text1',
        trained: true,
        type: NlpSampleState.test,
        entities: [],
+       language: enLang.id,
      };
      const result = await nlpSampleController.create(nlSample);
-     expect(result).toEqualPayload(nlSample);
+     expect(result).toEqualPayload({
+       ...nlSample,
+       language: enLang,
+     });
    });
  });
 
@@ -209,7 +230,10 @@ describe('NlpSampleController', () => {
      const result = await nlpSampleController.findOne(yessSample.id, [
        'invalidCreteria',
      ]);
-     expect(result).toEqualPayload(nlpSampleFixtures[0]);
+     expect(result).toEqualPayload({
+       ...nlpSampleFixtures[0],
+       language: languages[nlpSampleFixtures[0].language].id,
+     });
    });
 
    it('should find a nlp sample and populate its entities', async () => {
@@ -225,6 +249,7 @@ describe('NlpSampleController', () => {
      const samplesWithEntities = {
        ...nlpSampleFixtures[0],
        entities: [yessSampleEntity],
+       language: languages[nlpSampleFixtures[0].language],
      };
      expect(result).toEqualPayload(samplesWithEntities);
    });
@@ -241,6 +266,9 @@ describe('NlpSampleController', () => {
      const yessSample = await nlpSampleService.findOne({
        text: 'yess',
      });
+     const frLang = await languageService.findOne({
+       code: 'fr',
+     });
      const result = await nlpSampleController.updateOne(yessSample.id, {
        text: 'updated',
        trained: true,
@@ -251,6 +279,7 @@ describe('NlpSampleController', () => {
          value: 'update',
        },
      ],
+     language: frLang.id,
    });
    const updatedSample = {
      text: 'updated',
@@ -263,19 +292,25 @@ describe('NlpSampleController', () => {
          value: expect.stringMatching(/^[a-z0-9]+$/),
        },
      ],
+     language: frLang,
    };
    expect(result.text).toEqual(updatedSample.text);
    expect(result.type).toEqual(updatedSample.type);
    expect(result.trained).toEqual(updatedSample.trained);
    expect(result.entities).toMatchObject(updatedSample.entities);
+   expect(result.language).toEqualPayload(updatedSample.language);
  });
 
  it('should throw exception when nlp sample id not found', async () => {
+   const frLang = await languageService.findOne({
+     code: 'fr',
+   });
    await expect(
      nlpSampleController.updateOne(byeJhonSampleId, {
        text: 'updated',
        trained: true,
        type: NlpSampleState.test,
+       language: frLang.id,
      }),
    ).rejects.toThrow(NotFoundException);
  });
@@ -352,7 +387,7 @@ describe('NlpSampleController', () => {
      ).id;
      const mockCsvData: string = [
        `text,intent,language`,
-       `Was kostet dieser bmw,preis,de`,
+       `How much does a BMW cost?,price,en`,
      ].join('\n');
      jest.spyOn(fs, 'existsSync').mockReturnValueOnce(true);
      jest.spyOn(fs, 'readFileSync').mockReturnValueOnce(mockCsvData);
@@ -361,17 +396,14 @@ describe('NlpSampleController', () => {
      const intentEntityResult = await nlpEntityService.findOne({
        name: 'intent',
      });
-     const languageEntityResult = await nlpEntityService.findOne({
-       name: 'language',
-     });
-     const preisValueResult = await nlpValueService.findOne({
-       value: 'preis',
-     });
-     const deValueResult = await nlpValueService.findOne({
-       value: 'de',
+     const priceValueResult = await nlpValueService.findOne({
+       value: 'price',
      });
      const textSampleResult = await nlpSampleService.findOne({
-       text: 'Was kostet dieser bmw',
+       text: 'How much does a BMW cost?',
+     });
+     const language = await languageService.findOne({
+       code: 'en',
      });
      const intentEntity = {
        name: 'intent',
@@ -379,40 +411,24 @@ describe('NlpSampleController', () => {
        doc: '',
        builtin: false,
      };
-     const languageEntity = {
-       name: 'language',
-       lookups: ['trait'],
-       builtin: false,
-       doc: '',
-     };
-     const preisVlueEntity = await nlpEntityService.findOne({
+     const priceValueEntity = await nlpEntityService.findOne({
        name: 'intent',
      });
-     const preisValue = {
-       value: 'preis',
+     const priceValue = {
+       value: 'price',
        expressions: [],
        builtin: false,
-       entity: preisVlueEntity.id,
-     };
-     const deValueEntity = await nlpEntityService.findOne({
-       name: 'language',
-     });
-     const deValue = {
-       value: 'de',
-       expressions: [],
-       builtin: false,
-       entity: deValueEntity.id,
+       entity: priceValueEntity.id,
      };
      const textSample = {
-       text: 'Was kostet dieser bmw',
+       text: 'How much does a BMW cost?',
        trained: false,
        type: 'train',
+       language: language.id,
      };
 
-     expect(languageEntityResult).toEqualPayload(languageEntity);
      expect(intentEntityResult).toEqualPayload(intentEntity);
-     expect(preisValueResult).toEqualPayload(preisValue);
-     expect(deValueResult).toEqualPayload(deValue);
+     expect(priceValueResult).toEqualPayload(priceValue);
      expect(textSampleResult).toEqualPayload(textSample);
      expect(result).toEqual({ success: true });
    });
@@ -34,6 +34,7 @@ import Papa from 'papaparse';
 
 import { AttachmentService } from '@/attachment/services/attachment.service';
 import { config } from '@/config';
+import { LanguageService } from '@/i18n/services/language.service';
 import { CsrfInterceptor } from '@/interceptors/csrf.interceptor';
 import { LoggerService } from '@/logger/logger.service';
 import { BaseController } from '@/utils/generics/base-controller';
@@ -70,6 +71,7 @@ export class NlpSampleController extends BaseController<
     private readonly nlpEntityService: NlpEntityService,
     private readonly logger: LoggerService,
     private readonly nlpService: NlpService,
+    private readonly languageService: LanguageService,
   ) {
     super(nlpSampleService);
   }
@@ -131,9 +133,14 @@ export class NlpSampleController extends BaseController<
       nlpEntities,
     );
 
+    const language = await this.languageService.findOne(
+      createNlpSampleDto.language,
+    );
+
     return {
       ...nlpSample,
       entities,
+      language,
     };
   }
 
@@ -265,10 +272,9 @@ export class NlpSampleController extends BaseController<
     @Param('id') id: string,
     @Body() updateNlpSampleDto: NlpSampleDto,
   ): Promise<NlpSampleFull> {
-    const { text, type, entities } = updateNlpSampleDto;
+    const { entities, ...sampleAttrs } = updateNlpSampleDto;
     const sample = await this.nlpSampleService.updateOne(id, {
-      text,
-      type,
+      ...sampleAttrs,
       trained: false,
    });
 
@@ -282,8 +288,11 @@ export class NlpSampleController extends BaseController<
    const updatedSampleEntities =
      await this.nlpSampleEntityService.storeSampleEntities(sample, entities);
 
+   const language = await this.languageService.findOne(sampleAttrs.language);
+
    return {
      ...sample,
+     language,
      entities: updatedSampleEntities,
    };
  }
@@ -366,6 +375,8 @@ export class NlpSampleController extends BaseController<
      }
      // Remove data with no intent
      const filteredData = result.data.filter((d) => d.intent !== 'none');
+     const languages = await this.languageService.getLanguages();
+     const defaultLanguage = await this.languageService.getDefaultLanguage();
      // Reduce function to ensure executing promises one by one
      for (const d of filteredData) {
        try {
@@ -375,15 +386,25 @@ export class NlpSampleController extends BaseController<
          });
 
          // Skip if sample already exists
-
          if (Array.isArray(existingSamples) && existingSamples.length > 0) {
            continue;
          }
 
+         // Fallback to default language if 'language' is missing or invalid
+         if (!d.language || !(d.language in languages)) {
+           if (d.language) {
+             this.logger.warn(
+               `Language "${d.language}" does not exist, falling back to default.`,
+             );
+           }
+           d.language = defaultLanguage.code;
+         }
+
          // Create a new sample dto
          const sample: NlpSampleCreateDto = {
            text: d.text,
            trained: false,
+           language: languages[d.language].id,
          };
 
          // Create a new sample entity dto
@@ -16,27 +16,38 @@ import {
   IsString,
 } from 'class-validator';
 
+import { IsObjectId } from '@/utils/validation-rules/is-object-id';
+
 import { NlpSampleEntityValue, NlpSampleState } from '../schemas/types';
 
 export class NlpSampleCreateDto {
-  @ApiProperty({ description: 'nlp sample text', type: String })
+  @ApiProperty({ description: 'NLP sample text', type: String })
   @IsString()
   @IsNotEmpty()
   text: string;
 
-  @ApiPropertyOptional({ description: 'nlp sample is trained', type: Boolean })
+  @ApiPropertyOptional({
+    description: 'If NLP sample is trained',
+    type: Boolean,
+  })
   @IsBoolean()
   @IsOptional()
   trained?: boolean;
 
   @ApiPropertyOptional({
-    description: 'nlp sample type',
+    description: 'NLP sample type',
     enum: Object.values(NlpSampleState),
   })
   @IsString()
   @IsIn(Object.values(NlpSampleState))
   @IsOptional()
   type?: NlpSampleState;
+
+  @ApiProperty({ description: 'NLP sample language', type: String })
+  @IsString()
+  @IsNotEmpty()
+  @IsObjectId({ message: 'Language must be a valid ObjectId' })
+  language: string;
 }
 
 export class NlpSampleDto extends NlpSampleCreateDto {
@@ -11,6 +11,8 @@ import { EventEmitter2 } from '@nestjs/event-emitter';
 import { MongooseModule } from '@nestjs/mongoose';
 import { Test, TestingModule } from '@nestjs/testing';
 
+import { LanguageRepository } from '@/i18n/repositories/language.repository';
+import { Language, LanguageModel } from '@/i18n/schemas/language.schema';
 import { nlpSampleFixtures } from '@/utils/test/fixtures/nlpsample';
 import {
   installNlpSampleEntityFixtures,
@@ -37,8 +39,10 @@ import { NlpValueModel } from '../schemas/nlp-value.schema';
 describe('NlpSampleEntityRepository', () => {
   let nlpSampleEntityRepository: NlpSampleEntityRepository;
   let nlpEntityRepository: NlpEntityRepository;
+  let languageRepository: LanguageRepository;
   let nlpSampleEntities: NlpSampleEntity[];
   let nlpEntities: NlpEntity[];
+  let languages: Language[];
 
   beforeAll(async () => {
     const module: TestingModule = await Test.createTestingModule({
@@ -49,12 +53,14 @@ describe('NlpSampleEntityRepository', () => {
           NlpEntityModel,
           NlpValueModel,
           NlpSampleModel,
+          LanguageModel,
         ]),
       ],
       providers: [
         NlpSampleEntityRepository,
         NlpEntityRepository,
         NlpValueRepository,
+        LanguageRepository,
         EventEmitter2,
       ],
     }).compile();
@@ -62,8 +68,10 @@ describe('NlpSampleEntityRepository', () => {
       NlpSampleEntityRepository,
     );
     nlpEntityRepository = module.get<NlpEntityRepository>(NlpEntityRepository);
+    languageRepository = module.get<LanguageRepository>(LanguageRepository);
    nlpSampleEntities = await nlpSampleEntityRepository.findAll();
    nlpEntities = await nlpEntityRepository.findAll();
+   languages = await languageRepository.findAll();
  });
 
  afterAll(async () => {
@@ -81,7 +89,10 @@ describe('NlpSampleEntityRepository', () => {
        ...nlpSampleEntityFixtures[0],
        entity: nlpEntities[0],
        value: { ...nlpValueFixtures[0], entity: nlpEntities[0].id },
-       sample: nlpSampleFixtures[0],
+       sample: {
+         ...nlpSampleFixtures[0],
+         language: languages[nlpSampleFixtures[0].language].id,
+       },
      });
    });
  });
@@ -117,7 +128,10 @@ describe('NlpSampleEntityRepository', () => {
        ...curr,
        entity: nlpEntities[curr.entity],
        value: nlpValueFixturesWithEntities[curr.value],
-       sample: nlpSampleFixtures[curr.sample],
+       sample: {
+         ...nlpSampleFixtures[curr.sample],
+         language: languages[nlpSampleFixtures[curr.sample].language].id,
+       },
      };
      acc.push(sampleEntityWithPopulate);
      return acc;
@@ -11,6 +11,8 @@ import { EventEmitter2 } from '@nestjs/event-emitter';
 import { MongooseModule } from '@nestjs/mongoose';
 import { Test, TestingModule } from '@nestjs/testing';
 
+import { LanguageRepository } from '@/i18n/repositories/language.repository';
+import { Language, LanguageModel } from '@/i18n/schemas/language.schema';
 import { nlpSampleFixtures } from '@/utils/test/fixtures/nlpsample';
 import { installNlpSampleEntityFixtures } from '@/utils/test/fixtures/nlpsampleentity';
 import { getPageQuery } from '@/utils/test/pagination';
@@ -30,18 +32,25 @@ import { NlpSampleModel, NlpSample } from '../schemas/nlp-sample.schema';
 describe('NlpSampleRepository', () => {
   let nlpSampleRepository: NlpSampleRepository;
   let nlpSampleEntityRepository: NlpSampleEntityRepository;
+  let languageRepository: LanguageRepository;
   let nlpSampleEntity: NlpSampleEntity;
   let noNlpSample: NlpSample;
+  let languages: Language[];
 
   beforeAll(async () => {
     const module: TestingModule = await Test.createTestingModule({
       imports: [
         rootMongooseTestModule(installNlpSampleEntityFixtures),
-        MongooseModule.forFeature([NlpSampleModel, NlpSampleEntityModel]),
+        MongooseModule.forFeature([
+          NlpSampleModel,
+          NlpSampleEntityModel,
+          LanguageModel,
+        ]),
       ],
       providers: [
         NlpSampleRepository,
         NlpSampleEntityRepository,
+        LanguageRepository,
         EventEmitter2,
       ],
     }).compile();
@@ -49,10 +58,12 @@ describe('NlpSampleRepository', () => {
     nlpSampleEntityRepository = module.get<NlpSampleEntityRepository>(
       NlpSampleEntityRepository,
     );
+    languageRepository = module.get<LanguageRepository>(LanguageRepository);
    noNlpSample = await nlpSampleRepository.findOne({ text: 'No' });
    nlpSampleEntity = await nlpSampleEntityRepository.findOne({
      sample: noNlpSample.id,
    });
+   languages = await languageRepository.findAll();
  });
 
  afterAll(async () => {
@@ -69,6 +80,7 @@ describe('NlpSampleRepository', () => {
      expect(result).toEqualPayload({
        ...nlpSampleFixtures[1],
        entities: [nlpSampleEntity],
+       language: languages[nlpSampleFixtures[1].language],
      });
    });
  });
@@ -92,6 +104,7 @@ describe('NlpSampleRepository', () => {
        entities: nlpSampleEntities.filter((currSampleEntity) => {
          return currSampleEntity.sample === currSample.id;
        }),
+       language: languages.find((lang) => currSample.language === lang.id),
      };
      acc.push(sampleWithEntities);
      return acc;
@@ -8,9 +8,10 @@
  */
 
 import { ModelDefinition, Prop, Schema, SchemaFactory } from '@nestjs/mongoose';
-import { Exclude, Type } from 'class-transformer';
-import { THydratedDocument } from 'mongoose';
+import { Exclude, Transform, Type } from 'class-transformer';
+import { THydratedDocument, Schema as MongooseSchema } from 'mongoose';
 
+import { Language } from '@/i18n/schemas/language.schema';
 import { BaseSchema } from '@/utils/generics/base-schema';
 import { LifecycleHookManager } from '@/utils/generics/lifecycle-hook-manager';
 import { TFilterPopulateFields } from '@/utils/types/filter.types';
@@ -41,16 +42,32 @@ export class NlpSampleStub extends BaseSchema {
     default: NlpSampleState.train,
   })
   type?: keyof typeof NlpSampleState;
+
+  /**
+   * The language of the sample.
+   */
+  @Prop({
+    type: MongooseSchema.Types.ObjectId,
+    ref: 'Language',
+    required: true,
+  })
+  language: unknown;
 }
 
 @Schema({ timestamps: true })
 export class NlpSample extends NlpSampleStub {
+  @Transform(({ obj }) => obj.language.toString())
+  language: string;
+
   @Exclude()
   entities?: never;
 }
 
 @Schema({ timestamps: true })
 export class NlpSampleFull extends NlpSampleStub {
+  @Type(() => Language)
+  language: Language;
+
   @Type(() => NlpSampleEntity)
   entities: NlpSampleEntity[];
 }
@@ -75,4 +92,7 @@ export type NlpSamplePopulate = keyof TFilterPopulateFields<
   NlpSampleStub
 >;
 
-export const NLP_SAMPLE_POPULATE: NlpSamplePopulate[] = ['entities'];
+export const NLP_SAMPLE_POPULATE: NlpSamplePopulate[] = [
+  'language',
+  'entities',
+];
@@ -10,12 +10,6 @@
 import { NlpEntityCreateDto } from '../dto/nlp-entity.dto';
 
 export const nlpEntityModels: NlpEntityCreateDto[] = [
-  {
-    name: 'language',
-    lookups: ['trait'],
-    doc: `"language" refers to the language of the text sent by the end user`,
-    builtin: true,
-  },
   {
     name: 'intent',
     lookups: ['trait'],
@@ -7,16 +7,6 @@
  * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited.
  */
 
-import { config } from '@/config';
-
 import { NlpValueCreateDto } from '../dto/nlp-value.dto';
 
-export const nlpValueModels: NlpValueCreateDto[] = [
-  ...config.chatbot.lang.available.map((lang: string) => {
-    return {
-      entity: 'language',
-      value: lang,
-      builtin: true,
-    };
-  }),
-];
+export const nlpValueModels: NlpValueCreateDto[] = [];
@@ -11,6 +11,8 @@ import { EventEmitter2 } from '@nestjs/event-emitter';
 import { MongooseModule } from '@nestjs/mongoose';
 import { Test, TestingModule } from '@nestjs/testing';
 
+import { LanguageRepository } from '@/i18n/repositories/language.repository';
+import { Language, LanguageModel } from '@/i18n/schemas/language.schema';
 import { nlpSampleFixtures } from '@/utils/test/fixtures/nlpsample';
 import {
   installNlpSampleEntityFixtures,
@@ -42,7 +44,9 @@ describe('NlpSampleEntityService', () => {
   let nlpSampleEntityRepository: NlpSampleEntityRepository;
   let nlpSampleEntities: NlpSampleEntity[];
   let nlpEntityRepository: NlpEntityRepository;
+  let languageRepository: LanguageRepository;
   let nlpEntities: NlpEntity[];
+  let languages: Language[];
   let nlpEntityService: NlpEntityService;
   let nlpValueService: NlpValueService;
 
@@ -55,12 +59,14 @@ describe('NlpSampleEntityService', () => {
           NlpEntityModel,
           NlpSampleModel,
           NlpValueModel,
+          LanguageModel,
         ]),
       ],
       providers: [
         NlpSampleEntityRepository,
         NlpEntityRepository,
         NlpValueRepository,
+        LanguageRepository,
         NlpSampleEntityService,
         NlpEntityService,
         NlpValueService,
@@ -74,6 +80,7 @@ describe('NlpSampleEntityService', () => {
       NlpSampleEntityRepository,
     );
     nlpEntityRepository = module.get<NlpEntityRepository>(NlpEntityRepository);
+    languageRepository = module.get<LanguageRepository>(LanguageRepository);
     nlpSampleEntityService = module.get<NlpSampleEntityService>(
       NlpSampleEntityService,
     );
@@ -81,6 +88,7 @@ describe('NlpSampleEntityService', () => {
    nlpValueService = module.get<NlpValueService>(NlpValueService);
    nlpSampleEntities = await nlpSampleEntityRepository.findAll();
    nlpEntities = await nlpEntityRepository.findAll();
+   languages = await languageRepository.findAll();
  });
 
  afterAll(async () => {
@@ -98,7 +106,10 @@ describe('NlpSampleEntityService', () => {
      ...nlpSampleEntityFixtures[0],
      entity: nlpEntities[0],
      value: { ...nlpValueFixtures[0], entity: nlpEntities[0].id },
-     sample: nlpSampleFixtures[0],
+     sample: {
+       ...nlpSampleFixtures[0],
+       language: languages[nlpSampleFixtures[0].language].id,
+     },
    };
    expect(result).toEqualPayload(sampleEntityWithPopulate);
  });
@@ -135,7 +146,10 @@ describe('NlpSampleEntityService', () => {
      ...curr,
      entity: nlpEntities[curr.entity],
      value: nlpValueFixturesWithEntities[curr.value],
-     sample: nlpSampleFixtures[curr.sample],
+     sample: {
+       ...nlpSampleFixtures[curr.sample],
+       language: languages[nlpSampleFixtures[curr.sample].language].id,
+     },
    };
    acc.push(sampleEntityWithPopulate);
    return acc;
@@ -11,6 +11,8 @@ import { EventEmitter2 } from '@nestjs/event-emitter';
 import { MongooseModule } from '@nestjs/mongoose';
 import { Test, TestingModule } from '@nestjs/testing';
 
+import { LanguageRepository } from '@/i18n/repositories/language.repository';
+import { Language, LanguageModel } from '@/i18n/schemas/language.schema';
 import { nlpSampleFixtures } from '@/utils/test/fixtures/nlpsample';
 import { installNlpSampleEntityFixtures } from '@/utils/test/fixtures/nlpsampleentity';
 import { getPageQuery } from '@/utils/test/pagination';
@@ -39,8 +41,10 @@ describe('NlpSampleService', () => {
   let nlpSampleService: NlpSampleService;
   let nlpSampleEntityRepository: NlpSampleEntityRepository;
   let nlpSampleRepository: NlpSampleRepository;
+  let languageRepository: LanguageRepository;
   let noNlpSample: NlpSample;
   let nlpSampleEntity: NlpSampleEntity;
+  let languages: Language[];
 
   beforeAll(async () => {
     const module: TestingModule = await Test.createTestingModule({
@@ -51,6 +55,7 @@ describe('NlpSampleService', () => {
           NlpSampleEntityModel,
           NlpValueModel,
           NlpEntityModel,
+          LanguageModel,
         ]),
       ],
       providers: [
@@ -58,6 +63,7 @@ describe('NlpSampleService', () => {
         NlpSampleEntityRepository,
         NlpEntityRepository,
         NlpValueRepository,
+        LanguageRepository,
         NlpSampleService,
         NlpSampleEntityService,
         NlpEntityService,
@@ -73,10 +79,12 @@ describe('NlpSampleService', () => {
     nlpSampleEntityRepository = module.get<NlpSampleEntityRepository>(
       NlpSampleEntityRepository,
     );
+    languageRepository = module.get<LanguageRepository>(LanguageRepository);
    noNlpSample = await nlpSampleService.findOne({ text: 'No' });
    nlpSampleEntity = await nlpSampleEntityRepository.findOne({
      sample: noNlpSample.id,
    });
+   languages = await languageRepository.findAll();
  });
 
  afterAll(async () => {
@@ -91,6 +99,7 @@ describe('NlpSampleService', () => {
      const sampleWithEntities = {
        ...nlpSampleFixtures[1],
        entities: [nlpSampleEntity],
+       language: languages[nlpSampleFixtures[1].language],
      };
      expect(result).toEqualPayload(sampleWithEntities);
    });
@@ -110,6 +119,7 @@ describe('NlpSampleService', () => {
        entities: nlpSampleEntities.filter((currSampleEntity) => {
          return currSampleEntity.sample === currSample.id;
        }),
+       language: languages.find((lang) => lang.id === currSample.language),
      };
      acc.push(sampleWithEntities);
      return acc;
@@ -13,3 +13,7 @@ export const SETTING_CACHE_KEY = 'settings';
 export const PERMISSION_CACHE_KEY = 'permissions';
 
 export const MENU_CACHE_KEY = 'menu';
+
+export const LANGUAGES_CACHE_KEY = 'languages';
+
+export const DEFAULT_LANGUAGE_CACHE_KEY = 'default_language';
api/src/utils/test/fixtures/nlpentity.ts (vendored; 6 changed lines)
@@ -25,12 +25,6 @@ export const nlpEntityFixtures: NlpEntityCreateDto[] = [
     doc: '',
     builtin: false,
   },
-  {
-    name: 'language',
-    lookups: ['trait'],
-    doc: '',
-    builtin: false,
-  },
   {
     name: 'built_in',
     lookups: ['trait'],
api/src/utils/test/fixtures/nlpsample.ts (vendored; 16 changed lines)
@@ -13,23 +13,28 @@ import { NlpSampleCreateDto } from '@/nlp/dto/nlp-sample.dto';
 import { NlpSampleModel, NlpSample } from '@/nlp/schemas/nlp-sample.schema';
 import { NlpSampleState } from '@/nlp/schemas/types';
 
+import { installLanguageFixtures } from './language';
 import { getFixturesWithDefaultValues } from '../defaultValues';
 import { TFixturesDefaultValues } from '../types';
 
 const nlpSamples: NlpSampleCreateDto[] = [
   {
     text: 'yess',
+    language: '0',
   },
   {
     text: 'No',
+    language: '0',
   },
   {
     text: 'Hello',
     trained: true,
+    language: '0',
   },
   {
     text: 'Bye Jhon',
     trained: true,
+    language: '0',
   },
 ];
 
@@ -44,6 +49,15 @@ export const nlpSampleFixtures = getFixturesWithDefaultValues<NlpSample>({
 });
 
 export const installNlpSampleFixtures = async () => {
+  const languages = await installLanguageFixtures();
+
   const NlpSample = mongoose.model(NlpSampleModel.name, NlpSampleModel.schema);
-  return await NlpSample.insertMany(nlpSampleFixtures);
+  return await NlpSample.insertMany(
+    nlpSampleFixtures.map((v) => {
+      return {
+        ...v,
+        language: languages[parseInt(v.language)].id,
+      };
+    }),
+  );
 };
api/src/utils/test/fixtures/nlpvalue.ts (vendored; 6 changed lines)
@@ -45,12 +45,6 @@ export const nlpValueFixtures: NlpValueCreateDto[] = [
     expressions: ['bye', 'bye bye'],
     builtin: true,
   },
-  {
-    entity: '2',
-    value: 'en',
-    expressions: [],
-    builtin: true,
-  },
 ];
 
 export const installNlpValueFixtures = async () => {