fix: apply strict null checks updates to the NLP Module (hexastack/hexabot)

parent 3c1f115a7c
commit 1128fbf379
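This commit applies the same strict-null-checks treatment throughout the NLP module: lookup results are typed as nullable, test-scoped ids are captured with "?.id || null", specs assert with the non-null operator (!) where the installed fixtures guarantee the record exists, and controller code guards missing lookups with an explicit NotFoundException. The sketch below is only an illustration of those patterns under strictNullChecks; the Entity, Repo, and NotFoundException names here are stand-ins and not Hexabot's actual API.

// Illustrative sketch of the null-handling patterns applied in this commit.
// Entity, Repo, and NotFoundException are stand-ins, not Hexabot APIs.
interface Entity {
  id: string;
  name: string;
}

class NotFoundException extends Error {}

class Repo {
  constructor(private readonly rows: Entity[]) {}

  // Under strictNullChecks a lookup that can miss returns Entity | null,
  // not Entity, so every caller has to handle the null case explicitly.
  async findOne(name: string): Promise<Entity | null> {
    return this.rows.find((row) => row.name === name) ?? null;
  }
}

const repo = new Repo([{ id: '1', name: 'intent' }]);

async function demo(): Promise<void> {
  // Pattern 1: capture a possibly-missing id as string | null.
  const intentEntityId: string | null =
    (await repo.findOne('intent'))?.id || null;

  // Pattern 2: in specs, assert presence with "!" when fixtures guarantee it.
  const intentEntity = await repo.findOne('intent');
  console.log(intentEntity!.id === intentEntityId);

  // Pattern 3: in request handlers, guard and throw instead of asserting.
  const missing = await repo.findOne('unknown');
  if (!missing) {
    throw new NotFoundException('Entity with name unknown not found');
  }
}

demo().catch((err) => console.log(err instanceof NotFoundException));

Note the split the diff makes: the spec files assert with "!" because the fixtures are known to be installed, while the controller hunks guard and throw instead, since a missing language is a legitimate runtime condition.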
@@ -27,15 +27,16 @@ import {
   closeInMongodConnection,
   rootMongooseTestModule,
 } from '@/utils/test/test';
+import { TFixtures } from '@/utils/test/types';
 
 import { NlpEntityCreateDto } from '../dto/nlp-entity.dto';
 import { NlpEntityRepository } from '../repositories/nlp-entity.repository';
 import { NlpSampleEntityRepository } from '../repositories/nlp-sample-entity.repository';
 import { NlpValueRepository } from '../repositories/nlp-value.repository';
 import {
-  NlpEntityModel,
   NlpEntity,
   NlpEntityFull,
+  NlpEntityModel,
 } from '../schemas/nlp-entity.schema';
 import { NlpSampleEntityModel } from '../schemas/nlp-sample-entity.schema';
 import { NlpValueModel } from '../schemas/nlp-value.schema';
@@ -48,8 +49,8 @@ describe('NlpEntityController', () => {
   let nlpEntityController: NlpEntityController;
   let nlpValueService: NlpValueService;
   let nlpEntityService: NlpEntityService;
-  let intentEntityId: string;
-  let buitInEntityId: string;
+  let intentEntityId: string | null;
+  let buitInEntityId: string | null;
 
   beforeAll(async () => {
     const module: TestingModule = await Test.createTestingModule({
@@ -76,16 +77,18 @@ describe('NlpEntityController', () => {
     nlpValueService = module.get<NlpValueService>(NlpValueService);
     nlpEntityService = module.get<NlpEntityService>(NlpEntityService);
 
-    intentEntityId = (
-      await nlpEntityService.findOne({
-        name: 'intent',
-      })
-    ).id;
-    buitInEntityId = (
-      await nlpEntityService.findOne({
-        name: 'built_in',
-      })
-    ).id;
+    intentEntityId =
+      (
+        await nlpEntityService.findOne({
+          name: 'intent',
+        })
+      )?.id || null;
+    buitInEntityId =
+      (
+        await nlpEntityService.findOne({
+          name: 'built_in',
+        })
+      )?.id || null;
   });
   afterAll(async () => {
     await closeInMongodConnection();
@@ -107,11 +110,11 @@ describe('NlpEntityController', () => {
             ...curr,
             values: nlpValueFixtures.filter(
               ({ entity }) => parseInt(entity) === index,
-            ),
+            ) as NlpEntityFull['values'],
           });
           return acc;
         },
-        [],
+        [] as TFixtures<NlpEntityFull>[],
       );
       expect(result).toEqualPayload(
        entitiesWithValues.sort((a, b) => {
@@ -170,19 +173,19 @@ describe('NlpEntityController', () => {
 
   describe('deleteOne', () => {
     it('should delete a nlp entity', async () => {
-      const result = await nlpEntityController.deleteOne(intentEntityId);
+      const result = await nlpEntityController.deleteOne(intentEntityId!);
       expect(result.deletedCount).toEqual(1);
     });
 
     it('should throw exception when nlp entity id not found', async () => {
       await expect(
-        nlpEntityController.deleteOne(intentEntityId),
+        nlpEntityController.deleteOne(intentEntityId!),
       ).rejects.toThrow(NotFoundException);
     });
 
     it('should throw exception when nlp entity is builtin', async () => {
       await expect(
-        nlpEntityController.deleteOne(buitInEntityId),
+        nlpEntityController.deleteOne(buitInEntityId!),
       ).rejects.toThrow(MethodNotAllowedException);
     });
   });
@@ -192,10 +195,10 @@ describe('NlpEntityController', () => {
       const firstNameEntity = await nlpEntityService.findOne({
         name: 'first_name',
       });
-      const result = await nlpEntityController.findOne(firstNameEntity.id, []);
+      const result = await nlpEntityController.findOne(firstNameEntity!.id, []);
 
       expect(result).toEqualPayload(
-        nlpEntityFixtures.find(({ name }) => name === 'first_name'),
+        nlpEntityFixtures.find(({ name }) => name === 'first_name')!,
       );
     });
 
@@ -206,9 +209,13 @@ describe('NlpEntityController', () => {
       const firstNameValues = await nlpValueService.findOne({ value: 'jhon' });
       const firstNameWithValues: NlpEntityFull = {
         ...firstNameEntity,
-        values: [firstNameValues],
+        values: firstNameValues ? [firstNameValues] : [],
+        name: firstNameEntity!.name,
+        id: firstNameEntity!.id,
+        createdAt: firstNameEntity!.createdAt,
+        updatedAt: firstNameEntity!.updatedAt,
       };
-      const result = await nlpEntityController.findOne(firstNameEntity.id, [
+      const result = await nlpEntityController.findOne(firstNameEntity!.id, [
         'values',
       ]);
       expect(result).toEqualPayload(firstNameWithValues);
@@ -216,7 +223,7 @@ describe('NlpEntityController', () => {
 
     it('should throw NotFoundException when Id does not exist', async () => {
      await expect(
-        nlpEntityController.findOne(intentEntityId, ['values']),
+        nlpEntityController.findOne(intentEntityId!, ['values']),
      ).rejects.toThrow(NotFoundException);
    });
  });
@@ -233,7 +240,7 @@ describe('NlpEntityController', () => {
         builtin: false,
       };
       const result = await nlpEntityController.updateOne(
-        firstNameEntity.id,
+        firstNameEntity!.id,
         updatedNlpEntity,
       );
       expect(result).toEqualPayload(updatedNlpEntity);
@@ -247,7 +254,7 @@ describe('NlpEntityController', () => {
         builtin: false,
       };
       await expect(
-        nlpEntityController.updateOne(intentEntityId, updateNlpEntity),
+        nlpEntityController.updateOne(intentEntityId!, updateNlpEntity),
       ).rejects.toThrow(NotFoundException);
     });
 
@@ -259,7 +266,7 @@ describe('NlpEntityController', () => {
         builtin: false,
       };
       await expect(
-        nlpEntityController.updateOne(buitInEntityId, updateNlpEntity),
+        nlpEntityController.updateOne(buitInEntityId!, updateNlpEntity),
       ).rejects.toThrow(MethodNotAllowedException);
     });
   });
@@ -276,7 +283,7 @@ describe('NlpEntityController', () => {
             name: 'updated',
           })
         )?.id,
-      ];
+      ] as string[];
 
       const result = await nlpEntityController.deleteMany(entitiesToDelete);
 
@@ -30,6 +30,7 @@ import {
   closeInMongodConnection,
   rootMongooseTestModule,
 } from '@/utils/test/test';
+import { TFixtures } from '@/utils/test/types';
 
 import { NlpSampleDto } from '../dto/nlp-sample.dto';
 import { NlpEntityRepository } from '../repositories/nlp-entity.repository';
@@ -38,7 +39,11 @@ import { NlpSampleRepository } from '../repositories/nlp-sample.repository';
 import { NlpValueRepository } from '../repositories/nlp-value.repository';
 import { NlpEntityModel } from '../schemas/nlp-entity.schema';
 import { NlpSampleEntityModel } from '../schemas/nlp-sample-entity.schema';
-import { NlpSample, NlpSampleModel } from '../schemas/nlp-sample.schema';
+import {
+  NlpSample,
+  NlpSampleFull,
+  NlpSampleModel,
+} from '../schemas/nlp-sample.schema';
 import { NlpValueModel } from '../schemas/nlp-value.schema';
 import { NlpSampleState } from '../schemas/types';
 import { NlpEntityService } from '../services/nlp-entity.service';
@@ -55,7 +60,7 @@ describe('NlpSampleController', () => {
   let nlpEntityService: NlpEntityService;
   let nlpValueService: NlpValueService;
   let languageService: LanguageService;
-  let byeJhonSampleId: string;
+  let byeJhonSampleId: string | null;
   let languages: Language[];
 
   beforeAll(async () => {
@@ -115,11 +120,12 @@ describe('NlpSampleController', () => {
     nlpSampleService = module.get<NlpSampleService>(NlpSampleService);
     nlpEntityService = module.get<NlpEntityService>(NlpEntityService);
     nlpValueService = module.get<NlpValueService>(NlpValueService);
-    byeJhonSampleId = (
-      await nlpSampleService.findOne({
-        text: 'Bye Jhon',
-      })
-    ).id;
+    byeJhonSampleId =
+      (
+        await nlpSampleService.findOne({
+          text: 'Bye Jhon',
+        })
+      )?.id || null;
     languageService = module.get<LanguageService>(LanguageService);
     languages = await languageService.findAll();
   });
@@ -143,15 +149,16 @@ describe('NlpSampleController', () => {
        (acc, currSample) => {
          const sampleWithEntities = {
            ...currSample,
-            entities: nlpSampleEntities.filter((currSampleEntity) => {
-              return currSampleEntity.sample === currSample.id;
-            }),
-            language: languages.find((lang) => lang.id === currSample.language),
+            entities: nlpSampleEntities.filter(
+              (currSampleEntity) => currSampleEntity.sample === currSample.id,
+            ),
+            language:
+              languages.find((lang) => lang.id === currSample.language) || null,
          };
          acc.push(sampleWithEntities);
          return acc;
        },
-        [],
+        [] as TFixtures<NlpSampleFull>[],
      );
 
      expect(result).toEqualPayload(nlpSampleFixturesWithEntities);
@@ -167,7 +174,7 @@ describe('NlpSampleController', () => {
       expect(result).toEqualPayload(
         nlpSampleFixtures.map((sample) => ({
           ...sample,
-          language: languages[sample.language].id,
+          language: sample.language ? languages[sample.language].id : null,
         })),
       );
     });
@@ -201,13 +208,13 @@ describe('NlpSampleController', () => {
 
   describe('deleteOne', () => {
     it('should delete a nlp sample', async () => {
-      const result = await nlpSampleController.deleteOne(byeJhonSampleId);
+      const result = await nlpSampleController.deleteOne(byeJhonSampleId!);
       expect(result.deletedCount).toEqual(1);
     });
 
     it('should throw exception when nlp sample id not found', async () => {
       await expect(
-        nlpSampleController.deleteOne(byeJhonSampleId),
+        nlpSampleController.deleteOne(byeJhonSampleId!),
       ).rejects.toThrow(NotFoundException);
     });
   });
@@ -217,12 +224,14 @@ describe('NlpSampleController', () => {
       const yessSample = await nlpSampleService.findOne({
         text: 'yess',
       });
-      const result = await nlpSampleController.findOne(yessSample.id, [
+      const result = await nlpSampleController.findOne(yessSample!.id, [
         'invalidCreteria',
       ]);
       expect(result).toEqualPayload({
         ...nlpSampleFixtures[0],
-        language: languages[nlpSampleFixtures[0].language].id,
+        language: nlpSampleFixtures[0].language
+          ? languages[nlpSampleFixtures?.[0]?.language]?.id
+          : null,
       });
     });
 
@@ -231,22 +240,24 @@ describe('NlpSampleController', () => {
         text: 'yess',
       });
       const yessSampleEntity = await nlpSampleEntityService.findOne({
-        sample: yessSample.id,
+        sample: yessSample!.id,
       });
-      const result = await nlpSampleController.findOne(yessSample.id, [
+      const result = await nlpSampleController.findOne(yessSample!.id, [
         'entities',
       ]);
       const samplesWithEntities = {
         ...nlpSampleFixtures[0],
         entities: [yessSampleEntity],
-        language: languages[nlpSampleFixtures[0].language],
+        language: nlpSampleFixtures[0].language
+          ? languages[nlpSampleFixtures[0].language]
+          : null,
       };
       expect(result).toEqualPayload(samplesWithEntities);
     });
 
     it('should throw NotFoundException when Id does not exist', async () => {
       await expect(
-        nlpSampleController.findOne(byeJhonSampleId, ['entities']),
+        nlpSampleController.findOne(byeJhonSampleId!, ['entities']),
       ).rejects.toThrow(NotFoundException);
     });
   });
@@ -259,7 +270,7 @@ describe('NlpSampleController', () => {
       const frLang = await languageService.findOne({
         code: 'fr',
       });
-      const result = await nlpSampleController.updateOne(yessSample.id, {
+      const result = await nlpSampleController.updateOne(yessSample!.id, {
         text: 'updated',
         trained: true,
         type: NlpSampleState.test,
@@ -288,12 +299,12 @@ describe('NlpSampleController', () => {
       expect(result.type).toEqual(updatedSample.type);
       expect(result.trained).toEqual(updatedSample.trained);
       expect(result.entities).toMatchObject(updatedSample.entities);
-      expect(result.language).toEqualPayload(updatedSample.language);
+      expect(result.language).toEqualPayload(updatedSample.language!);
     });
 
     it('should throw exception when nlp sample id not found', async () => {
       await expect(
-        nlpSampleController.updateOne(byeJhonSampleId, {
+        nlpSampleController.updateOne(byeJhonSampleId!, {
           text: 'updated',
           trained: true,
           type: NlpSampleState.test,
@@ -366,13 +377,13 @@ describe('NlpSampleController', () => {
         value: 'price',
         expressions: [],
         builtin: false,
-        entity: priceValueEntity.id,
+        entity: priceValueEntity!.id,
       };
       const textSample = {
         text: 'How much does a BMW cost?',
         trained: false,
         type: 'train',
-        language: language.id,
+        language: language!.id,
       };
 
       expect(intentEntityResult).toEqualPayload(intentEntity);
@@ -389,13 +400,13 @@ describe('NlpSampleController', () => {
           await nlpSampleService.findOne({
             text: 'How much does a BMW cost?',
           })
-        ).id,
+        )?.id,
         (
           await nlpSampleService.findOne({
             text: 'text1',
           })
-        ).id,
-      ];
+        )?.id,
+      ] as string[];
 
       const result = await nlpSampleController.deleteMany(samplesToDelete);
 
@@ -91,7 +91,7 @@ export class NlpSampleController extends BaseController<
     );
     const entities = await this.nlpEntityService.findAllAndPopulate();
     const helper = await this.helperService.getDefaultNluHelper();
-    const result = await helper.format(samples, entities);
+    const result = await helper.format?.(samples, entities);
 
     // Sending the JSON data as a file
     const buffer = Buffer.from(JSON.stringify(result));
@@ -128,15 +128,23 @@ export class NlpSampleController extends BaseController<
     }: NlpSampleDto,
   ): Promise<NlpSampleFull> {
     const language = await this.languageService.getLanguageByCode(languageCode);
+
+    if (!language)
+      throw new NotFoundException(
+        `Language with code ${languageCode} not found`,
+      );
+
     const nlpSample = await this.nlpSampleService.create({
       ...createNlpSampleDto,
       language: language.id,
     });
 
-    const entities = await this.nlpSampleEntityService.storeSampleEntities(
-      nlpSample,
-      nlpEntities,
-    );
+    const entities = nlpEntities
+      ? await this.nlpSampleEntityService.storeSampleEntities(
+          nlpSample,
+          nlpEntities,
+        )
+      : [];
 
     return {
       ...nlpSample,
@@ -202,7 +210,7 @@ export class NlpSampleController extends BaseController<
 
     try {
       const helper = await this.helperService.getDefaultNluHelper();
-      const response = await helper.train(samples, entities);
+      const response = await helper.train?.(samples, entities);
       // Mark samples as trained
       await this.nlpSampleService.updateMany(
         { type: 'train' },
@@ -228,7 +236,7 @@ export class NlpSampleController extends BaseController<
       await this.getSamplesAndEntitiesByType('test');
 
     const helper = await this.helperService.getDefaultNluHelper();
-    return await helper.evaluate(samples, entities);
+    return await helper.evaluate?.(samples, entities);
   }
 
   /**
@@ -294,6 +302,14 @@ export class NlpSampleController extends BaseController<
     @Body() { entities, language: languageCode, ...sampleAttrs }: NlpSampleDto,
   ): Promise<NlpSampleFull> {
     const language = await this.languageService.getLanguageByCode(languageCode);
+
+    if (!language) {
+      this.logger.warn(`Unable to Language by languageCode ${languageCode}`);
+      throw new NotFoundException(
+        `Language with languageCode ${languageCode} not found`,
+      );
+    }
+
     const sample = await this.nlpSampleService.updateOne(id, {
       ...sampleAttrs,
       language: language.id,
@@ -308,7 +324,10 @@ export class NlpSampleController extends BaseController<
     await this.nlpSampleEntityService.deleteMany({ sample: id });
 
     const updatedSampleEntities =
-      await this.nlpSampleEntityService.storeSampleEntities(sample, entities);
+      await this.nlpSampleEntityService.storeSampleEntities(
+        sample,
+        entities || [],
+      );
 
     return {
       ...sample,
@@ -22,6 +22,7 @@ import {
   closeInMongodConnection,
   rootMongooseTestModule,
 } from '@/utils/test/test';
+import { TFixtures } from '@/utils/test/types';
 
 import { NlpValueCreateDto } from '../dto/nlp-value.dto';
 import { NlpEntityRepository } from '../repositories/nlp-entity.repository';
@@ -29,7 +30,11 @@ import { NlpSampleEntityRepository } from '../repositories/nlp-sample-entity.repository';
 import { NlpValueRepository } from '../repositories/nlp-value.repository';
 import { NlpEntityModel } from '../schemas/nlp-entity.schema';
 import { NlpSampleEntityModel } from '../schemas/nlp-sample-entity.schema';
-import { NlpValueModel, NlpValue } from '../schemas/nlp-value.schema';
+import {
+  NlpValue,
+  NlpValueFull,
+  NlpValueModel,
+} from '../schemas/nlp-value.schema';
 import { NlpEntityService } from '../services/nlp-entity.service';
 import { NlpValueService } from '../services/nlp-value.service';
 
@@ -39,9 +44,9 @@ describe('NlpValueController', () => {
   let nlpValueController: NlpValueController;
   let nlpValueService: NlpValueService;
   let nlpEntityService: NlpEntityService;
-  let jhonNlpValue: NlpValue;
-  let positiveValue: NlpValue;
-  let negativeValue: NlpValue;
+  let jhonNlpValue: NlpValue | null;
+  let positiveValue: NlpValue | null;
+  let negativeValue: NlpValue | null;
 
   beforeAll(async () => {
     const module: TestingModule = await Test.createTestingModule({
@@ -92,11 +97,13 @@ describe('NlpValueController', () => {
        (acc, curr) => {
          acc.push({
            ...curr,
-            entity: nlpEntityFixtures[parseInt(curr.entity)],
+            entity: nlpEntityFixtures[
+              parseInt(curr.entity)
+            ] as NlpValueFull['entity'],
          });
          return acc;
        },
-        [],
+        [] as TFixtures<NlpValueFull>[],
      );
      expect(result).toEqualPayload(nlpValueFixturesWithEntities);
    });
@@ -120,7 +127,7 @@ describe('NlpValueController', () => {
          acc.push(ValueWithEntities);
          return acc;
        },
-        [],
+        [] as TFixtures<NlpValue>[],
      );
      expect(result).toEqualPayload(nlpValueFixturesWithEntities);
    });
@@ -151,20 +158,20 @@ describe('NlpValueController', () => {
 
   describe('deleteOne', () => {
     it('should delete a nlp Value', async () => {
-      const result = await nlpValueController.deleteOne(jhonNlpValue.id);
+      const result = await nlpValueController.deleteOne(jhonNlpValue!.id);
       expect(result.deletedCount).toEqual(1);
     });
 
     it('should throw exception when nlp Value id not found', async () => {
       await expect(
-        nlpValueController.deleteOne(jhonNlpValue.id),
+        nlpValueController.deleteOne(jhonNlpValue!.id),
       ).rejects.toThrow(NotFoundException);
     });
   });
 
   describe('findOne', () => {
     it('should get a nlp Value', async () => {
-      const result = await nlpValueController.findOne(positiveValue.id, [
+      const result = await nlpValueController.findOne(positiveValue!.id, [
         'invalidCreteria',
       ]);
       const intentNlpEntity = await nlpEntityService.findOne({
@@ -172,7 +179,7 @@ describe('NlpValueController', () => {
       });
       const valueWithEntity = {
         ...nlpValueFixtures[0],
-        entity: intentNlpEntity.id,
+        entity: intentNlpEntity!.id,
       };
 
       expect(result).toEqualPayload(valueWithEntity);
@@ -182,7 +189,7 @@ describe('NlpValueController', () => {
       const intentNlpEntity = await nlpEntityService.findOne({
         name: 'intent',
       });
-      const result = await nlpValueController.findOne(positiveValue.id, [
+      const result = await nlpValueController.findOne(positiveValue!.id, [
         'entity',
       ]);
       const valueWithEntity = {
@@ -194,7 +201,7 @@ describe('NlpValueController', () => {
 
     it('should throw NotFoundException when Id does not exist', async () => {
       await expect(
-        nlpValueController.findOne(jhonNlpValue.id, ['entity']),
+        nlpValueController.findOne(jhonNlpValue!.id, ['entity']),
       ).rejects.toThrow(NotFoundException);
     });
   });
@@ -205,13 +212,13 @@ describe('NlpValueController', () => {
         name: 'intent',
       });
       const updatedValue = {
-        entity: intentNlpEntity.id,
+        entity: intentNlpEntity!.id,
         value: 'updated',
         expressions: [],
         builtin: true,
       };
       const result = await nlpValueController.updateOne(
-        positiveValue.id,
+        positiveValue!.id,
         updatedValue,
       );
       expect(result).toEqualPayload(updatedValue);
@@ -222,8 +229,8 @@ describe('NlpValueController', () => {
         name: 'intent',
       });
       await expect(
-        nlpValueController.updateOne(jhonNlpValue.id, {
-          entity: intentNlpEntity.id,
+        nlpValueController.updateOne(jhonNlpValue!.id, {
+          entity: intentNlpEntity!.id,
           value: 'updated',
           expressions: [],
           builtin: true,
@@ -233,7 +240,7 @@ describe('NlpValueController', () => {
   });
   describe('deleteMany', () => {
     it('should delete multiple nlp values', async () => {
-      const valuesToDelete = [positiveValue.id, negativeValue.id];
+      const valuesToDelete = [positiveValue!.id, negativeValue!.id];
 
       const result = await nlpValueController.deleteMany(valuesToDelete);
 
@@ -18,7 +18,7 @@ import {
   rootMongooseTestModule,
 } from '@/utils/test/test';
 
-import { NlpEntityModel, NlpEntity } from '../schemas/nlp-entity.schema';
+import { NlpEntity, NlpEntityModel } from '../schemas/nlp-entity.schema';
 import { NlpSampleEntityModel } from '../schemas/nlp-sample-entity.schema';
 import { NlpValueModel } from '../schemas/nlp-value.schema';
 
@@ -29,7 +29,7 @@ import { NlpValueRepository } from './nlp-value.repository';
 describe('NlpEntityRepository', () => {
   let nlpEntityRepository: NlpEntityRepository;
   let nlpValueRepository: NlpValueRepository;
-  let firstNameNlpEntity: NlpEntity;
+  let firstNameNlpEntity: NlpEntity | null;
 
   beforeAll(async () => {
     const module: TestingModule = await Test.createTestingModule({
@@ -66,12 +66,12 @@ describe('NlpEntityRepository', () => {
       const intentNlpEntity = await nlpEntityRepository.findOne({
         name: 'intent',
       });
-      const result = await nlpEntityRepository.deleteOne(intentNlpEntity.id);
+      const result = await nlpEntityRepository.deleteOne(intentNlpEntity!.id);
 
       expect(result.deletedCount).toEqual(1);
 
       const intentNlpValues = await nlpValueRepository.find({
-        entity: intentNlpEntity.id,
+        entity: intentNlpEntity!.id,
       });
 
       expect(intentNlpValues.length).toEqual(0);
@@ -81,10 +81,10 @@ describe('NlpEntityRepository', () => {
   describe('findOneAndPopulate', () => {
     it('should return a nlp entity with populate', async () => {
       const firstNameValues = await nlpValueRepository.find({
-        entity: firstNameNlpEntity.id,
+        entity: firstNameNlpEntity!.id,
       });
       const result = await nlpEntityRepository.findOneAndPopulate(
-        firstNameNlpEntity.id,
+        firstNameNlpEntity!.id,
       );
       expect(result).toEqualPayload({
         ...nlpEntityFixtures[1],
@@ -99,15 +99,15 @@ describe('NlpEntityRepository', () => {
         sort: ['name', 'desc'],
       });
       const firstNameValues = await nlpValueRepository.find({
-        entity: firstNameNlpEntity.id,
+        entity: firstNameNlpEntity!.id,
       });
       const result = await nlpEntityRepository.findPageAndPopulate(
-        { _id: firstNameNlpEntity.id },
+        { _id: firstNameNlpEntity!.id },
         pageQuery,
       );
       expect(result).toEqualPayload([
         {
-          id: firstNameNlpEntity.id,
+          id: firstNameNlpEntity!.id,
           ...nlpEntityFixtures[1],
           values: firstNameValues,
         },
@@ -23,14 +23,16 @@ import {
   closeInMongodConnection,
   rootMongooseTestModule,
 } from '@/utils/test/test';
+import { TFixtures } from '@/utils/test/types';
 
-import { NlpEntityModel, NlpEntity } from '../schemas/nlp-entity.schema';
+import { NlpEntity, NlpEntityModel } from '../schemas/nlp-entity.schema';
 import {
-  NlpSampleEntityModel,
   NlpSampleEntity,
+  NlpSampleEntityFull,
+  NlpSampleEntityModel,
 } from '../schemas/nlp-sample-entity.schema';
 import { NlpSampleModel } from '../schemas/nlp-sample.schema';
-import { NlpValueModel } from '../schemas/nlp-value.schema';
+import { NlpValueModel, NlpValueStub } from '../schemas/nlp-value.schema';
 
 import { NlpEntityRepository } from './nlp-entity.repository';
 import { NlpSampleEntityRepository } from './nlp-sample-entity.repository';
@@ -91,7 +93,7 @@ describe('NlpSampleEntityRepository', () => {
         value: { ...nlpValueFixtures[0], entity: nlpEntities[0].id },
         sample: {
           ...nlpSampleFixtures[0],
-          language: languages[nlpSampleFixtures[0].language].id,
+          language: languages[nlpSampleFixtures[0].language!].id,
         },
       });
     });
@@ -115,7 +117,7 @@ describe('NlpSampleEntityRepository', () => {
          acc.push(ValueWithEntities);
          return acc;
        },
-        [],
+        [] as TFixtures<NlpValueStub>[],
      );
      nlpValueFixturesWithEntities[2] = {
        ...nlpValueFixturesWithEntities[2],
@@ -135,7 +137,7 @@ describe('NlpSampleEntityRepository', () => {
        };
        acc.push(sampleEntityWithPopulate);
        return acc;
-      }, []);
+      }, [] as TFixtures<NlpSampleEntityFull>[]);
      expect(result).toEqualPayload(nlpSampleEntityFixturesWithPopulate);
    });
  });
@@ -19,12 +19,17 @@ import {
   closeInMongodConnection,
   rootMongooseTestModule,
 } from '@/utils/test/test';
+import { TFixtures } from '@/utils/test/types';
 
 import {
-  NlpSampleEntityModel,
   NlpSampleEntity,
+  NlpSampleEntityModel,
 } from '../schemas/nlp-sample-entity.schema';
-import { NlpSampleModel, NlpSample } from '../schemas/nlp-sample.schema';
+import {
+  NlpSample,
+  NlpSampleFull,
+  NlpSampleModel,
+} from '../schemas/nlp-sample.schema';
 
 import { NlpSampleEntityRepository } from './nlp-sample-entity.repository';
 import { NlpSampleRepository } from './nlp-sample.repository';
@@ -33,8 +38,8 @@ describe('NlpSampleRepository', () => {
   let nlpSampleRepository: NlpSampleRepository;
   let nlpSampleEntityRepository: NlpSampleEntityRepository;
   let languageRepository: LanguageRepository;
-  let nlpSampleEntity: NlpSampleEntity;
-  let noNlpSample: NlpSample;
+  let nlpSampleEntity: NlpSampleEntity | null;
+  let noNlpSample: NlpSample | null;
   let languages: Language[];
 
   beforeAll(async () => {
@@ -61,7 +66,7 @@ describe('NlpSampleRepository', () => {
     languageRepository = module.get<LanguageRepository>(LanguageRepository);
     noNlpSample = await nlpSampleRepository.findOne({ text: 'No' });
     nlpSampleEntity = await nlpSampleEntityRepository.findOne({
-      sample: noNlpSample.id,
+      sample: noNlpSample!.id,
     });
     languages = await languageRepository.findAll();
   });
@@ -75,12 +80,12 @@ describe('NlpSampleRepository', () => {
   describe('findOneAndPopulate', () => {
     it('should return a nlp Sample with populate', async () => {
       const result = await nlpSampleRepository.findOneAndPopulate(
-        noNlpSample.id,
+        noNlpSample!.id,
       );
       expect(result).toEqualPayload({
         ...nlpSampleFixtures[1],
         entities: [nlpSampleEntity],
-        language: languages[nlpSampleFixtures[1].language],
+        language: languages[nlpSampleFixtures[1].language!],
       });
     });
   });
@@ -104,12 +109,13 @@ describe('NlpSampleRepository', () => {
            entities: nlpSampleEntities.filter((currSampleEntity) => {
              return currSampleEntity.sample === currSample.id;
            }),
-            language: languages.find((lang) => currSample.language === lang.id),
+            language:
+              languages.find((lang) => currSample.language === lang.id) || null,
          };
          acc.push(sampleWithEntities);
          return acc;
        },
-        [],
+        [] as TFixtures<NlpSampleFull>[],
      );
      expect(result).toEqualPayload(nlpSampleFixturesWithEntities);
    });
@@ -130,10 +136,10 @@ describe('NlpSampleRepository', () => {
 
   describe('The deleteCascadeOne function', () => {
     it('should delete a nlp Sample', async () => {
-      const result = await nlpSampleRepository.deleteOne(noNlpSample.id);
+      const result = await nlpSampleRepository.deleteOne(noNlpSample!.id);
       expect(result.deletedCount).toEqual(1);
       const sampleEntities = await nlpSampleEntityRepository.find({
-        sample: noNlpSample.id,
+        sample: noNlpSample!.id,
       });
       expect(sampleEntities.length).toEqual(0);
     });
@@ -18,10 +18,15 @@ import {
   closeInMongodConnection,
   rootMongooseTestModule,
 } from '@/utils/test/test';
+import { TFixtures } from '@/utils/test/types';
 
 import { NlpEntityModel } from '../schemas/nlp-entity.schema';
 import { NlpSampleEntityModel } from '../schemas/nlp-sample-entity.schema';
-import { NlpValue, NlpValueModel } from '../schemas/nlp-value.schema';
+import {
+  NlpValue,
+  NlpValueFull,
+  NlpValueModel,
+} from '../schemas/nlp-value.schema';
 
 import { NlpSampleEntityRepository } from './nlp-sample-entity.repository';
 import { NlpValueRepository } from './nlp-value.repository';
@@ -81,12 +86,14 @@ describe('NlpValueRepository', () => {
        (acc, curr) => {
          const ValueWithEntities = {
            ...curr,
-            entity: nlpEntityFixtures[parseInt(curr.entity)],
+            entity: nlpEntityFixtures[
+              parseInt(curr.entity)
+            ] as NlpValueFull['entity'],
          };
          acc.push(ValueWithEntities);
          return acc;
        },
-        [],
+        [] as TFixtures<NlpValueFull>[],
      );
      expect(result).toEqualPayload(nlpValueFixturesWithEntities);
    });
@@ -21,7 +21,7 @@ import {
 import { NlpEntityRepository } from '../repositories/nlp-entity.repository';
 import { NlpSampleEntityRepository } from '../repositories/nlp-sample-entity.repository';
 import { NlpValueRepository } from '../repositories/nlp-value.repository';
-import { NlpEntityModel, NlpEntity } from '../schemas/nlp-entity.schema';
+import { NlpEntity, NlpEntityModel } from '../schemas/nlp-entity.schema';
 import { NlpSampleEntityModel } from '../schemas/nlp-sample-entity.schema';
 import { NlpValueModel } from '../schemas/nlp-value.schema';
 
@@ -69,7 +69,7 @@ describe('nlpEntityService', () => {
       name: 'intent',
     });
     const result = await nlpEntityService.deleteCascadeOne(
-      intentNlpEntity.id,
+      intentNlpEntity!.id,
     );
     expect(result.deletedCount).toEqual(1);
   });
@@ -81,13 +81,13 @@ describe('nlpEntityService', () => {
       name: 'first_name',
     });
     const result = await nlpEntityService.findOneAndPopulate(
-      firstNameNlpEntity.id,
+      firstNameNlpEntity!.id,
     );
     const firstNameValues = await nlpValueRepository.findOne({
-      entity: firstNameNlpEntity.id,
+      entity: firstNameNlpEntity!.id,
    });
    const entityWithValues = {
-      id: firstNameNlpEntity.id,
+      id: firstNameNlpEntity!.id,
      ...nlpEntityFixtures[1],
      values: [firstNameValues],
    };
@@ -102,15 +102,15 @@ describe('nlpEntityService', () => {
      name: 'first_name',
    });
    const result = await nlpEntityService.findPageAndPopulate(
-      { _id: firstNameNlpEntity.id },
+      { _id: firstNameNlpEntity!.id },
      pageQuery,
    );
    const firstNameValues = await nlpValueRepository.findOne({
-      entity: firstNameNlpEntity.id,
+      entity: firstNameNlpEntity!.id,
    });
    const entitiesWithValues = [
      {
-        id: firstNameNlpEntity.id,
+        id: firstNameNlpEntity!.id,
        ...nlpEntityFixtures[1],
        values: [firstNameValues],
      },
@@ -139,12 +139,12 @@ describe('nlpEntityService', () => {
    const deValue = await nlpValueRepository.findOne({ value: 'de' });
    const storedEntites = [
      {
-        entity: intentEntity.id,
-        value: nameValue.id,
+        entity: intentEntity!.id,
+        value: nameValue!.id,
      },
      {
-        entity: languageEntity.id,
-        value: deValue.id,
+        entity: languageEntity!.id,
+        value: deValue!.id,
      },
    ];
 
@@ -23,17 +23,23 @@ import {
   closeInMongodConnection,
   rootMongooseTestModule,
 } from '@/utils/test/test';
+import { TFixtures } from '@/utils/test/types';
 
 import { NlpEntityRepository } from '../repositories/nlp-entity.repository';
 import { NlpSampleEntityRepository } from '../repositories/nlp-sample-entity.repository';
 import { NlpValueRepository } from '../repositories/nlp-value.repository';
-import { NlpEntityModel, NlpEntity } from '../schemas/nlp-entity.schema';
+import { NlpEntity, NlpEntityModel } from '../schemas/nlp-entity.schema';
 import {
-  NlpSampleEntityModel,
   NlpSampleEntity,
+  NlpSampleEntityFull,
+  NlpSampleEntityModel,
 } from '../schemas/nlp-sample-entity.schema';
 import { NlpSample, NlpSampleModel } from '../schemas/nlp-sample.schema';
-import { NlpValue, NlpValueModel } from '../schemas/nlp-value.schema';
+import {
+  NlpValue,
+  NlpValueModel,
+  NlpValueStub,
+} from '../schemas/nlp-value.schema';
 
 import { NlpEntityService } from './nlp-entity.service';
 import { NlpSampleEntityService } from './nlp-sample-entity.service';
@@ -91,9 +97,7 @@ describe('NlpSampleEntityService', () => {
     languages = await languageRepository.findAll();
   });
 
-  afterAll(async () => {
-    await closeInMongodConnection();
-  });
+  afterAll(closeInMongodConnection);
 
   afterEach(jest.clearAllMocks);
 
@@ -108,7 +112,7 @@ describe('NlpSampleEntityService', () => {
         value: { ...nlpValueFixtures[0], entity: nlpEntities[0].id },
         sample: {
           ...nlpSampleFixtures[0],
-          language: languages[nlpSampleFixtures[0].language].id,
+          language: languages[nlpSampleFixtures[0].language!].id,
         },
       };
       expect(result).toEqualPayload(sampleEntityWithPopulate);
@@ -133,7 +137,7 @@ describe('NlpSampleEntityService', () => {
          acc.push(ValueWithEntities);
          return acc;
        },
-        [],
+        [] as TFixtures<NlpValueStub>[],
      );
      nlpValueFixturesWithEntities[2] = {
        ...nlpValueFixturesWithEntities[2],
@@ -153,7 +157,7 @@ describe('NlpSampleEntityService', () => {
        };
        acc.push(sampleEntityWithPopulate);
        return acc;
-      }, []);
+      }, [] as TFixtures<NlpSampleEntityFull>[]);
      expect(result).toEqualPayload(nlpSampleEntityFixturesWithPopulate);
    });
  });
@ -33,7 +33,11 @@ import {
|
|||||||
NlpSampleEntity,
|
NlpSampleEntity,
|
||||||
NlpSampleEntityModel,
|
NlpSampleEntityModel,
|
||||||
} from '../schemas/nlp-sample-entity.schema';
|
} from '../schemas/nlp-sample-entity.schema';
|
||||||
import { NlpSample, NlpSampleModel } from '../schemas/nlp-sample.schema';
|
import {
|
||||||
|
NlpSample,
|
||||||
|
NlpSampleFull,
|
||||||
|
NlpSampleModel,
|
||||||
|
} from '../schemas/nlp-sample.schema';
|
||||||
import { NlpValueModel } from '../schemas/nlp-value.schema';
|
import { NlpValueModel } from '../schemas/nlp-value.schema';
|
||||||
|
|
||||||
import { NlpEntityService } from './nlp-entity.service';
|
import { NlpEntityService } from './nlp-entity.service';
|
||||||
@ -49,8 +53,8 @@ describe('NlpSampleService', () => {
|
|||||||
let nlpSampleEntityRepository: NlpSampleEntityRepository;
|
let nlpSampleEntityRepository: NlpSampleEntityRepository;
|
||||||
let nlpSampleRepository: NlpSampleRepository;
|
let nlpSampleRepository: NlpSampleRepository;
|
||||||
let languageRepository: LanguageRepository;
|
let languageRepository: LanguageRepository;
|
||||||
let noNlpSample: NlpSample;
|
let noNlpSample: NlpSample | null;
|
||||||
let nlpSampleEntity: NlpSampleEntity;
|
let nlpSampleEntity: NlpSampleEntity | null;
|
||||||
let languages: Language[];
|
let languages: Language[];
|
||||||
|
|
||||||
beforeAll(async () => {
|
beforeAll(async () => {
|
||||||
@ -104,7 +108,7 @@ describe('NlpSampleService', () => {
|
|||||||
languageRepository = module.get<LanguageRepository>(LanguageRepository);
|
languageRepository = module.get<LanguageRepository>(LanguageRepository);
|
||||||
noNlpSample = await nlpSampleService.findOne({ text: 'No' });
|
noNlpSample = await nlpSampleService.findOne({ text: 'No' });
|
||||||
nlpSampleEntity = await nlpSampleEntityRepository.findOne({
|
nlpSampleEntity = await nlpSampleEntityRepository.findOne({
|
||||||
sample: noNlpSample.id,
|
sample: noNlpSample!.id,
|
||||||
});
|
});
|
||||||
languages = await languageRepository.findAll();
|
languages = await languageRepository.findAll();
|
||||||
});
|
});
|
||||||
@ -117,11 +121,11 @@ describe('NlpSampleService', () => {

   describe('findOneAndPopulate', () => {
     it('should return a nlp Sample with populate', async () => {
-      const result = await nlpSampleService.findOneAndPopulate(noNlpSample.id);
+      const result = await nlpSampleService.findOneAndPopulate(noNlpSample!.id);
       const sampleWithEntities = {
         ...nlpSampleFixtures[1],
         entities: [nlpSampleEntity],
-        language: languages[nlpSampleFixtures[1].language],
+        language: languages[nlpSampleFixtures[1].language!],
       };
       expect(result).toEqualPayload(sampleWithEntities);
     });
@ -141,12 +145,13 @@ describe('NlpSampleService', () => {
           entities: nlpSampleEntities.filter((currSampleEntity) => {
             return currSampleEntity.sample === currSample.id;
           }),
-          language: languages.find((lang) => lang.id === currSample.language),
+          language:
+            languages.find((lang) => lang.id === currSample.language) || null,
         };
         acc.push(sampleWithEntities);
         return acc;
       },
-      [],
+      [] as NlpSampleFull[],
     );
     expect(result).toEqualPayload(nlpSampleFixturesWithEntities);
   });
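The || null fallback above is the usual bridge between Array.prototype.find, which yields T | undefined, and a populated field typed as nullable rather than optional. Sketched in isolation with simplified, hypothetical types (not the project's schemas):

interface Language { id: string; code: string }

// Hypothetical populated shape: language is nullable, not optional.
interface PopulatedSample { text: string; language: Language | null }

const languages: Language[] = [{ id: '1', code: 'en' }];

function populate(text: string, languageId: string): PopulatedSample {
  return {
    text,
    // find() yields Language | undefined; coalesce to null to match the field type.
    language: languages.find((lang) => lang.id === languageId) || null,
  };
}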
@ -167,7 +172,7 @@ describe('NlpSampleService', () => {

   describe('The deleteCascadeOne function', () => {
     it('should delete a nlp Sample', async () => {
-      const result = await nlpSampleService.deleteOne(noNlpSample.id);
+      const result = await nlpSampleService.deleteOne(noNlpSample!.id);
       expect(result.deletedCount).toEqual(1);
     });
   });
@ -10,6 +10,7 @@ import { EventEmitter2 } from '@nestjs/event-emitter';
 import { MongooseModule } from '@nestjs/mongoose';
 import { Test, TestingModule } from '@nestjs/testing';

+import { BaseSchema } from '@/utils/generics/base-schema';
 import { nlpEntityFixtures } from '@/utils/test/fixtures/nlpentity';
 import {
   installNlpValueFixtures,
@ -26,7 +27,11 @@ import { NlpSampleEntityRepository } from '../repositories/nlp-sample-entity.rep
 import { NlpValueRepository } from '../repositories/nlp-value.repository';
 import { NlpEntity, NlpEntityModel } from '../schemas/nlp-entity.schema';
 import { NlpSampleEntityModel } from '../schemas/nlp-sample-entity.schema';
-import { NlpValue, NlpValueModel } from '../schemas/nlp-value.schema';
+import {
+  NlpValue,
+  NlpValueFull,
+  NlpValueModel,
+} from '../schemas/nlp-value.schema';

 import { NlpEntityService } from './nlp-entity.service';
 import { NlpValueService } from './nlp-value.service';
@ -89,12 +94,14 @@ describe('NlpValueService', () => {
         (acc, curr) => {
           const ValueWithEntities = {
             ...curr,
-            entity: nlpEntityFixtures[parseInt(curr.entity)],
+            entity: nlpEntityFixtures[
+              parseInt(curr.entity)
+            ] as NlpValueFull['entity'],
           };
           acc.push(ValueWithEntities);
           return acc;
         },
-        [],
+        [] as Omit<NlpValueFull, keyof BaseSchema>[],
       );
       expect(result).toEqualPayload(nlpValueFixturesWithEntities);
     });
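Typing the expected payload as Omit<NlpValueFull, keyof BaseSchema>[] keeps the comparison to fixture objects honest: fixtures do not carry the generated base-schema fields, so those keys are stripped from the expected type instead of being asserted away. A rough sketch of the idea with simplified stand-in types (the real BaseSchema and NlpValueFull are richer):

// Simplified stand-ins for illustration only.
interface BaseSchema {
  id: string;
  createdAt: Date;
  updatedAt: Date;
}

interface ValueFull extends BaseSchema {
  value: string;
  entity: { name: string };
}

// Fixture-shaped expectation: everything except the generated base fields.
type ExpectedValue = Omit<ValueFull, keyof BaseSchema>;

const expected: ExpectedValue[] = [
  { value: 'greeting', entity: { name: 'intent' } },
];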
@ -130,12 +137,12 @@ describe('NlpValueService', () => {
       const jhonValue = await nlpValueRepository.findOne({ value: 'jhon' });
       const storedValues = [
         {
-          entity: intentEntity.id,
-          value: greetingValue.id,
+          entity: intentEntity!.id,
+          value: greetingValue!.id,
         },
         {
-          entity: firstNameEntity.id,
-          value: jhonValue.id,
+          entity: firstNameEntity!.id,
+          value: jhonValue!.id,
         },
       ];

@ -123,7 +123,7 @@ export class NlpValueService extends BaseService<
         if ('start' in e && 'end' in e) {
           const word = sampleText.slice(e.start, e.end);
           return (
-            word !== e.value && vMap[e.value].expressions.indexOf(word) === -1
+            word !== e.value && vMap[e.value].expressions?.indexOf(word) === -1
           );
         }
         return false;
@ -131,7 +131,7 @@ export class NlpValueService extends BaseService<
       .map((e) => {
         return this.updateOne(vMap[e.value].id, {
           ...vMap[e.value],
-          expressions: vMap[e.value].expressions.concat([
+          expressions: vMap[e.value].expressions?.concat([
             sampleText.slice(e.start, e.end),
           ]),
         } as NlpValueUpdateDto);
@ -202,9 +202,11 @@ export class NlpValueService extends BaseService<
     const promises = valuesToAdd.map(async (v) => {
       const createdOrFound = await this.findOneOrCreate({ value: v.value }, v);
       // If value is found in database, then update it's synonyms
-      const expressions = createdOrFound.expressions
-        .concat(v.expressions) // Add new synonyms
-        .filter((v, i, a) => a.indexOf(v) === i); // Filter unique values
+      const expressions = v.expressions
+        ? createdOrFound.expressions
+            ?.concat(v.expressions) // Add new synonyms
+            .filter((v, i, a) => a.indexOf(v) === i)
+        : createdOrFound.expressions?.filter((v, i, a) => a.indexOf(v) === i); // Filter unique values

       // Update expressions
       const result = await this.updateOne({ value: v.value }, { expressions });
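The rewritten block above tolerates expressions being undefined on either side while still merging and de-duplicating synonyms. The same logic can be lifted into a small helper; this is a sketch of equivalent behavior, not code from the repository:

// Merge an existing synonym list with incoming ones, tolerating undefined
// on either side and de-duplicating the result.
function mergeExpressions(
  existing: string[] | undefined,
  incoming: string[] | undefined,
): string[] | undefined {
  if (!incoming) {
    // Nothing new to add: at most de-duplicate what is already stored.
    return existing?.filter((v, i, a) => a.indexOf(v) === i);
  }
  return existing
    ?.concat(incoming) // Add new synonyms
    .filter((v, i, a) => a.indexOf(v) === i); // Keep unique values only
}

// mergeExpressions(['hi', 'hello'], ['hello', 'hey']) => ['hi', 'hello', 'hey']
// mergeExpressions(undefined, ['hey'])                => undefined (mirrors the diff's behavior)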
@ -31,7 +31,7 @@ export class SearchFilterPipe<T>
     private readonly props: {
       allowedFields: TFilterNestedKeysOfType<
         T,
-        undefined | string | string[]
+        null | undefined | string | string[]
       >[];
     },
   ) {}
@ -48,7 +48,10 @@ export class SearchFilterPipe<T>
   private isAllowedField(field: string) {
     if (
       this.props.allowedFields.includes(
-        field as TFilterNestedKeysOfType<T, undefined | string | string[]>,
+        field as TFilterNestedKeysOfType<
+          T,
+          null | undefined | string | string[]
+        >,
       )
     )
       return true;
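Widening the union to null | undefined | string | string[] matters because, with strict null checks on, model properties typed as string | null would otherwise stop matching the key filter and silently become disallowed as search fields. A toy version of such a key-filtering type, purely illustrative and much simpler than the project's TFilterNestedKeysOfType:

// Toy helper: pick the keys of T whose values are assignable to V.
type KeysOfType<T, V> = {
  [K in keyof T]-?: T[K] extends V ? K : never;
}[keyof T];

interface Subscriber {
  firstName: string;
  lastName: string | null; // nullable under strict null checks
  labels: string[];
  score: number;
}

// Without null in the union, 'lastName' would be excluded.
type Searchable = KeysOfType<Subscriber, null | undefined | string | string[]>;
// => 'firstName' | 'lastName' | 'labels'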
api/src/utils/test/fixtures/nlpsample.ts (vendored)
@ -9,7 +9,7 @@
 import mongoose from 'mongoose';

 import { NlpSampleCreateDto } from '@/nlp/dto/nlp-sample.dto';
-import { NlpSampleModel, NlpSample } from '@/nlp/schemas/nlp-sample.schema';
+import { NlpSample, NlpSampleModel } from '@/nlp/schemas/nlp-sample.schema';
 import { NlpSampleState } from '@/nlp/schemas/types';

 import { getFixturesWithDefaultValues } from '../defaultValues';
@ -56,7 +56,7 @@ export const installNlpSampleFixtures = async () => {
     nlpSampleFixtures.map((v) => {
       return {
         ...v,
-        language: languages[parseInt(v.language)].id,
+        language: v.language ? languages[parseInt(v.language)].id : null,
       };
     }),
   );
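The fixture change follows the same pattern as the service code: the language field is now allowed to be absent, so it is checked before being used as an index, with null stored as the fallback. In isolation, with simplified fixture shapes rather than the project's DTOs:

interface LanguageDoc { id: string }

interface SampleFixture {
  text: string;
  language?: string; // index into the installed languages, may be absent
}

function toCreateDto(fixture: SampleFixture, languages: LanguageDoc[]) {
  return {
    ...fixture,
    // Only dereference languages[...] when the fixture actually names one.
    language: fixture.language ? languages[parseInt(fixture.language)].id : null,
  };
}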