From ad684676a7161c3914a698d3a03df4a758593f6c Mon Sep 17 00:00:00 2001 From: Mohamed Marrouchi Date: Wed, 4 Jun 2025 17:25:47 +0100 Subject: [PATCH 1/7] feat: add support to filter samples by entities/values --- .../nlp/controllers/nlp-sample.controller.ts | 36 +++- .../nlp/repositories/nlp-sample.repository.ts | 170 +++++++++++++++++- api/src/nlp/services/nlp-entity.service.ts | 15 ++ api/src/nlp/services/nlp-sample.service.ts | 66 ++++++- api/src/nlp/services/nlp-value.service.ts | 15 ++ api/src/utils/generics/base-repository.ts | 45 ++++- .../inputs/NlpPatternSelect.tsx | 23 ++- .../components/nlp/components/NlpSample.tsx | 24 ++- .../form/inputs/triggers/PatternInput.tsx | 1 + 9 files changed, 383 insertions(+), 12 deletions(-) diff --git a/api/src/nlp/controllers/nlp-sample.controller.ts b/api/src/nlp/controllers/nlp-sample.controller.ts index 949e8e86..a94de961 100644 --- a/api/src/nlp/controllers/nlp-sample.controller.ts +++ b/api/src/nlp/controllers/nlp-sample.controller.ts @@ -29,17 +29,21 @@ import { import { FileInterceptor } from '@nestjs/platform-express'; import { CsrfCheck } from '@tekuconcept/nestjs-csrf'; import { Response } from 'express'; +import { z } from 'zod'; +import { NlpPattern, nlpPatternSchema } from '@/chat/schemas/types/pattern'; import { HelperService } from '@/helper/helper.service'; import { HelperType } from '@/helper/types'; import { LanguageService } from '@/i18n/services/language.service'; import { CsrfInterceptor } from '@/interceptors/csrf.interceptor'; +import { Roles } from '@/utils/decorators/roles.decorator'; import { BaseController } from '@/utils/generics/base-controller'; import { DeleteResult } from '@/utils/generics/base-repository'; import { PageQueryDto } from '@/utils/pagination/pagination-query.dto'; import { PageQueryPipe } from '@/utils/pagination/pagination-query.pipe'; import { PopulatePipe } from '@/utils/pipes/populate.pipe'; import { SearchFilterPipe } from '@/utils/pipes/search-filter.pipe'; +import { 
ZodQueryParamPipe } from '@/utils/pipes/zod.pipe'; import { TFilterQuery } from '@/utils/types/filter.types'; import { NlpSampleDto, TNlpSampleDto } from '../dto/nlp-sample.dto'; @@ -177,6 +181,7 @@ export class NlpSampleController extends BaseController< * * @returns The count of samples that match the filters. */ + @Roles('public') @Get('count') async filterCount( @Query( @@ -184,8 +189,18 @@ export class NlpSampleController extends BaseController< allowedFields: ['text', 'type', 'language'], }), ) - filters?: TFilterQuery, + filters: TFilterQuery = {}, + @Query( + new ZodQueryParamPipe( + z.array(nlpPatternSchema), + (q) => q?.where?.patterns, + ), + ) + patterns: NlpPattern[] = [], ) { + if (patterns.length) { + return await this.nlpSampleService.countByPatterns({ filters, patterns }); + } return await this.count(filters); } @@ -276,6 +291,7 @@ export class NlpSampleController extends BaseController< * @returns A paginated list of NLP samples. */ @Get() + @Roles('public') async findPage( @Query(PageQueryPipe) pageQuery: PageQueryDto, @Query(PopulatePipe) populate: string[], @@ -285,7 +301,25 @@ export class NlpSampleController extends BaseController< }), ) filters: TFilterQuery, + @Query( + new ZodQueryParamPipe( + z.array(nlpPatternSchema), + (q) => q?.where?.patterns, + ), + ) + patterns: NlpPattern[] = [], ) { + if (patterns.length) { + return this.canPopulate(populate) + ? await this.nlpSampleService.findByPatternsAndPopulate( + { filters, patterns }, + pageQuery, + ) + : await this.nlpSampleService.findByPatterns( + { filters, patterns }, + pageQuery, + ); + } return this.canPopulate(populate) ? 
await this.nlpSampleService.findAndPopulate(filters, pageQuery) : await this.nlpSampleService.find(filters, pageQuery); diff --git a/api/src/nlp/repositories/nlp-sample.repository.ts b/api/src/nlp/repositories/nlp-sample.repository.ts index 9da6eab3..25e51fb9 100644 --- a/api/src/nlp/repositories/nlp-sample.repository.ts +++ b/api/src/nlp/repositories/nlp-sample.repository.ts @@ -8,15 +8,27 @@ import { Injectable } from '@nestjs/common'; import { InjectModel } from '@nestjs/mongoose'; -import { Document, Model, Query } from 'mongoose'; +import { plainToClass } from 'class-transformer'; +import { + Aggregate, + Document, + Model, + PipelineStage, + ProjectionType, + Query, + Types, +} from 'mongoose'; import { BaseRepository, DeleteResult } from '@/utils/generics/base-repository'; +import { PageQueryDto } from '@/utils/pagination/pagination-query.dto'; import { TFilterQuery } from '@/utils/types/filter.types'; import { TNlpSampleDto } from '../dto/nlp-sample.dto'; +import { NlpSampleEntity } from '../schemas/nlp-sample-entity.schema'; import { NLP_SAMPLE_POPULATE, NlpSample, + NlpSampleDocument, NlpSampleFull, NlpSamplePopulate, } from '../schemas/nlp-sample.schema'; @@ -32,11 +44,167 @@ export class NlpSampleRepository extends BaseRepository< > { constructor( @InjectModel(NlpSample.name) readonly model: Model, + @InjectModel(NlpSampleEntity.name) + readonly sampleEntityModel: Model, private readonly nlpSampleEntityRepository: NlpSampleEntityRepository, ) { super(model, NlpSample, NLP_SAMPLE_POPULATE, NlpSampleFull); } + buildFindByEntitiesStages({ + filters, + entityIds, + valueIds, + }: { + filters: TFilterQuery; + entityIds: Types.ObjectId[]; + valueIds: Types.ObjectId[]; + }): PipelineStage[] { + return [ + // pick link docs whose entity / value matches a pattern + { + $match: { + ...(entityIds.length && { entity: { $in: entityIds } }), + ...(valueIds.length && { value: { $in: valueIds } }), + }, + }, + + // join to the real sample *and* apply sample-side 
filters early + { + $lookup: { + from: 'nlpsamples', + let: { sampleId: '$sample' }, + pipeline: [ + { + $match: { + $expr: { $eq: ['$_id', '$$sampleId'] }, + ...(filters?.$and + ? { + $and: filters.$and?.map((condition) => { + if ('language' in condition && condition.language) { + return { + language: new Types.ObjectId(condition.language), + }; + } + return condition; + }), + } + : {}), + }, + }, + ], + as: 'sample', + }, + }, + { $unwind: '$sample' }, + ]; + } + + findByEntitiesAggregation( + criterias: { + filters: TFilterQuery; + entityIds: Types.ObjectId[]; + valueIds: Types.ObjectId[]; + }, + page?: PageQueryDto, + projection?: ProjectionType, + ): Aggregate { + return this.sampleEntityModel.aggregate([ + ...this.buildFindByEntitiesStages(criterias), + + // promote the sample document + { $replaceRoot: { newRoot: '$sample' } }, + + // sort / skip / limit + ...this.buildPaginationPipelineStages(page), + + // projection + ...(projection + ? [ + { + $project: + typeof projection === 'string' + ? 
{ [projection]: 1 } + : projection, + }, + ] + : []), + ]); + } + + async findByEntities( + criterias: { + filters: TFilterQuery; + entityIds: Types.ObjectId[]; + valueIds: Types.ObjectId[]; + }, + page?: PageQueryDto, + projection?: ProjectionType, + ): Promise { + const aggregation = this.findByEntitiesAggregation( + criterias, + page, + projection, + ); + + const resultSet = await aggregation.exec(); + return resultSet.map((doc) => + plainToClass(NlpSample, doc, this.transformOpts), + ); + } + + async findByEntitiesAndPopulate( + criterias: { + filters: TFilterQuery; + entityIds: Types.ObjectId[]; + valueIds: Types.ObjectId[]; + }, + page?: PageQueryDto, + projection?: ProjectionType, + ): Promise { + const aggregation = this.findByEntitiesAggregation( + criterias, + page, + projection, + ); + + const docs = await aggregation.exec(); + + const populatedResultSet = await this.populate(docs); + + return populatedResultSet.map((doc) => + plainToClass(NlpSampleFull, doc, this.transformOpts), + ); + } + + countByEntitiesAggregation(criterias: { + filters: TFilterQuery; + entityIds: Types.ObjectId[]; + valueIds: Types.ObjectId[]; + }): Aggregate<{ count: number }[]> { + return this.sampleEntityModel.aggregate<{ count: number }>([ + ...this.buildFindByEntitiesStages(criterias), + + // Collapse duplicates: one bucket per unique sample + { $group: { _id: '$sample._id' } }, + + // Final count + { $count: 'count' }, + ]); + } + + async countByEntities(criterias: { + filters: TFilterQuery; + entityIds: Types.ObjectId[]; + valueIds: Types.ObjectId[]; + }): Promise<{ count: number }> { + const aggregation = this.countByEntitiesAggregation(criterias); + + const [result] = await aggregation.exec(); + + return { count: result?.count || 0 }; + } + /** * Deletes NLP sample entities associated with the provided criteria before deleting the sample itself. 
* diff --git a/api/src/nlp/services/nlp-entity.service.ts b/api/src/nlp/services/nlp-entity.service.ts index 0876f3c1..897e841f 100644 --- a/api/src/nlp/services/nlp-entity.service.ts +++ b/api/src/nlp/services/nlp-entity.service.ts @@ -10,7 +10,9 @@ import { CACHE_MANAGER } from '@nestjs/cache-manager'; import { BadRequestException, Inject, Injectable } from '@nestjs/common'; import { OnEvent } from '@nestjs/event-emitter'; import { Cache } from 'cache-manager'; +import { Types } from 'mongoose'; +import { NlpPattern } from '@/chat/schemas/types/pattern'; import { NLP_MAP_CACHE_KEY } from '@/utils/constants/cache'; import { Cacheable } from '@/utils/decorators/cacheable.decorator'; import { BaseService } from '@/utils/generics/base-service'; @@ -72,6 +74,19 @@ export class NlpEntityService extends BaseService< return await this.repository.updateOne(id, { weight: updatedWeight }); } + async findObjectIdsByPatterns(patterns: NlpPattern[]) { + // resolve pattern → ids (kept here because it uses other services) + return ( + await this.find({ + name: { + $in: patterns + .filter((p) => p.match === 'entity') + .map((p) => p.entity), + }, + }) + ).map((e) => new Types.ObjectId(e.id)); + } + /** * Stores new entities based on the sample text and sample entities. * Deletes all values relative to this entity before deleting the entity itself. 
diff --git a/api/src/nlp/services/nlp-sample.service.ts b/api/src/nlp/services/nlp-sample.service.ts index 665f8cc7..700e508b 100644 --- a/api/src/nlp/services/nlp-sample.service.ts +++ b/api/src/nlp/services/nlp-sample.service.ts @@ -12,14 +12,16 @@ import { NotFoundException, } from '@nestjs/common'; import { OnEvent } from '@nestjs/event-emitter'; -import { Document, Query } from 'mongoose'; +import { Document, ProjectionType, Query } from 'mongoose'; import Papa from 'papaparse'; import { Message } from '@/chat/schemas/message.schema'; +import { NlpPattern } from '@/chat/schemas/types/pattern'; import { Language } from '@/i18n/schemas/language.schema'; import { LanguageService } from '@/i18n/services/language.service'; import { DeleteResult } from '@/utils/generics/base-repository'; import { BaseService } from '@/utils/generics/base-service'; +import { PageQueryDto } from '@/utils/pagination/pagination-query.dto'; import { TFilterQuery, THydratedDocument } from '@/utils/types/filter.types'; import { NlpSampleEntityCreateDto } from '../dto/nlp-sample-entity.dto'; @@ -35,6 +37,7 @@ import { NlpSampleEntityValue, NlpSampleState } from '../schemas/types'; import { NlpEntityService } from './nlp-entity.service'; import { NlpSampleEntityService } from './nlp-sample-entity.service'; +import { NlpValueService } from './nlp-value.service'; @Injectable() export class NlpSampleService extends BaseService< @@ -47,6 +50,7 @@ export class NlpSampleService extends BaseService< readonly repository: NlpSampleRepository, private readonly nlpSampleEntityService: NlpSampleEntityService, private readonly nlpEntityService: NlpEntityService, + private readonly nlpValueService: NlpValueService, private readonly languageService: LanguageService, ) { super(repository); @@ -279,6 +283,66 @@ export class NlpSampleService extends BaseService< } } + async findByPatterns( + { + filters, + patterns, + }: { + filters: TFilterQuery; + patterns: NlpPattern[]; + }, + page?: PageQueryDto, + 
projection?: ProjectionType, + ): Promise { + return await this.repository.findByEntities( + { + filters, + entityIds: + await this.nlpEntityService.findObjectIdsByPatterns(patterns), + valueIds: await this.nlpValueService.findObjectIdsByPatterns(patterns), + }, + page, + projection, + ); + } + + async findByPatternsAndPopulate( + { + filters, + patterns, + }: { + filters: TFilterQuery; + patterns: NlpPattern[]; + }, + page?: PageQueryDto, + projection?: ProjectionType, + ): Promise { + return await this.repository.findByEntitiesAndPopulate( + { + filters, + entityIds: + await this.nlpEntityService.findObjectIdsByPatterns(patterns), + valueIds: await this.nlpValueService.findObjectIdsByPatterns(patterns), + }, + page, + projection, + ); + } + + async countByPatterns({ + filters, + patterns, + }: { + filters: TFilterQuery; + patterns: NlpPattern[]; + }): Promise<{ count: number }> { + return await this.repository.countByEntities({ + filters, + entityIds: await this.nlpEntityService.findObjectIdsByPatterns(patterns), + valueIds: await this.nlpValueService.findObjectIdsByPatterns(patterns), + }); + } + @OnEvent('hook:message:preCreate') async handleNewMessage(doc: THydratedDocument) { // If message is sent by the user then add it as an inbox sample diff --git a/api/src/nlp/services/nlp-value.service.ts b/api/src/nlp/services/nlp-value.service.ts index 87988140..021ff3d1 100644 --- a/api/src/nlp/services/nlp-value.service.ts +++ b/api/src/nlp/services/nlp-value.service.ts @@ -7,7 +7,9 @@ */ import { forwardRef, Inject, Injectable } from '@nestjs/common'; +import { Types } from 'mongoose'; +import { NlpPattern } from '@/chat/schemas/types/pattern'; import { DeleteResult } from '@/utils/generics/base-repository'; import { BaseService } from '@/utils/generics/base-service'; import { PageQueryDto } from '@/utils/pagination/pagination-query.dto'; @@ -42,6 +44,19 @@ export class NlpValueService extends BaseService< super(repository); } + async 
findObjectIdsByPatterns(patterns: NlpPattern[]) { + // resolve pattern → ids (kept here because it uses other services) + return ( + await this.find({ + value: { + $in: patterns + .map((p) => (p.match === 'value' ? p.value : null)) + .filter(Boolean), + }, + }) + ).map((v) => new Types.ObjectId(v.id)); + } + /** * Deletes an NLP value by its ID, cascading any dependent data. * diff --git a/api/src/utils/generics/base-repository.ts b/api/src/utils/generics/base-repository.ts index 9244d13c..d73f58bb 100644 --- a/api/src/utils/generics/base-repository.ts +++ b/api/src/utils/generics/base-repository.ts @@ -19,6 +19,7 @@ import { FlattenMaps, HydratedDocument, Model, + PipelineStage, ProjectionType, Query, SortOrder, @@ -31,6 +32,7 @@ import { LoggerService } from '@/logger/logger.service'; import { TFilterQuery, TFlattenOption, + THydratedDocument, TQueryOptions, } from '@/utils/types/filter.types'; @@ -81,9 +83,13 @@ export abstract class BaseRepository< U extends Omit = Omit, D = Document, > { - private readonly transformOpts = { excludePrefixes: ['_', 'password'] }; + protected readonly transformOpts = { excludePrefixes: ['_', 'password'] }; - private readonly leanOpts = { virtuals: true, defaults: true, getters: true }; + protected readonly leanOpts = { + virtuals: true, + defaults: true, + getters: true, + }; @Inject(EventEmitter2) readonly eventEmitter: EventEmitter2; @@ -643,4 +649,39 @@ export abstract class BaseRepository< ): Promise { // Nothing ... } + + buildPaginationPipelineStages(page?: PageQueryDto): PipelineStage[] { + if (!page) return []; + + const stages: PipelineStage[] = []; + + if (page.sort) { + const [field, dir] = page.sort; + stages.push({ + $sort: { + [field]: + typeof dir === 'number' + ? dir + : ['asc', 'ascending'].includes(dir as string) + ? 
1 + : -1, + } as Record, + }); + } + + if (page.skip) stages.push({ $skip: page.skip }); + if (page.limit) stages.push({ $limit: page.limit }); + + return stages; + } + + async populate(docs: THydratedDocument[]) { + return await this.model.populate( + docs, + this.populatePaths.map((path) => ({ + path, + options: { lean: true }, + })), + ); + } } diff --git a/frontend/src/app-components/inputs/NlpPatternSelect.tsx b/frontend/src/app-components/inputs/NlpPatternSelect.tsx index 142a5d0f..39e4c4b9 100644 --- a/frontend/src/app-components/inputs/NlpPatternSelect.tsx +++ b/frontend/src/app-components/inputs/NlpPatternSelect.tsx @@ -1,5 +1,5 @@ /* - * Copyright © 2024 Hexastack. All rights reserved. + * Copyright © 2025 Hexastack. All rights reserved. * * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. @@ -17,7 +17,7 @@ import { Typography, useTheme, } from "@mui/material"; -import Autocomplete from "@mui/material/Autocomplete"; +import Autocomplete, { AutocompleteProps } from "@mui/material/Autocomplete"; import { forwardRef, SyntheticEvent, useRef } from "react"; import { Input } from "@/app-components/inputs/Input"; @@ -30,13 +30,24 @@ import { NlpPattern } from "@/types/block.types"; import { INlpEntity } from "@/types/nlp-entity.types"; import { INlpValue } from "@/types/nlp-value.types"; -type NlpPatternSelectProps = { +interface NlpPatternSelectProps + extends Omit< + AutocompleteProps, + | "onChange" + | "value" + | "options" + | "multiple" + | "disabled" + | "renderTags" + | "renderOptions" + | "renderInput" + > { patterns: NlpPattern[]; onChange: (patterns: NlpPattern[]) => void; -}; +} const NlpPatternSelect = ( - { patterns, onChange }: NlpPatternSelectProps, + { patterns, onChange, ...props }: NlpPatternSelectProps, ref, ) => { const inputRef = useRef(null); @@ -116,8 
+127,8 @@ const NlpPatternSelect = ( return ( ("all"); const [language, setLanguage] = useState(undefined); + const [patterns, setPatterns] = useState([]); const hasPermission = useHasPermission(); const getNlpEntityFromCache = useGetFromCache(EntityType.NLP_ENTITY); const getNlpValueFromCache = useGetFromCache(EntityType.NLP_VALUE); @@ -86,11 +89,14 @@ export default function NlpSample() { EntityType.NLP_SAMPLE_ENTITY, ); const getLanguageFromCache = useGetFromCache(EntityType.LANGUAGE); - const { onSearch, searchPayload, searchText } = useSearch( + const { onSearch, searchPayload, searchText } = useSearch< + INlpSample & { patterns: NlpPattern[] } + >( { $eq: [ ...(type !== "all" ? [{ type }] : []), ...(language ? [{ language }] : []), + ...(patterns ? [{ patterns }] : []), ], $iLike: ["text"], }, @@ -425,6 +431,22 @@ export default function NlpSample() { + + { + setPatterns(patterns); + }} + fullWidth={true} + /> + diff --git a/frontend/src/components/visual-editor/form/inputs/triggers/PatternInput.tsx b/frontend/src/components/visual-editor/form/inputs/triggers/PatternInput.tsx index 8d0b6b3f..2caa02d7 100644 --- a/frontend/src/components/visual-editor/form/inputs/triggers/PatternInput.tsx +++ b/frontend/src/components/visual-editor/form/inputs/triggers/PatternInput.tsx @@ -103,6 +103,7 @@ const PatternInput: FC = ({ )} {["payload", "content", "menu"].includes(patternType) ? 
( From 5c2ecaf8fc6c6b94a647b6f6caf6a0d12223df73 Mon Sep 17 00:00:00 2001 From: Mohamed Marrouchi Date: Wed, 4 Jun 2025 18:17:17 +0100 Subject: [PATCH 2/7] fix: remove public role --- api/src/nlp/controllers/nlp-sample.controller.ts | 3 --- 1 file changed, 3 deletions(-) diff --git a/api/src/nlp/controllers/nlp-sample.controller.ts b/api/src/nlp/controllers/nlp-sample.controller.ts index a94de961..7caf121d 100644 --- a/api/src/nlp/controllers/nlp-sample.controller.ts +++ b/api/src/nlp/controllers/nlp-sample.controller.ts @@ -36,7 +36,6 @@ import { HelperService } from '@/helper/helper.service'; import { HelperType } from '@/helper/types'; import { LanguageService } from '@/i18n/services/language.service'; import { CsrfInterceptor } from '@/interceptors/csrf.interceptor'; -import { Roles } from '@/utils/decorators/roles.decorator'; import { BaseController } from '@/utils/generics/base-controller'; import { DeleteResult } from '@/utils/generics/base-repository'; import { PageQueryDto } from '@/utils/pagination/pagination-query.dto'; @@ -181,7 +180,6 @@ export class NlpSampleController extends BaseController< * * @returns The count of samples that match the filters. */ - @Roles('public') @Get('count') async filterCount( @Query( @@ -291,7 +289,6 @@ export class NlpSampleController extends BaseController< * @returns A paginated list of NLP samples. 
*/ @Get() - @Roles('public') async findPage( @Query(PageQueryPipe) pageQuery: PageQueryDto, @Query(PopulatePipe) populate: string[], From e89d948f37b78309986347237da6f02a3913023e Mon Sep 17 00:00:00 2001 From: Mohamed Marrouchi Date: Thu, 5 Jun 2025 14:56:36 +0100 Subject: [PATCH 3/7] feat: enhance search ux --- api/src/chat/schemas/types/pattern.ts | 28 +- .../controllers/nlp-sample.controller.spec.ts | 27 ++ .../nlp/controllers/nlp-sample.controller.ts | 45 +-- .../nlp/repositories/nlp-sample.repository.ts | 164 ++++++--- api/src/nlp/services/nlp-entity.service.ts | 15 - api/src/nlp/services/nlp-sample.service.ts | 173 +++++---- api/src/nlp/services/nlp-value.service.ts | 26 +- api/src/utils/generics/base-repository.ts | 338 ++++++++++++++++++ .../utils/test/fixtures/nlpsampleentity.ts | 4 +- frontend/public/locales/en/translation.json | 2 +- frontend/public/locales/fr/translation.json | 2 +- .../inputs/NlpPatternSelect.tsx | 38 +- .../src/app-components/tables/DataGrid.tsx | 2 +- .../components/nlp/components/NlpSample.tsx | 11 +- frontend/src/hooks/crud/useFind.tsx | 23 +- frontend/src/types/block.types.ts | 11 +- 16 files changed, 709 insertions(+), 200 deletions(-) diff --git a/api/src/chat/schemas/types/pattern.ts b/api/src/chat/schemas/types/pattern.ts index 48df5efe..520f2e6a 100644 --- a/api/src/chat/schemas/types/pattern.ts +++ b/api/src/chat/schemas/types/pattern.ts @@ -18,19 +18,27 @@ export const payloadPatternSchema = z.object({ export type PayloadPattern = z.infer; +export const nlpEntityMatchPatternSchema = z.object({ + entity: z.string(), + match: z.literal('entity'), +}); + +export type NlpEntityMatchPattern = z.infer; + +export const nlpValueMatchPatternSchema = z.object({ + entity: z.string(), + match: z.literal('value'), + value: z.string(), +}); + +export type NlpValueMatchPattern = z.infer; + export const nlpPatternSchema = z.discriminatedUnion('match', [ - z.object({ - entity: z.string(), - match: z.literal('entity'), - }), - z.object({ - 
entity: z.string(), - match: z.literal('value'), - value: z.string(), - }), + nlpEntityMatchPatternSchema, + nlpValueMatchPatternSchema, ]); -export type NlpPattern = z.infer; +export type NlpPattern = NlpEntityMatchPattern | NlpValueMatchPattern; export const stringRegexPatternSchema = z.string().refine( (value) => { diff --git a/api/src/nlp/controllers/nlp-sample.controller.spec.ts b/api/src/nlp/controllers/nlp-sample.controller.spec.ts index 031c5dac..21436ba5 100644 --- a/api/src/nlp/controllers/nlp-sample.controller.spec.ts +++ b/api/src/nlp/controllers/nlp-sample.controller.spec.ts @@ -10,6 +10,7 @@ import { CACHE_MANAGER } from '@nestjs/cache-manager'; import { BadRequestException, NotFoundException } from '@nestjs/common'; import { MongooseModule } from '@nestjs/mongoose'; +import { NlpValueMatchPattern } from '@/chat/schemas/types/pattern'; import { HelperService } from '@/helper/helper.service'; import { LanguageRepository } from '@/i18n/repositories/language.repository'; import { Language, LanguageModel } from '@/i18n/schemas/language.schema'; @@ -439,4 +440,30 @@ describe('NlpSampleController', () => { ).rejects.toThrow(NotFoundException); }); }); + + describe('filterCount', () => { + it('should count the nlp samples without patterns', async () => { + const filters = { text: 'Hello' }; + const result = await nlpSampleController.filterCount(filters, []); + expect(result).toEqual({ count: 1 }); + }); + + it('should count the nlp samples with patterns', async () => { + const filters = { text: 'Hello' }; + const patterns: NlpValueMatchPattern[] = [ + { entity: 'intent', match: 'value', value: 'greeting' }, + ]; + const result = await nlpSampleController.filterCount(filters, patterns); + expect(result).toEqual({ count: 1 }); + }); + + it('should return zero count when no samples match the filters and patterns', async () => { + const filters = { text: 'Nonexistent' }; + const patterns: NlpValueMatchPattern[] = [ + { entity: 'intent', match: 'value', value: 
'nonexistent' }, + ]; + const result = await nlpSampleController.filterCount(filters, patterns); + expect(result).toEqual({ count: 0 }); + }); + }); }); diff --git a/api/src/nlp/controllers/nlp-sample.controller.ts b/api/src/nlp/controllers/nlp-sample.controller.ts index 7caf121d..b5213932 100644 --- a/api/src/nlp/controllers/nlp-sample.controller.ts +++ b/api/src/nlp/controllers/nlp-sample.controller.ts @@ -31,7 +31,10 @@ import { CsrfCheck } from '@tekuconcept/nestjs-csrf'; import { Response } from 'express'; import { z } from 'zod'; -import { NlpPattern, nlpPatternSchema } from '@/chat/schemas/types/pattern'; +import { + NlpValueMatchPattern, + nlpValueMatchPatternSchema, +} from '@/chat/schemas/types/pattern'; import { HelperService } from '@/helper/helper.service'; import { HelperType } from '@/helper/types'; import { LanguageService } from '@/i18n/services/language.service'; @@ -190,16 +193,19 @@ export class NlpSampleController extends BaseController< filters: TFilterQuery = {}, @Query( new ZodQueryParamPipe( - z.array(nlpPatternSchema), + z.array(nlpValueMatchPatternSchema), (q) => q?.where?.patterns, ), ) - patterns: NlpPattern[] = [], + patterns: NlpValueMatchPattern[] = [], ) { - if (patterns.length) { - return await this.nlpSampleService.countByPatterns({ filters, patterns }); - } - return await this.count(filters); + const count = await this.nlpSampleService.countByPatterns({ + filters, + patterns, + }); + return { + count, + }; } /** @@ -300,26 +306,21 @@ export class NlpSampleController extends BaseController< filters: TFilterQuery, @Query( new ZodQueryParamPipe( - z.array(nlpPatternSchema), + z.array(nlpValueMatchPatternSchema), (q) => q?.where?.patterns, ), ) - patterns: NlpPattern[] = [], + patterns: NlpValueMatchPattern[] = [], ) { - if (patterns.length) { - return this.canPopulate(populate) - ? 
await this.nlpSampleService.findByPatternsAndPopulate( - { filters, patterns }, - pageQuery, - ) - : await this.nlpSampleService.findByPatterns( - { filters, patterns }, - pageQuery, - ); - } return this.canPopulate(populate) - ? await this.nlpSampleService.findAndPopulate(filters, pageQuery) - : await this.nlpSampleService.find(filters, pageQuery); + ? await this.nlpSampleService.findByPatternsAndPopulate( + { filters, patterns }, + pageQuery, + ) + : await this.nlpSampleService.findByPatterns( + { filters, patterns }, + pageQuery, + ); } /** diff --git a/api/src/nlp/repositories/nlp-sample.repository.ts b/api/src/nlp/repositories/nlp-sample.repository.ts index 25e51fb9..c7b2e90a 100644 --- a/api/src/nlp/repositories/nlp-sample.repository.ts +++ b/api/src/nlp/repositories/nlp-sample.repository.ts @@ -32,6 +32,7 @@ import { NlpSampleFull, NlpSamplePopulate, } from '../schemas/nlp-sample.schema'; +import { NlpValue } from '../schemas/nlp-value.schema'; import { NlpSampleEntityRepository } from './nlp-sample-entity.repository'; @@ -51,70 +52,128 @@ export class NlpSampleRepository extends BaseRepository< super(model, NlpSample, NLP_SAMPLE_POPULATE, NlpSampleFull); } + /** + * Build the aggregation stages that restrict a *nlpSampleEntities* collection + * to links which: + * 1. Reference all of the supplied `values`, and + * 2. Whose document satisfies the optional `filters`. + * + * @param criterias Object with: + * @param criterias.filters Extra filters to be applied on *nlpsamples*. + * @param criterias.entities Entity documents whose IDs should match `entity`. + * @param criterias.values Value documents whose IDs should match `value`. + * @returns Array of aggregation `PipelineStage`s ready to be concatenated + * into a larger pipeline. 
+ */ buildFindByEntitiesStages({ filters, - entityIds, - valueIds, + values, }: { filters: TFilterQuery; - entityIds: Types.ObjectId[]; - valueIds: Types.ObjectId[]; + values: NlpValue[]; }): PipelineStage[] { + const requiredPairs = values.map(({ id, entity }) => ({ + entity: new Types.ObjectId(entity), + value: new Types.ObjectId(id), + })); + return [ - // pick link docs whose entity / value matches a pattern + // Apply sample-side filters early { $match: { - ...(entityIds.length && { entity: { $in: entityIds } }), - ...(valueIds.length && { value: { $in: valueIds } }), + ...(filters?.$and + ? { + $and: filters.$and?.map((condition) => { + if ('language' in condition && condition.language) { + return { + language: new Types.ObjectId( + condition.language as string, + ), + }; + } + return condition; + }), + } + : {}), }, }, - // join to the real sample *and* apply sample-side filters early + // Fetch the entities for each sample { $lookup: { - from: 'nlpsamples', - let: { sampleId: '$sample' }, + from: 'nlpsampleentities', + localField: '_id', // nlpsamples._id + foreignField: 'sample', // nlpsampleentities.sample + as: 'sampleentities', pipeline: [ { $match: { - $expr: { $eq: ['$_id', '$$sampleId'] }, - ...(filters?.$and - ? 
{ - $and: filters.$and?.map((condition) => { - if ('language' in condition && condition.language) { - return { - language: new Types.ObjectId(condition.language), - }; - } - return condition; - }), - } - : {}), + $or: requiredPairs, }, }, ], - as: 'sample', }, }, - { $unwind: '$sample' }, + + // Filter out empty or less matching + { + $match: { + $expr: { + $gte: [{ $size: '$sampleentities' }, requiredPairs.length], + }, + }, + }, + + // Collapse each link into an { entity, value } object + { + $addFields: { + entities: { + $ifNull: [ + { + $map: { + input: '$sampleentities', + as: 's', + in: { entity: '$$s.entity', value: '$$s.value' }, + }, + }, + [], + ], + }, + }, + }, + + // Keep only the samples whose `entities` array ⊇ `requiredPairs` + { + $match: { + $expr: { + $eq: [ + requiredPairs.length, // target size + { + $size: { + $setIntersection: ['$entities', requiredPairs], + }, + }, + ], + }, + }, + }, + + //drop helper array if you don’t need it downstream + { $project: { entities: 0, sampleentities: 0 } }, ]; } findByEntitiesAggregation( criterias: { filters: TFilterQuery; - entityIds: Types.ObjectId[]; - valueIds: Types.ObjectId[]; + values: NlpValue[]; }, page?: PageQueryDto, projection?: ProjectionType, ): Aggregate { - return this.sampleEntityModel.aggregate([ + return this.model.aggregate([ ...this.buildFindByEntitiesStages(criterias), - // promote the sample document - { $replaceRoot: { newRoot: '$sample' } }, - // sort / skip / limit ...this.buildPaginationPipelineStages(page), @@ -135,8 +194,7 @@ export class NlpSampleRepository extends BaseRepository< async findByEntities( criterias: { filters: TFilterQuery; - entityIds: Types.ObjectId[]; - valueIds: Types.ObjectId[]; + values: NlpValue[]; }, page?: PageQueryDto, projection?: ProjectionType, @@ -153,11 +211,18 @@ export class NlpSampleRepository extends BaseRepository< ); } + /** + * Find NLP samples by entities and populate them with their related data. 
+ * + * @param criterias - Criteria containing filters and values to match. + * @param page - Optional pagination parameters. + * @param projection - Optional projection to limit fields returned. + * @returns Promise resolving to an array of populated NlpSampleFull objects. + */ async findByEntitiesAndPopulate( criterias: { filters: TFilterQuery; - entityIds: Types.ObjectId[]; - valueIds: Types.ObjectId[]; + values: NlpValue[]; }, page?: PageQueryDto, projection?: ProjectionType, @@ -177,32 +242,41 @@ export class NlpSampleRepository extends BaseRepository< ); } + /** + * Build an aggregation pipeline that counts NLP samples satisfying: + * – the extra `filters` (passed to `$match` later on), and + * – All of the supplied `entities` / `values`. + * + * @param criterias `{ filters, entities, values }` + * @returns Un-executed aggregation cursor. + */ countByEntitiesAggregation(criterias: { filters: TFilterQuery; - entityIds: Types.ObjectId[]; - valueIds: Types.ObjectId[]; + values: NlpValue[]; }): Aggregate<{ count: number }[]> { - return this.sampleEntityModel.aggregate<{ count: number }>([ + return this.model.aggregate<{ count: number }>([ ...this.buildFindByEntitiesStages(criterias), - // Collapse duplicates: one bucket per unique sample - { $group: { _id: '$sample._id' } }, - // Final count { $count: 'count' }, ]); } + /** + * Returns the count of samples by filters, entities and/or values + * + * @param criterias `{ filters, entities, values }` + * @returns Promise resolving to `{ count: number }`. 
+ */ async countByEntities(criterias: { filters: TFilterQuery; - entityIds: Types.ObjectId[]; - valueIds: Types.ObjectId[]; - }): Promise<{ count: number }> { + values: NlpValue[]; + }): Promise { const aggregation = this.countByEntitiesAggregation(criterias); const [result] = await aggregation.exec(); - return { count: result?.count || 0 }; + return result?.count || 0; } /** diff --git a/api/src/nlp/services/nlp-entity.service.ts b/api/src/nlp/services/nlp-entity.service.ts index 897e841f..0876f3c1 100644 --- a/api/src/nlp/services/nlp-entity.service.ts +++ b/api/src/nlp/services/nlp-entity.service.ts @@ -10,9 +10,7 @@ import { CACHE_MANAGER } from '@nestjs/cache-manager'; import { BadRequestException, Inject, Injectable } from '@nestjs/common'; import { OnEvent } from '@nestjs/event-emitter'; import { Cache } from 'cache-manager'; -import { Types } from 'mongoose'; -import { NlpPattern } from '@/chat/schemas/types/pattern'; import { NLP_MAP_CACHE_KEY } from '@/utils/constants/cache'; import { Cacheable } from '@/utils/decorators/cacheable.decorator'; import { BaseService } from '@/utils/generics/base-service'; @@ -74,19 +72,6 @@ export class NlpEntityService extends BaseService< return await this.repository.updateOne(id, { weight: updatedWeight }); } - async findObjectIdsByPatterns(patterns: NlpPattern[]) { - // resolve pattern → ids (kept here because it uses other services) - return ( - await this.find({ - name: { - $in: patterns - .filter((p) => p.match === 'entity') - .map((p) => p.entity), - }, - }) - ).map((e) => new Types.ObjectId(e.id)); - } - /** * Stores new entities based on the sample text and sample entities. * Deletes all values relative to this entity before deleting the entity itself. 
diff --git a/api/src/nlp/services/nlp-sample.service.ts b/api/src/nlp/services/nlp-sample.service.ts index 700e508b..31d3c009 100644 --- a/api/src/nlp/services/nlp-sample.service.ts +++ b/api/src/nlp/services/nlp-sample.service.ts @@ -16,7 +16,7 @@ import { Document, ProjectionType, Query } from 'mongoose'; import Papa from 'papaparse'; import { Message } from '@/chat/schemas/message.schema'; -import { NlpPattern } from '@/chat/schemas/types/pattern'; +import { NlpValueMatchPattern } from '@/chat/schemas/types/pattern'; import { Language } from '@/i18n/schemas/language.schema'; import { LanguageService } from '@/i18n/services/language.service'; import { DeleteResult } from '@/utils/generics/base-repository'; @@ -56,6 +56,117 @@ export class NlpSampleService extends BaseService< super(repository); } + /** + * Retrieve samples that satisfy `filters` **and** reference any entity / value + * contained in `patterns`. + * + * The pattern list is first resolved via `NlpEntityService.findByPatterns` + * and `NlpValueService.findByPatterns`, then delegated to + * `repository.findByEntities`. + * + * @param criterias `{ filters, patterns }` + * @param page Optional paging / sorting descriptor. + * @param projection Optional Mongo projection. + * @returns Promise resolving to the matching samples. + */ + async findByPatterns( + { + filters, + patterns, + }: { + filters: TFilterQuery; + patterns: NlpValueMatchPattern[]; + }, + page?: PageQueryDto, + projection?: ProjectionType, + ): Promise { + const values = + patterns.length > 0 + ? await this.nlpValueService.findByPatterns(patterns) + : []; + + if (values.length === 0) { + return await this.repository.find(filters, page, projection); + } + + return await this.repository.findByEntities( + { + filters, + values, + }, + page, + projection, + ); + } + + /** + * Same as `findByPatterns`, but also populates all relations declared + * in the repository (`populatePaths`). 
+   *
+   * @param criterias `{ filters, patterns }`
+   * @param page Optional paging / sorting descriptor.
+   * @param projection Optional Mongo projection.
+   * @returns Promise resolving to the populated samples.
+   */
+  async findByPatternsAndPopulate(
+    {
+      filters,
+      patterns,
+    }: {
+      filters: TFilterQuery;
+      patterns: NlpValueMatchPattern[];
+    },
+    page?: PageQueryDto,
+    projection?: ProjectionType,
+  ): Promise {
+    const values =
+      patterns.length > 0
+        ? await this.nlpValueService.findByPatterns(patterns)
+        : [];
+
+    if (values.length === 0) {
+      return await this.repository.findAndPopulate(filters, page, projection);
+    }
+
+    return await this.repository.findByEntitiesAndPopulate(
+      {
+        filters,
+        values,
+      },
+      page,
+      projection,
+    );
+  }
+
+  /**
+   * Count how many samples satisfy `filters` and reference any entity / value
+   * present in `patterns`.
+   *
+   * @param criterias `{ filters, patterns }`
+   * @returns Promise resolving to `{ count }`.
+   */
+  async countByPatterns({
+    filters,
+    patterns,
+  }: {
+    filters: TFilterQuery;
+    patterns: NlpValueMatchPattern[];
+  }): Promise {
+    const values =
+      patterns.length > 0
+        ? await this.nlpValueService.findByPatterns(patterns)
+        : [];
+
+    if (values.length === 0) {
+      return await this.repository.count(filters);
+    }
+
+    return await this.repository.countByEntities({
+      filters,
+      values,
+    });
+  }
+
   /**
    * Fetches the samples and entities for a given sample type.
* @@ -283,66 +394,6 @@ export class NlpSampleService extends BaseService< } } - async findByPatterns( - { - filters, - patterns, - }: { - filters: TFilterQuery; - patterns: NlpPattern[]; - }, - page?: PageQueryDto, - projection?: ProjectionType, - ): Promise { - return await this.repository.findByEntities( - { - filters, - entityIds: - await this.nlpEntityService.findObjectIdsByPatterns(patterns), - valueIds: await this.nlpValueService.findObjectIdsByPatterns(patterns), - }, - page, - projection, - ); - } - - async findByPatternsAndPopulate( - { - filters, - patterns, - }: { - filters: TFilterQuery; - patterns: NlpPattern[]; - }, - page?: PageQueryDto, - projection?: ProjectionType, - ): Promise { - return await this.repository.findByEntitiesAndPopulate( - { - filters, - entityIds: - await this.nlpEntityService.findObjectIdsByPatterns(patterns), - valueIds: await this.nlpValueService.findObjectIdsByPatterns(patterns), - }, - page, - projection, - ); - } - - async countByPatterns({ - filters, - patterns, - }: { - filters: TFilterQuery; - patterns: NlpPattern[]; - }): Promise<{ count: number }> { - return await this.repository.countByEntities({ - filters, - entityIds: await this.nlpEntityService.findObjectIdsByPatterns(patterns), - valueIds: await this.nlpValueService.findObjectIdsByPatterns(patterns), - }); - } - @OnEvent('hook:message:preCreate') async handleNewMessage(doc: THydratedDocument) { // If message is sent by the user then add it as an inbox sample diff --git a/api/src/nlp/services/nlp-value.service.ts b/api/src/nlp/services/nlp-value.service.ts index 021ff3d1..0b916bdc 100644 --- a/api/src/nlp/services/nlp-value.service.ts +++ b/api/src/nlp/services/nlp-value.service.ts @@ -7,9 +7,8 @@ */ import { forwardRef, Inject, Injectable } from '@nestjs/common'; -import { Types } from 'mongoose'; -import { NlpPattern } from '@/chat/schemas/types/pattern'; +import { NlpValueMatchPattern } from '@/chat/schemas/types/pattern'; import { DeleteResult } from 
'@/utils/generics/base-repository';
 import { BaseService } from '@/utils/generics/base-service';
 import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
@@ -44,17 +43,18 @@
     super(repository);
   }
 
-  async findObjectIdsByPatterns(patterns: NlpPattern[]) {
-    // resolve pattern → ids (kept here because it uses other services)
-    return (
-      await this.find({
-        value: {
-          $in: patterns
-            .map((p) => (p.match === 'value' ? p.value : null))
-            .filter(Boolean),
-        },
-      })
-    ).map((v) => new Types.ObjectId(v.id));
+  /**
+   * Fetch values whose `value` field matches the patterns provided.
+   *
+   * @param patterns Pattern list
+   * @returns Promise resolving to the matching values.
+   */
+  async findByPatterns(patterns: NlpValueMatchPattern[]) {
+    return await this.find({
+      value: {
+        $in: patterns.map((p) => p.value),
+      },
+    });
   }
 
   /**
diff --git a/api/src/utils/generics/base-repository.ts b/api/src/utils/generics/base-repository.ts
index d73f58bb..6258247e 100644
--- a/api/src/utils/generics/base-repository.ts
+++ b/api/src/utils/generics/base-repository.ts
@@ -106,15 +106,50 @@
     this.registerLifeCycleHooks();
   }
 
+  /**
+   * Determine whether at least one of the requested populate paths
+   * is supported by the repository.
+   *
+   * @param populate Array of path strings supplied by the caller.
+   * @returns `true` if any item appears in `this.populatePaths`, else `false`.
+   */
   canPopulate(populate: string[]): boolean {
     return populate.some((p) => this.populatePaths.includes(p as P));
   }
 
+  /**
+   * Build the canonical event name used by the repository’s event-bus hooks.
+   *
+   * Format: `hook:<entity>:<suffix>`
+   * where `<entity>` is the lower-cased class name and `<suffix>` is an
+   * `EHook` value such as `"preCreate"` or `"postUpdate"`.
+   *
+   * @param suffix Lifecycle-hook suffix.
+   * @returns A type-safe event name string.
+ */ getEventName(suffix: EHook) { const entity = this.cls.name.toLocaleLowerCase(); return `hook:${entity}:${suffix}` as `hook:${IHookEntities}:${TNormalizedEvents}`; } + /** + * Wire all Mongoose lifecycle hooks to the repository’s overridable + * `pre-/post-*` methods **and** to the domain event bus. + * + * For the current repository (`this.cls.name`) the method: + * 1. Retrieves the hook definitions from `LifecycleHookManager`. + * 2. Registers handlers for: + * • `validate.pre / validate.post` → `preCreateValidate` / `postCreateValidate` + * • `save.pre / save.post` → `preCreate` / `postCreate` + * • `deleteOne.* deleteMany.*` → `preDelete` / `postDelete` + * • `findOneAndUpdate.*` → `preUpdate` / `postUpdate` + * • `updateMany.*` → `preUpdateMany` / `postUpdateMany` + * 3. Emits the corresponding domain events (`EHook.*`) via `eventEmitter` + * after each repository callback. + * + * If no hooks are registered for the current class, a console warning is + * issued and the method exits gracefully. + */ private registerLifeCycleHooks(): void { const repository = this; const hooks = LifecycleHookManager.getHooks(this.cls.name); @@ -258,6 +293,19 @@ export abstract class BaseRepository< }); } + /** + * Execute a `find`-style query, convert each lean result to `cls`, and return + * the transformed list. + * + * - The query is run with `lean(this.leanOpts)` for performance. + * - Each plain object is passed through `plainToClass` using + * `this.transformOpts`. + * + * @template R Result type – typically the populated or base DTO class. + * @param query Mongoose query returning an array of documents. + * @param cls Constructor used by `plainToClass` for transformation. + * @returns Promise resolving to an array of class instances. 
+ */ protected async execute>( query: Query, cls: new () => R, @@ -266,6 +314,19 @@ export abstract class BaseRepository< return resultSet.map((doc) => plainToClass(cls, doc, this.transformOpts)); } + /** + * Execute a single-document query, convert the result to `cls`, + * and return it (or `null`). + * + * - Uses `lean(this.leanOpts)` for performance. + * - Falls back to `this.transformOpts` when `options` is not provided. + * + * @template R Result type – typically the populated or base DTO class. + * @param query Mongoose query expected to return one document. + * @param cls Constructor used by `plainToClass`. + * @param options Optional `ClassTransformOptions` overriding defaults. + * @returns Promise resolving to a class instance or `null`. + */ protected async executeOne>( query: Query, cls: new () => R, @@ -275,6 +336,18 @@ export abstract class BaseRepository< return plainToClass(cls, doc, options ?? this.transformOpts); } + /** + * Build a `findOne`/`findById` query. + * + * - `criteria` may be an `_id` string or any Mongo filter; + * an empty / falsy value is **not allowed** (throws). + * - Optional `projection` is forwarded unchanged. + * + * @param criteria Document `_id` **or** Mongo filter. + * @param projection Optional Mongo projection. + * @throws Error when `criteria` is empty. + * @returns Un-executed Mongoose query. + */ protected findOneQuery( criteria: string | TFilterQuery, projection?: ProjectionType, @@ -289,6 +362,18 @@ export abstract class BaseRepository< : this.model.findOne>(criteria, projection); } + /** + * Retrieve a single document and convert it to `this.cls`. + * + * - Returns `null` immediately when `criteria` is falsy. + * - Optional `options` are passed to `plainToClass`. + * - Optional `projection` limits returned fields. + * + * @param criteria Document `_id` **or** Mongo filter. + * @param options Class-transform options. + * @param projection Optional Mongo projection. 
+ * @returns Promise resolving to the found entity or `null`. + */ async findOne( criteria: string | TFilterQuery, options?: ClassTransformOptions, @@ -303,6 +388,16 @@ export abstract class BaseRepository< return await this.executeOne(query, this.cls, options); } + /** + * Retrieve a single document with all `populatePaths` relations resolved. + * + * - Throws if population is not configured. + * - Returns `null` when nothing matches `criteria`. + * + * @param criteria Document `_id` **or** Mongo filter. + * @param projection Optional Mongo projection. + * @returns Promise resolving to the populated entity or `null`. + */ async findOneAndPopulate( criteria: string | TFilterQuery, projection?: ProjectionType, @@ -329,6 +424,17 @@ export abstract class BaseRepository< projection?: ProjectionType, ): Query; + /** + * Build an un-executed `find` query with optional pagination, sorting, + * and projection. + * + * The returned query can be further chained or passed to `execute`. + * + * @param filter Mongo selector for the documents. + * @param pageQuery Sort tuple **or** paging object (optional). + * @param projection Mongo projection (optional). + * @returns A Mongoose `find` query with `skip`, `limit`, and `sort` applied. + */ protected findQuery( filter: TFilterQuery, pageQuery?: QuerySortDto | PageQueryDto, @@ -366,6 +472,20 @@ export abstract class BaseRepository< projection?: ProjectionType, ): Promise; + /** + * Find documents matching `filter`. + * + * - `pageQuery` may be: + * * a **sort descriptor** (`QuerySortDto`) ‒ an array of `[field, dir]` + * * a **paging object** (`PageQueryDto`) ‒ `{ limit, skip, sort }` + * - Optional `projection` is forwarded to `findQuery`. + * - Delegates execution to `this.execute`, mapping raw docs to `this.cls`. + * + * @param filter Mongo filter selecting documents. + * @param pageQuery Sort descriptor **or** paging object. + * @param projection Optional Mongo projection. + * @returns Promise resolving to the found documents. 
+ */ async find( filter: TFilterQuery, pageQuery?: QuerySortDto | PageQueryDto, @@ -380,6 +500,14 @@ export abstract class BaseRepository< return await this.execute(query, this.cls); } + /** + * Ensure that population is possible for the current repository. + * + * Throws when either `populatePaths` or `clsPopulate` is not configured, + * preventing accidental calls to population-aware methods. + * + * @throws Error if population cannot be performed. + */ private ensureCanPopulate(): void { if (!this.populatePaths || !this.clsPopulate) { throw new Error('Cannot populate query'); @@ -401,6 +529,20 @@ export abstract class BaseRepository< projection?: ProjectionType, ): Promise; + /** + * Find documents that match `filters` and return them with the relations + * in `populatePaths` resolved. + * + * - `pageQuery` can be either a sort descriptor (`QuerySortDto`) or a full + * paging object (`PageQueryDto`). + * - Optional `projection` is forwarded to `findQuery`. + * - Throws if the repository is not configured for population. + * + * @param filters Mongo filter. + * @param pageQuery Sort or paging information. + * @param projection Optional Mongo projection. + * @returns Promise resolving to the populated documents. + */ async findAndPopulate( filters: TFilterQuery, pageQuery?: QuerySortDto | PageQueryDto, @@ -420,16 +562,37 @@ export abstract class BaseRepository< return await this.execute(query, this.clsPopulate!); } + /** + * Build an un-executed query that selects **all** documents, + * applies `sort`, and disables pagination (`limit` / `skip` = 0). + * + * @param sort Optional sort descriptor. + * @returns Mongoose `find` query. + */ protected findAllQuery( sort?: QuerySortDto, ): Query { return this.findQuery({}, { limit: 0, skip: 0, sort }); } + /** + * Retrieve every document in the collection, optionally sorted. + * + * @param sort Optional sort descriptor. + * @returns Promise resolving to the documents. 
+ */ async findAll(sort?: QuerySortDto): Promise { return await this.find({}, { limit: 0, skip: 0, sort }); } + /** + * Retrieve every document with all `populatePaths` relations resolved. + * + * - Throws if population is not configured. + * + * @param sort Optional sort descriptor. + * @returns Promise resolving to the populated documents. + */ async findAllAndPopulate(sort?: QuerySortDto): Promise { this.ensureCanPopulate(); const query = this.findAllQuery(sort).populate(this.populatePaths); @@ -474,14 +637,38 @@ export abstract class BaseRepository< return await this.execute(query, this.clsPopulate!); } + /** + * Return the total number of documents in the collection + * (uses MongoDB’s `estimatedDocumentCount` for speed). + * + * @returns Promise resolving to the estimated document count. + */ async countAll(): Promise { return await this.model.estimatedDocumentCount().exec(); } + /** + * Count documents that match the given criteria + * (falls back to all documents when `criteria` is omitted). + * + * @param criteria Optional Mongo filter. + * @returns Promise resolving to the exact document count. + */ async count(criteria?: TFilterQuery): Promise { return await this.model.countDocuments(criteria).exec(); } + /** + * Persist a single document and return it as an instance of `this.cls`. + * + * Internally: + * 1. `model.create()` inserts the raw DTO. + * 2. The Mongoose document is converted to a plain object with `leanOpts`. + * 3. `plainToClass()` transforms that object into the domain class. + * + * @param dto Data-transfer object describing the new record. + * @returns A hydrated instance of the domain class. + */ async create(dto: DtoInfer): Promise { const doc = await this.model.create(dto); @@ -492,6 +679,12 @@ export abstract class BaseRepository< ); } + /** + * Persist an array of documents at once and map each result to `this.cls`. + * + * @param dtoArray Array of DTOs to insert. 
+ * @returns Array of domain-class instances in the same order as `dtoArray`. + */ async createMany( dtoArray: DtoInfer[], ): Promise { @@ -502,6 +695,21 @@ export abstract class BaseRepository< ); } + /** + * Update a **single** document and return the modified version. + * + * Behaviour : + * - `criteria` may be an `_id` string or any Mongo filter object. + * - `dto` is applied via `$set`; when `options.shouldFlatten` is true the + * payload is flattened (e.g. `"a.b": value`) before the update. + * - Fires the `pre|postUpdateValidate` hooks + events. + * - Throws if nothing matches the criteria or if `dto` is empty. + * + * @param criteria `_id` or filter selecting the target document. + * @param dto Partial update payload. + * @param options `new`, `upsert`, `shouldFlatten`, … (forwarded to Mongoose). + * @returns The updated document (with `new: true` by default). + */ async updateOne>( criteria: string | TFilterQuery, dto: UpdateQuery>, @@ -550,6 +758,18 @@ export abstract class BaseRepository< return result; } + /** + * Update **many** documents at once. + * + * - Applies `$set` with the supplied `dto`. + * - When `options.shouldFlatten` is true, flattens the payload first. + * - Does **not** run the validation / event hooks (use `updateOne` for that). + * + * @param filter Mongo filter selecting the documents to update. + * @param dto Update payload. + * @param options `{ shouldFlatten?: boolean }`. + * @returns MongoDB `UpdateWriteOpResult` describing the operation outcome. + */ async updateMany>( filter: TFilterQuery, dto: UpdateQuery, @@ -560,6 +780,17 @@ export abstract class BaseRepository< }); } + /** + * Remove **one** document, unless it is marked as `builtin: true`. + * + * If `criteria` is a string, it is treated as the document’s `_id`; + * otherwise it is used as a full Mongo filter. + * The filter is automatically augmented with `{ builtin: { $ne: true } }` + * to protect built-in records from deletion. 
+ * + * @param criteria Document `_id` or Mongo filter. + * @returns Promise that resolves to Mongo’s `DeleteResult`. + */ async deleteOne(criteria: string | TFilterQuery): Promise { const filter = typeof criteria === 'string' ? { _id: criteria } : criteria; @@ -568,10 +799,25 @@ export abstract class BaseRepository< .exec(); } + /** + * Remove **many** documents that match `criteria`, excluding those flagged + * with `builtin: true`. + * + * @param criteria Mongo filter describing the set to delete. + * @returns Promise that resolves to Mongo’s `DeleteResult`. + */ async deleteMany(criteria: TFilterQuery): Promise { return await this.model.deleteMany({ ...criteria, builtin: { $ne: true } }); } + /** + * Runs *before* create-validation logic. + * Override to perform domain-specific checks; throw to abort. + * + * @param _doc The document that will be created. + * @param _filterCriteria Optional additional criteria (e.g. conditional create). + * @param _updates Optional update pipeline when upserting. + */ async preCreateValidate( _doc: HydratedDocument, _filterCriteria?: FilterQuery, @@ -580,10 +826,23 @@ export abstract class BaseRepository< // Nothing ... } + /** + * Called *after* create-validation passes, + * but before persistence. Override for side-effects (audit logs, events, …). + * + * @param _validated The validated (not yet saved) document. + */ async postCreateValidate(_validated: HydratedDocument): Promise { // Nothing ... } + /** + * Runs *before* validating a single-document update. + * Override to enforce custom rules; throw to abort. + * + * @param _filterCriteria Query criteria used to locate the document. + * @param _updates Update payload or aggregation pipeline. + */ async preUpdateValidate( _filterCriteria: FilterQuery, _updates: UpdateWithAggregationPipeline | UpdateQuery, @@ -591,6 +850,13 @@ export abstract class BaseRepository< // Nothing ... } + /** + * Called *after* an update payload is validated, + * just before it is applied. 
+   *
+   * @param _filterCriteria Same criteria passed to the update.
+   * @param _updates The validated update payload.
+   */
   async postUpdateValidate(
     _filterCriteria: FilterQuery,
     _updates: UpdateWithAggregationPipeline | UpdateQuery,
@@ -598,14 +864,33 @@
     // Nothing ...
   }
 
+  /**
+   * Executes immediately before persisting a new document.
+   * Use to inject defaults, timestamps, or derive fields.
+   *
+   * @param _doc The document about to be saved.
+   */
   async preCreate(_doc: HydratedDocument): Promise {
     // Nothing ...
   }
 
+  /**
+   * Fires right after a document is saved.
+   * Useful for emitting events or refreshing caches.
+   *
+   * @param _created The newly created document.
+   */
   async postCreate(_created: HydratedDocument): Promise {
     // Nothing ...
   }
 
+  /**
+   * Runs before a `findOneAndUpdate` operation.
+   *
+   * @param _query The Mongoose query object.
+   * @param _criteria Original filter criteria.
+   * @param _updates Update payload or pipeline.
+   */
   async preUpdate(
     _query: Query,
     _criteria: TFilterQuery,
@@ -614,6 +899,13 @@
     // Nothing ...
   }
 
+  /**
+   * Runs before an `updateMany` operation.
+   *
+   * @param _query The Mongoose query object.
+   * @param _criteria Filter criteria.
+   * @param _updates Update payload or pipeline.
+   */
   async preUpdateMany(
     _query: Query,
     _criteria: TFilterQuery,
@@ -622,6 +914,12 @@
     // Nothing ...
   }
 
+  /**
+   * Fires after an `updateMany` completes.
+   *
+   * @param _query The originating query.
+   * @param _updated Mongoose result object.
+   */
   async postUpdateMany(
     _query: Query,
     _updated: any,
@@ -629,6 +927,12 @@
     // Nothing ...
  }
 
+  /**
+   * Fires after a `findOneAndUpdate` completes.
+   *
+   * @param _query The originating query.
+   * @param _updated The updated document.
+   */
   async postUpdate(
     _query: Query,
     _updated: T,
@@ -636,6 +940,12 @@
     // Nothing ...
} + /** + * Runs before a `deleteOne` or `deleteMany`. + * + * @param _query The Mongoose query object. + * @param _criteria Filter criteria. + */ async preDelete( _query: Query, _criteria: TFilterQuery, @@ -643,6 +953,12 @@ export abstract class BaseRepository< // Nothing ... } + /** + * Fires after a `deleteOne` or `deleteMany` completes. + * + * @param _query The originating query. + * @param _result MongoDB `DeleteResult`. + */ async postDelete( _query: Query, _result: DeleteResult, @@ -650,6 +966,21 @@ export abstract class BaseRepository< // Nothing ... } + /** + * Translate a `PageQueryDto` into MongoDB aggregation stages. + * + * Creates, in order: + * 1. **$sort** – when `page.sort` is provided. Accepts `1 | -1 | 'asc' | 'desc'` + * (plus `'ascending' | 'descending'`) and normalises them to `1` or `-1`. + * 2. **$skip** – when `page.skip` > 0. + * 3. **$limit** – when `page.limit` > 0. + * + * If `page` is omitted, an empty array is returned so callers can safely + * spread the result into a pipeline without extra checks. + * + * @param page Optional pagination/sort descriptor. + * @returns Array of `$sort`, `$skip`, and `$limit` stages in the correct order. + */ buildPaginationPipelineStages(page?: PageQueryDto): PipelineStage[] { if (!page) return []; @@ -675,6 +1006,13 @@ export abstract class BaseRepository< return stages; } + /** + * Populates the provided Mongoose documents with the relations listed in + * `this.populatePaths`, returning lean (plain) objects. + * + * @param docs Hydrated documents to enrich. + * @returns Promise resolving to the populated docs. + */ async populate(docs: THydratedDocument[]) { return await this.model.populate( docs, diff --git a/api/src/utils/test/fixtures/nlpsampleentity.ts b/api/src/utils/test/fixtures/nlpsampleentity.ts index a474227e..8d5d2447 100644 --- a/api/src/utils/test/fixtures/nlpsampleentity.ts +++ b/api/src/utils/test/fixtures/nlpsampleentity.ts @@ -1,5 +1,5 @@ /* - * Copyright © 2024 Hexastack. 
All rights reserved. + * Copyright © 2025 Hexastack. All rights reserved. * * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. @@ -28,7 +28,7 @@ export const nlpSampleEntityFixtures: NlpSampleEntityCreateDto[] = [ { sample: '2', entity: '0', - value: '2', + value: '3', }, { sample: '3', diff --git a/frontend/public/locales/en/translation.json b/frontend/public/locales/en/translation.json index fd7b1ec1..5173d80f 100644 --- a/frontend/public/locales/en/translation.json +++ b/frontend/public/locales/en/translation.json @@ -491,7 +491,7 @@ "original_text": "Original Text", "inputs": "Inputs", "outputs": "Outputs", - "any": "- Any -", + "any": "Any", "full_name": "First and last name", "password": "Password" }, diff --git a/frontend/public/locales/fr/translation.json b/frontend/public/locales/fr/translation.json index 0e746c98..8bc2f29f 100644 --- a/frontend/public/locales/fr/translation.json +++ b/frontend/public/locales/fr/translation.json @@ -492,7 +492,7 @@ "original_text": "Texte par défaut", "inputs": "Ports d'entrée", "outputs": "Ports de sortie", - "any": "- Toutes -", + "any": "Toutes", "full_name": "Nom et Prénom", "password": "Mot de passe" }, diff --git a/frontend/src/app-components/inputs/NlpPatternSelect.tsx b/frontend/src/app-components/inputs/NlpPatternSelect.tsx index 39e4c4b9..8ba42786 100644 --- a/frontend/src/app-components/inputs/NlpPatternSelect.tsx +++ b/frontend/src/app-components/inputs/NlpPatternSelect.tsx @@ -44,10 +44,11 @@ interface NlpPatternSelectProps > { patterns: NlpPattern[]; onChange: (patterns: NlpPattern[]) => void; + noneLabel?: string; } const NlpPatternSelect = ( - { patterns, onChange, ...props }: NlpPatternSelectProps, + { patterns, onChange, noneLabel = "", ...props }: NlpPatternSelectProps, ref, ) => { const inputRef = useRef(null); @@ 
-91,23 +92,29 @@ const NlpPatternSelect = ( valueId: string, ): void => { const newSelection = patterns.slice(0); - const update = newSelection.find(({ entity: e }) => e === name); + const idx = newSelection.findIndex(({ entity: e }) => e === name); - if (!update) { + if (idx === -1) { throw new Error("Unable to find nlp entity"); } if (valueId === id) { - update.match = "entity"; - update.value = name; + newSelection[idx] = { + entity: newSelection[idx].entity, + match: "entity", + }; } else { const value = getNlpValueFromCache(valueId); if (!value) { throw new Error("Unable to find nlp value in cache"); } - update.match = "value"; - update.value = value.value; + + newSelection[idx] = { + entity: newSelection[idx].entity, + match: "value", + value: value.value, + }; } onChange(newSelection); @@ -119,10 +126,11 @@ const NlpPatternSelect = ( ); } - const defaultValue = - options.filter(({ name }) => - patterns.find(({ entity: entityName }) => entityName === name), - ) || {}; + const defaultValue = patterns + .map(({ entity: entityName }) => + options.find(({ name }) => entityName === name), + ) + .filter(Boolean) as INlpEntity[]; return ( getNlpValueFromCache(vId), ) as INlpValue[]; - const selectedValue = patterns.find( - (e) => e.entity === name, - )?.value; + const currentPattern = patterns.find((e) => e.entity === name); + const selectedValue = + currentPattern?.match === "value" ? 
currentPattern.value : null; const { id: selectedId = id } = nlpValues.find(({ value }) => value === selectedValue) || {}; @@ -204,7 +212,7 @@ const NlpPatternSelect = ( } if (option === id) { - return t("label.any"); + return `- ${noneLabel || t("label.any")} -`; } return option; diff --git a/frontend/src/app-components/tables/DataGrid.tsx b/frontend/src/app-components/tables/DataGrid.tsx index 8816ae72..25ddbc61 100644 --- a/frontend/src/app-components/tables/DataGrid.tsx +++ b/frontend/src/app-components/tables/DataGrid.tsx @@ -75,7 +75,7 @@ export const DataGrid = ({ slots={slots} slotProps={{ loadingOverlay: { - variant: "linear-progress", + variant: "skeleton", noRowsVariant: "skeleton", }, }} diff --git a/frontend/src/components/nlp/components/NlpSample.tsx b/frontend/src/components/nlp/components/NlpSample.tsx index e2d0f127..9cd5de67 100644 --- a/frontend/src/components/nlp/components/NlpSample.tsx +++ b/frontend/src/components/nlp/components/NlpSample.tsx @@ -96,7 +96,10 @@ export default function NlpSample() { $eq: [ ...(type !== "all" ? [{ type }] : []), ...(language ? [{ language }] : []), - ...(patterns ? [{ patterns }] : []), + // We send only value match patterns + ...(patterns + ? 
[{ patterns: patterns.filter(({ match }) => match === "value") }] + : []), ], $iLike: ["text"], }, @@ -218,6 +221,7 @@ export default function NlpSample() { {row.entities .map((e) => getSampleEntityFromCache(e) as INlpSampleEntity) .filter((e) => !!e) + .sort((a, b) => String(a.entity).localeCompare(String(b.entity))) .map((entity) => ( { - setPatterns(patterns); - }} + onChange={setPatterns} fullWidth={true} + noneLabel={t("label.select")} /> diff --git a/frontend/src/hooks/crud/useFind.tsx b/frontend/src/hooks/crud/useFind.tsx index 04c25b1a..d1580082 100644 --- a/frontend/src/hooks/crud/useFind.tsx +++ b/frontend/src/hooks/crud/useFind.tsx @@ -56,24 +56,27 @@ export const useFind = < entity, ); const getFromCache = useGetFromCache(entity); - const { data: total } = useCount(entity, params["where"], { + const countQuery = useCount(entity, params["where"], { enabled: hasCount, }); const { dataGridPaginationProps, pageQueryPayload } = usePagination( - total?.count, + countQuery.data?.count, initialPaginationState, initialSortState, hasCount, ); const normalizedParams = { ...pageQueryPayload, ...(params || {}) }; - const enabled = !!total || !hasCount; + const enabled = !!countQuery.data || !hasCount; const { data: ids, ...normalizedQuery } = useQuery({ enabled, queryFn: async () => { - const data = await api.find( - normalizedParams, - format === Format.FULL && (POPULATE_BY_TYPE[entity] as P), - ); + const data = + !hasCount || (hasCount && !!countQuery.data?.count) + ? 
await api.find( + normalizedParams, + format === Format.FULL && (POPULATE_BY_TYPE[entity] as P), + ) + : []; const { result } = normalizeAndCache(data); return result; @@ -100,7 +103,11 @@ export const useFind = < dataGridProps: { ...dataGridPaginationProps, rows: data || [], - loading: normalizedQuery.isLoading || normalizedQuery.isFetching, + loading: + normalizedQuery.isLoading || + normalizedQuery.isFetching || + countQuery.isLoading || + countQuery.isFetching, }, }; }; diff --git a/frontend/src/types/block.types.ts b/frontend/src/types/block.types.ts index 0a4917d0..9c7fa913 100644 --- a/frontend/src/types/block.types.ts +++ b/frontend/src/types/block.types.ts @@ -68,12 +68,19 @@ export interface PayloadPattern { type?: PayloadType; } -export type NlpPattern = { +export type NlpEntityMatchPattern = { entity: string; - match: "value" | "entity"; + match: "entity"; +}; + +export type NlpValueMatchPattern = { + entity: string; + match: "value"; value: string; }; +export type NlpPattern = NlpEntityMatchPattern | NlpValueMatchPattern; + export type Pattern = null | string | PayloadPattern | NlpPattern[]; export type PatternType = From 7b13bd07ba588962b275c0663ae8b2543d69c6f7 Mon Sep 17 00:00:00 2001 From: Mohamed Marrouchi Date: Thu, 5 Jun 2025 15:13:07 +0100 Subject: [PATCH 4/7] test: add unit tests for the nlp sample repo --- .../nlp-sample.repository.spec.ts | 169 +++++++++++++++++- 1 file changed, 163 insertions(+), 6 deletions(-) diff --git a/api/src/nlp/repositories/nlp-sample.repository.spec.ts b/api/src/nlp/repositories/nlp-sample.repository.spec.ts index 7aea6bc1..190e1c7d 100644 --- a/api/src/nlp/repositories/nlp-sample.repository.spec.ts +++ b/api/src/nlp/repositories/nlp-sample.repository.spec.ts @@ -7,9 +7,11 @@ */ import { MongooseModule } from '@nestjs/mongoose'; +import { Types } from 'mongoose'; import { LanguageRepository } from '@/i18n/repositories/language.repository'; import { Language, LanguageModel } from '@/i18n/schemas/language.schema'; 
+import { PageQueryDto } from '@/utils/pagination/pagination-query.dto'; import { nlpSampleFixtures } from '@/utils/test/fixtures/nlpsample'; import { installNlpSampleEntityFixtures } from '@/utils/test/fixtures/nlpsampleentity'; import { getPageQuery } from '@/utils/test/pagination'; @@ -29,13 +31,16 @@ import { NlpSampleFull, NlpSampleModel, } from '../schemas/nlp-sample.schema'; +import { NlpValue, NlpValueModel } from '../schemas/nlp-value.schema'; import { NlpSampleEntityRepository } from './nlp-sample-entity.repository'; import { NlpSampleRepository } from './nlp-sample.repository'; +import { NlpValueRepository } from './nlp-value.repository'; describe('NlpSampleRepository', () => { let nlpSampleRepository: NlpSampleRepository; let nlpSampleEntityRepository: NlpSampleEntityRepository; + let nlpValueRepository: NlpValueRepository; let languageRepository: LanguageRepository; let nlpSampleEntity: NlpSampleEntity | null; let noNlpSample: NlpSample | null; @@ -48,21 +53,28 @@ describe('NlpSampleRepository', () => { MongooseModule.forFeature([ NlpSampleModel, NlpSampleEntityModel, + NlpValueModel, LanguageModel, ]), ], providers: [ NlpSampleRepository, NlpSampleEntityRepository, + NlpValueRepository, LanguageRepository, ], }); - [nlpSampleRepository, nlpSampleEntityRepository, languageRepository] = - await getMocks([ - NlpSampleRepository, - NlpSampleEntityRepository, - LanguageRepository, - ]); + [ + nlpSampleRepository, + nlpSampleEntityRepository, + nlpValueRepository, + languageRepository, + ] = await getMocks([ + NlpSampleRepository, + NlpSampleEntityRepository, + NlpValueRepository, + LanguageRepository, + ]); noNlpSample = await nlpSampleRepository.findOne({ text: 'No' }); nlpSampleEntity = await nlpSampleEntityRepository.findOne({ sample: noNlpSample!.id, @@ -141,4 +153,149 @@ describe('NlpSampleRepository', () => { expect(sampleEntities.length).toEqual(0); }); }); + + describe('findByEntities', () => { + it('should return mapped NlpSample instances for 
matching entities', async () => { + const filters = {}; + const values = await nlpValueRepository.find({ value: 'greeting' }); + + const result = await nlpSampleRepository.findByEntities({ + filters, + values, + }); + expect(result).toHaveLength(2); + expect(result[0]).toBeInstanceOf(NlpSample); + expect(result[0].text).toBe('Hello'); + }); + + it('should return an empty array if no samples match', async () => { + const filters = {}; + const values = [ + { + id: new Types.ObjectId().toHexString(), + entity: new Types.ObjectId().toHexString(), + value: 'nonexistent', + }, + ] as NlpValue[]; + + const result = await nlpSampleRepository.findByEntities({ + filters, + values, + }); + + expect(Array.isArray(result)).toBe(true); + expect(result).toHaveLength(0); + }); + }); + + describe('findByEntitiesAndPopulate', () => { + it('should return populated NlpSampleFull instances for matching entities', async () => { + const filters = {}; + const values = await nlpValueRepository.find({ value: 'greeting' }); + + const result = await nlpSampleRepository.findByEntitiesAndPopulate({ + filters, + values, + }); + + expect(result.length).toBe(2); + result.forEach((sample) => { + expect(sample).toBeInstanceOf(NlpSampleFull); + expect(sample.entities).toBeDefined(); + expect(Array.isArray(sample.entities)).toBe(true); + expect(sample.language).toBeDefined(); + }); + }); + + it('should return an empty array if no samples match', async () => { + const filters = {}; + const values = [ + { + id: new Types.ObjectId().toHexString(), + entity: new Types.ObjectId().toHexString(), + value: 'nonexistent', + }, + ] as NlpValue[]; + + const result = await nlpSampleRepository.findByEntitiesAndPopulate({ + filters, + values, + }); + + expect(Array.isArray(result)).toBe(true); + expect(result).toHaveLength(0); + }); + + it('should support pagination and projection', async () => { + const filters = {}; + const values = await nlpValueRepository.find({ value: 'greeting' }); + const page = { + limit: 
1, + skip: 0, + sort: ['text', 'asc'], + } as PageQueryDto; + const projection = { text: 1 }; + + const result = await nlpSampleRepository.findByEntitiesAndPopulate( + { filters, values }, + page, + projection, + ); + + expect(Array.isArray(result)).toBe(true); + expect(result.length).toBe(1); + if (result.length > 0) { + expect(result[0]).toHaveProperty('text'); + } + }); + }); + + describe('countByEntities', () => { + it('should return the correct count for matching entities', async () => { + const filters = {}; + const values = await nlpValueRepository.find({ value: 'greeting' }); + + const count = await nlpSampleRepository.countByEntities({ + filters, + values, + }); + + expect(typeof count).toBe('number'); + expect(count).toBe(2); + }); + + it('should return 0 if no samples match', async () => { + const filters = {}; + const values = [ + { + id: new Types.ObjectId().toHexString(), + entity: new Types.ObjectId().toHexString(), + value: 'nonexistent', + }, + ] as NlpValue[]; + + const count = await nlpSampleRepository.countByEntities({ + filters, + values, + }); + + expect(count).toBe(0); + }); + + it('should respect filters (e.g. 
language)', async () => { + const values = await nlpValueRepository.find({ value: 'greeting' }); + const language = languages[0]; + const filters = { language: language.id }; + + const count = await nlpSampleRepository.countByEntities({ + filters, + values, + }); + + // Should be <= total greeting samples, and >= 0 + expect(typeof count).toBe('number'); + expect(count).toBeGreaterThanOrEqual(0); + expect(count).toBeLessThanOrEqual(2); + }); + }); }); From 1d837ca51d9b425a109c31c8e42d3c8aa23d32dc Mon Sep 17 00:00:00 2001 From: Mohamed Marrouchi Date: Thu, 5 Jun 2025 15:29:33 +0100 Subject: [PATCH 5/7] test: add unit tests --- .../controllers/nlp-sample.controller.spec.ts | 34 ++++ .../nlp/repositories/nlp-sample.repository.ts | 7 +- .../nlp/services/nlp-sample.service.spec.ts | 151 ++++++++++++++++++ api/src/nlp/services/nlp-sample.service.ts | 41 ++--- 4 files changed, 212 insertions(+), 21 deletions(-) diff --git a/api/src/nlp/controllers/nlp-sample.controller.spec.ts b/api/src/nlp/controllers/nlp-sample.controller.spec.ts index 21436ba5..464f3276 100644 --- a/api/src/nlp/controllers/nlp-sample.controller.spec.ts +++ b/api/src/nlp/controllers/nlp-sample.controller.spec.ts @@ -182,6 +182,40 @@ describe('NlpSampleController', () => { })), ); }); + + it('should find nlp samples with patterns', async () => { + const pageQuery = getPageQuery({ sort: ['text', 'desc'] }); + const patterns: NlpValueMatchPattern[] = [ + { entity: 'intent', match: 'value', value: 'greeting' }, + ]; + const result = await nlpSampleController.findPage( + pageQuery, + ['language', 'entities'], + {}, + patterns, + ); + // Should only return samples matching the pattern + const nlpSamples = await nlpSampleService.findByPatternsAndPopulate( + { filters: {}, patterns }, + pageQuery, + ); + expect(result).toEqualPayload(nlpSamples); + }); + + it('should return empty array if no samples match the patterns', async () => { + const pageQuery = getPageQuery({ sort: ['text', 'desc'] }); + const patterns: 
NlpValueMatchPattern[] = [ + { entity: 'intent', match: 'value', value: 'nonexistent' }, + ]; + const result = await nlpSampleController.findPage( + pageQuery, + ['language', 'entities'], + {}, + patterns, + ); + expect(Array.isArray(result)).toBe(true); + expect(result).toHaveLength(0); + }); }); describe('count', () => { diff --git a/api/src/nlp/repositories/nlp-sample.repository.ts b/api/src/nlp/repositories/nlp-sample.repository.ts index c7b2e90a..13c38544 100644 --- a/api/src/nlp/repositories/nlp-sample.repository.ts +++ b/api/src/nlp/repositories/nlp-sample.repository.ts @@ -78,9 +78,12 @@ export class NlpSampleRepository extends BaseRepository< })); return [ - // Apply sample-side filters early { $match: { + // @todo: think of a better way to handle language to objectId conversion + // This is a workaround for the fact that language is stored as an ObjectId + // in the database, but we want to filter by its string representation. + ...filters, ...(filters?.$and ? { $and: filters.$and?.map((condition) => { @@ -266,7 +269,7 @@ export class NlpSampleRepository extends BaseRepository< * Returns the count of samples by filters, entities and/or values * * @param criterias `{ filters, entities, values }` - * @returns Promise resolving to `{ count: number }`. + * @returns Promise resolving to the count. 
*/ async countByEntities(criterias: { filters: TFilterQuery; diff --git a/api/src/nlp/services/nlp-sample.service.spec.ts b/api/src/nlp/services/nlp-sample.service.spec.ts index d1c3798c..40e76b4e 100644 --- a/api/src/nlp/services/nlp-sample.service.spec.ts +++ b/api/src/nlp/services/nlp-sample.service.spec.ts @@ -10,9 +10,11 @@ import { CACHE_MANAGER } from '@nestjs/cache-manager'; import { BadRequestException, NotFoundException } from '@nestjs/common'; import { MongooseModule } from '@nestjs/mongoose'; +import { NlpValueMatchPattern } from '@/chat/schemas/types/pattern'; import { LanguageRepository } from '@/i18n/repositories/language.repository'; import { Language, LanguageModel } from '@/i18n/schemas/language.schema'; import { LanguageService } from '@/i18n/services/language.service'; +import { PageQueryDto } from '@/utils/pagination/pagination-query.dto'; import { nlpSampleFixtures } from '@/utils/test/fixtures/nlpsample'; import { installNlpSampleEntityFixtures } from '@/utils/test/fixtures/nlpsampleentity'; import { getPageQuery } from '@/utils/test/pagination'; @@ -360,4 +362,153 @@ describe('NlpSampleService', () => { expect(extractSpy).not.toHaveBeenCalled(); }); }); + + describe('findByPatterns', () => { + it('should return samples matching the given patterns', async () => { + // Assume pattern: entity 'intent', value 'greeting' + const patterns: NlpValueMatchPattern[] = [ + { entity: 'intent', match: 'value', value: 'greeting' }, + ]; + + const result = await nlpSampleService.findByPatterns( + { filters: {}, patterns }, + undefined, + ); + + expect(Array.isArray(result)).toBe(true); + expect(result[0].text).toBe('Hello'); + }); + + it('should return an empty array if no samples match the patterns', async () => { + const patterns: NlpValueMatchPattern[] = [ + { entity: 'intent', match: 'value', value: 'nonexistent' }, + ]; + + const result = await nlpSampleService.findByPatterns( + { filters: {}, patterns }, + undefined, + ); + + 
expect(Array.isArray(result)).toBe(true); + expect(result).toHaveLength(0); + }); + + it('should support pagination', async () => { + const patterns: NlpValueMatchPattern[] = [ + { entity: 'intent', match: 'value', value: 'greeting' }, + ]; + const page: PageQueryDto = { + limit: 1, + skip: 0, + sort: ['text', 'asc'], + }; + + const result = await nlpSampleService.findByPatterns( + { filters: {}, patterns }, + page, + ); + + expect(Array.isArray(result)).toBe(true); + expect(result.length).toBe(1); + }); + }); + + describe('findByPatternsAndPopulate', () => { + it('should return populated NlpSampleFull instances for matching patterns', async () => { + const patterns: NlpValueMatchPattern[] = [ + { entity: 'intent', match: 'value', value: 'greeting' }, + ]; + + const result = await nlpSampleService.findByPatternsAndPopulate( + { filters: {}, patterns }, + undefined, + ); + + expect(Array.isArray(result)).toBe(true); + expect(result.length).toBeGreaterThan(0); + result.forEach((sample) => { + expect(sample).toBeInstanceOf(NlpSampleFull); + expect(sample.entities).toBeDefined(); + expect(Array.isArray(sample.entities)).toBe(true); + expect(sample.language).toBeDefined(); + }); + }); + + it('should return an empty array if no samples match the patterns', async () => { + const patterns: NlpValueMatchPattern[] = [ + { entity: 'intent', match: 'value', value: 'nonexistent' }, + ]; + + const result = await nlpSampleService.findByPatternsAndPopulate( + { filters: {}, patterns }, + undefined, + ); + + expect(Array.isArray(result)).toBe(true); + expect(result).toHaveLength(0); + }); + + it('should support pagination and projection', async () => { + const patterns: NlpValueMatchPattern[] = [ + { entity: 'intent', match: 'value', value: 'greeting' }, + ]; + const page: PageQueryDto = { + limit: 1, + skip: 0, + sort: ['text', 'asc'], + }; + + const result = await nlpSampleService.findByPatternsAndPopulate( + { filters: {}, patterns }, + page, + ); + + 
expect(Array.isArray(result)).toBe(true); + expect(result.length).toBe(1); + }); + }); + + describe('countByPatterns', () => { + it('should return the correct count for matching patterns', async () => { + const patterns: NlpValueMatchPattern[] = [ + { entity: 'intent', match: 'value', value: 'greeting' }, + ]; + + const count = await nlpSampleService.countByPatterns({ + filters: {}, + patterns, + }); + + expect(typeof count).toBe('number'); + expect(count).toBe(2); + }); + + it('should return 0 if no samples match the patterns', async () => { + const patterns: NlpValueMatchPattern[] = [ + { entity: 'intent', match: 'value', value: 'nonexistent' }, + ]; + + const count = await nlpSampleService.countByPatterns({ + filters: {}, + patterns, + }); + + expect(count).toBe(0); + }); + + it('should respect filters (e.g. language)', async () => { + const patterns: NlpValueMatchPattern[] = [ + { entity: 'intent', match: 'value', value: 'greeting' }, + ]; + const filters = { text: 'Hello' }; + + const count = await nlpSampleService.countByPatterns({ + filters, + patterns, + }); + + expect(typeof count).toBe('number'); + expect(count).toBe(1); + }); + }); }); diff --git a/api/src/nlp/services/nlp-sample.service.ts b/api/src/nlp/services/nlp-sample.service.ts index 31d3c009..94de75fb 100644 --- a/api/src/nlp/services/nlp-sample.service.ts +++ b/api/src/nlp/services/nlp-sample.service.ts @@ -80,15 +80,16 @@ export class NlpSampleService extends BaseService< page?: PageQueryDto, projection?: ProjectionType, ): Promise { - const values = - patterns.length > 0 - ? 
await this.nlpValueService.findByPatterns(patterns) - : []; - - if (values.length === 0) { + if (!patterns.length) { return await this.repository.find(filters, page, projection); } + const values = await this.nlpValueService.findByPatterns(patterns); + + if (!values.length) { + return []; + } + return await this.repository.findByEntities( { filters, @@ -119,15 +120,16 @@ export class NlpSampleService extends BaseService< page?: PageQueryDto, projection?: ProjectionType, ): Promise { - const values = - patterns.length > 0 - ? await this.nlpValueService.findByPatterns(patterns) - : []; - - if (values.length === 0) { + if (!patterns.length) { return await this.repository.findAndPopulate(filters, page, projection); } + const values = await this.nlpValueService.findByPatterns(patterns); + + if (!values.length) { + return []; + } + return await this.repository.findByEntitiesAndPopulate( { filters, @@ -143,7 +145,7 @@ export class NlpSampleService extends BaseService< * present in `patterns`. * * @param param0 `{ filters, patterns }` - * @returns Promise resolving to `{ count }`. + * @returns Promise resolving to the count. */ async countByPatterns({ filters, @@ -152,15 +154,16 @@ export class NlpSampleService extends BaseService< filters: TFilterQuery; patterns: NlpValueMatchPattern[]; }): Promise { - const values = - patterns.length > 0 - ? 
await this.nlpValueService.findByPatterns(patterns) - : []; - - if (values.length === 0) { + if (!patterns.length) { return await this.repository.count(filters); } + const values = await this.nlpValueService.findByPatterns(patterns); + + if (!values.length) { + return 0; + } + return await this.repository.countByEntities({ filters, values, From 4beeaae087bda13e52f2100a0c9e65f7c874abc9 Mon Sep 17 00:00:00 2001 From: Mohamed Marrouchi Date: Mon, 9 Jun 2025 16:11:01 +0100 Subject: [PATCH 6/7] feat: disallow multiple matches when local fallback is enabled --- api/src/chat/services/block.service.spec.ts | 38 +++- api/src/chat/services/block.service.ts | 219 +++++++++++--------- api/src/chat/services/bot.service.spec.ts | 2 +- api/src/chat/services/bot.service.ts | 13 +- api/src/utils/test/mocks/block.ts | 8 + 5 files changed, 168 insertions(+), 112 deletions(-) diff --git a/api/src/chat/services/block.service.spec.ts b/api/src/chat/services/block.service.spec.ts index f4d45440..0988e6de 100644 --- a/api/src/chat/services/block.service.spec.ts +++ b/api/src/chat/services/block.service.spec.ts @@ -65,6 +65,7 @@ import { mockNlpGreetingNamePatterns, mockNlpGreetingPatterns, mockNlpGreetingWrongNamePatterns, + mockWebChannelData, } from '@/utils/test/mocks/block'; import { contextBlankInstance, @@ -288,11 +289,7 @@ describe('BlockService', () => { text: 'Hello', }, }, - { - isSocket: true, - ipAddress: '1.1.1.1', - agent: 'Chromium', - }, + mockWebChannelData, ); const webEventGetStarted = new WebEventWrapper( handlerMock, @@ -303,11 +300,18 @@ describe('BlockService', () => { payload: 'GET_STARTED', }, }, + mockWebChannelData, + ); + + const webEventAmbiguous = new WebEventWrapper( + handlerMock, { - isSocket: true, - ipAddress: '1.1.1.1', - agent: 'Chromium', + type: Web.IncomingMessageType.text, + data: { + text: "It's not a yes or no answer!", + }, }, + mockWebChannelData, ); it('should return undefined when no blocks are provided', async () => { @@ -332,6 +336,24 @@ 
describe('BlockService', () => { expect(result).toEqual(blockGetStarted); }); + it('should return undefined when multiple matches are not allowed', async () => { + const result = await blockService.match( + [ + { + ...blockEmpty, + patterns: ['/yes/'], + }, + { + ...blockEmpty, + patterns: ['/no/'], + }, + ], + webEventAmbiguous, + false, + ); + expect(result).toEqual(undefined); + }); + it('should match block with payload', async () => { webEventGetStarted.setSender(subscriberWithLabels); const result = await blockService.match(blocks, webEventGetStarted); diff --git a/api/src/chat/services/block.service.ts b/api/src/chat/services/block.service.ts index fe88a274..d85b7bc1 100644 --- a/api/src/chat/services/block.service.ts +++ b/api/src/chat/services/block.service.ts @@ -64,68 +64,66 @@ export class BlockService extends BaseService< } /** - * Filters an array of blocks based on the specified channel. + * Checks if block is supported on the specified channel. * - * This function ensures that only blocks that are either: - * - Not restricted to specific trigger channels (`trigger_channels` is undefined or empty), or - * - Explicitly allow the given channel - * - * are included in the returned array. - * - * @param blocks - The list of blocks to be filtered. + * @param block - The block * @param channel - The name of the channel to filter blocks by. * - * @returns The filtered array of blocks that are allowed for the given channel. + * @returns Whether the block is supported on the given channel. */ - filterBlocksByChannel( - blocks: B[], + isChannelSupported( + block: B, channel: ChannelName, ) { - return blocks.filter((b) => { - return ( - !b.trigger_channels || - b.trigger_channels.length === 0 || - b.trigger_channels.includes(channel) - ); - }); + return ( + !block.trigger_channels || + block.trigger_channels.length === 0 || + block.trigger_channels.includes(channel) + ); } /** - * Filters an array of blocks based on subscriber labels. 
+ * Checks if the block matches the subscriber labels, allowing for two scenarios: + * - Has no trigger labels (making it applicable to all subscribers), or + * - Contains at least one trigger label that matches a label from the provided list. * - * This function selects blocks that either: - * - Have no trigger labels (making them applicable to all subscribers), or - * - Contain at least one trigger label that matches a label from the provided list. - * - * The filtered blocks are then **sorted** in descending order by the number of trigger labels, - * ensuring that blocks with more specific targeting (more trigger labels) are prioritized. - * - * @param blocks - The list of blocks to be filtered. + * @param block - The block to check. * @param labels - The list of subscriber labels to match against. - * @returns The filtered and sorted list of blocks. + * @returns True if the block matches the subscriber labels, false otherwise. */ - filterBlocksBySubscriberLabels( - blocks: B[], - profile?: Subscriber, + matchesSubscriberLabels( + block: B, + subscriber?: Subscriber, ) { - if (!profile) { - return blocks; + if (!subscriber) { + return block; } - return ( - blocks - .filter((b) => { - const triggerLabels = b.trigger_labels.map((l) => - typeof l === 'string' ? l : l.id, - ); - return ( - triggerLabels.length === 0 || - triggerLabels.some((l) => profile.labels.includes(l)) - ); - }) - // Priority goes to block who target users with labels - .sort((a, b) => b.trigger_labels.length - a.trigger_labels.length) + const triggerLabels = block.trigger_labels.map((l: string | Label) => + typeof l === 'string' ? l : l.id, ); + return ( + triggerLabels.length === 0 || + triggerLabels.some((l) => subscriber.labels.includes(l)) + ); + } + + /** + * Retrieves the configured NLU penalty factor from settings, or falls back to a default value. + * + * @returns The NLU penalty factor as a number. 
+ */ + private async getPenaltyFactor(): Promise { + const settings = await this.settingService.getSettings(); + const configured = settings.chatbot_settings?.default_nlu_penalty_factor; + + if (configured == null) { + this.logger.warn( + 'Using fallback NLU penalty factor value: %s', + FALLBACK_DEFAULT_NLU_PENALTY_FACTOR, + ); + } + return configured ?? FALLBACK_DEFAULT_NLU_PENALTY_FACTOR; } /** @@ -133,75 +131,88 @@ export class BlockService extends BaseService< * * @param filteredBlocks blocks Starting/Next blocks in the conversation flow * @param event Received channel's message + * @param canHaveMultipleMatches Whether to allow multiple matches for the same event + * (eg. Yes/No question to which the answer is ambiguous "Sometimes yes, sometimes no") * * @returns The block that matches */ async match( blocks: BlockFull[], event: EventWrapper, + canHaveMultipleMatches = true, ): Promise { if (!blocks.length) { return undefined; } - // Search for block matching a given event - let block: BlockFull | undefined = undefined; - const payload = event.getPayload(); + // Narrow the search space + const channelName = event.getHandler().getName(); + const sender = event.getSender(); + const candidates = blocks.filter( + (b) => + this.isChannelSupported(b, channelName) && + this.matchesSubscriberLabels(b, sender), + ); - // Perform a filter to get the candidates blocks - const filteredBlocks = this.filterBlocksBySubscriberLabels( - this.filterBlocksByChannel(blocks, event.getHandler().getName()), - event.getSender(), + if (!candidates.length) { + return undefined; + } + + // Priority goes to block who target users with labels + const prioritizedCandidates = candidates.sort( + (a, b) => b.trigger_labels.length - a.trigger_labels.length, ); // Perform a payload match & pick last createdAt + const payload = event.getPayload(); if (payload) { - block = filteredBlocks - .filter((b) => { - return this.matchPayload(payload, b); - }) - .shift(); - } - - if (!block) { - // Perform 
a text match (Text or Quick reply) - const text = event.getText().trim(); - - // Perform a text pattern match - block = filteredBlocks - .filter((b) => { - return this.matchText(text, b); - }) - .shift(); - - // Perform an NLP Match - const nlp = event.getNLP(); - if (!block && nlp) { - const scoredEntities = - await this.nlpService.computePredictionScore(nlp); - - const settings = await this.settingService.getSettings(); - let penaltyFactor = - settings.chatbot_settings?.default_nlu_penalty_factor; - if (!penaltyFactor) { - this.logger.warn( - 'Using fallback NLU penalty factor value: %s', - FALLBACK_DEFAULT_NLU_PENALTY_FACTOR, - ); - penaltyFactor = FALLBACK_DEFAULT_NLU_PENALTY_FACTOR; - } - - if (scoredEntities.entities.length > 0) { - block = this.matchBestNLP( - filteredBlocks, - scoredEntities, - penaltyFactor, - ); - } + const payloadMatches = prioritizedCandidates.filter((b) => { + return this.matchPayload(payload, b); + }); + if (payloadMatches.length > 1 && !canHaveMultipleMatches) { + // If the payload matches multiple blocks , + // we return undefined so that we trigger the local fallback + return undefined; + } else if (payloadMatches.length > 0) { + // If we have a payload match, we return the first one + // (which is the most recent one due to the sort) + // and we don't check for text or NLP matches + return payloadMatches[0]; } } - return block; + // Perform a text match (Text or Quick reply) + const text = event.getText().trim(); + if (text) { + const textMatches = prioritizedCandidates.filter((b) => { + return this.matchText(text, b); + }); + + if (textMatches.length > 1 && !canHaveMultipleMatches) { + // If the text matches multiple blocks (especially regex), + // we return undefined so that we trigger the local fallback + return undefined; + } else if (textMatches.length > 0) { + return textMatches[0]; + } + } + + // Perform an NLP Match + const nlp = event.getNLP(); + if (nlp) { + const scoredEntities = await 
this.nlpService.computePredictionScore(nlp); + + if (scoredEntities.entities.length) { + const penaltyFactor = await this.getPenaltyFactor(); + return this.matchBestNLP( + prioritizedCandidates, + scoredEntities, + penaltyFactor, + ); + } + } + + return undefined; } /** @@ -500,11 +511,19 @@ export class BlockService extends BaseService< envelope: StdOutgoingSystemEnvelope, ) { // Perform a filter to get the candidates blocks - const filteredBlocks = this.filterBlocksBySubscriberLabels( - this.filterBlocksByChannel(blocks, event.getHandler().getName()), - event.getSender(), + const handlerName = event.getHandler().getName(); + const sender = event.getSender(); + const candidates = blocks.filter( + (b) => + this.isChannelSupported(b, handlerName) && + this.matchesSubscriberLabels(b, sender), ); - return filteredBlocks.find((b) => { + + if (!candidates.length) { + return undefined; + } + + return candidates.find((b) => { return b.patterns .filter( (p) => typeof p === 'object' && 'type' in p && p.type === 'outcome', diff --git a/api/src/chat/services/bot.service.spec.ts b/api/src/chat/services/bot.service.spec.ts index 93814633..d8b61596 100644 --- a/api/src/chat/services/bot.service.spec.ts +++ b/api/src/chat/services/bot.service.spec.ts @@ -293,7 +293,7 @@ describe('BotService', () => { event.setSender(webSubscriber); const clearMock = jest - .spyOn(botService, 'handleIncomingMessage') + .spyOn(botService, 'handleOngoingConversationMessage') .mockImplementation( async ( actualConversation: ConversationFull, diff --git a/api/src/chat/services/bot.service.ts b/api/src/chat/services/bot.service.ts index a5b9dbb3..ae3dac76 100644 --- a/api/src/chat/services/bot.service.ts +++ b/api/src/chat/services/bot.service.ts @@ -253,7 +253,7 @@ export class BotService { * * @returns A promise that resolves with a boolean indicating whether the conversation is active and a matching block was found. 
*/ - async handleIncomingMessage( + async handleOngoingConversationMessage( convo: ConversationFull, event: EventWrapper, ) { @@ -272,8 +272,15 @@ export class BotService { max_attempts: 0, }; + // We will avoid having multiple matches when we are not at the start of a conversation + // and only if local fallback is enabled + const canHaveMultipleMatches = !fallbackOptions.active; // Find the next block that matches - const matchedBlock = await this.blockService.match(nextBlocks, event); + const matchedBlock = await this.blockService.match( + nextBlocks, + event, + canHaveMultipleMatches, + ); // If there is no match in next block then loopback (current fallback) // This applies only to text messages + there's a max attempt to be specified let fallbackBlock: BlockFull | undefined; @@ -376,7 +383,7 @@ export class BotService { 'Existing conversations', ); this.logger.debug('Conversation has been captured! Responding ...'); - return await this.handleIncomingMessage(conversation, event); + return await this.handleOngoingConversationMessage(conversation, event); } catch (err) { this.logger.error( 'An error occurred when searching for a conversation ', diff --git a/api/src/utils/test/mocks/block.ts b/api/src/utils/test/mocks/block.ts index 32a26f70..0de1196f 100644 --- a/api/src/utils/test/mocks/block.ts +++ b/api/src/utils/test/mocks/block.ts @@ -18,6 +18,7 @@ import { OutgoingMessageFormat } from '@/chat/schemas/types/message'; import { BlockOptions, ContentOptions } from '@/chat/schemas/types/options'; import { NlpPattern, Pattern } from '@/chat/schemas/types/pattern'; import { QuickReplyType } from '@/chat/schemas/types/quick-reply'; +import { WEB_CHANNEL_NAME } from '@/extensions/channels/web/settings'; import { modelInstance } from './misc'; @@ -391,3 +392,10 @@ export const blockCarouselMock = { } as unknown as BlockFull; export const blocks: BlockFull[] = [blockGetStarted, blockEmpty]; + +export const mockWebChannelData: SubscriberChannelDict[typeof 
WEB_CHANNEL_NAME] = + { + isSocket: true, + ipAddress: '1.1.1.1', + agent: 'Chromium', + }; From ab1d58ac1739121d246c593b17f3a5e240f54b1a Mon Sep 17 00:00:00 2001 From: Mohamed Marrouchi Date: Wed, 11 Jun 2025 11:02:46 +0100 Subject: [PATCH 7/7] test: consolidate tests --- api/src/chat/services/block.service.ts | 4 +- .../controllers/nlp-sample.controller.spec.ts | 10 ++++ .../nlp/repositories/nlp-sample.repository.ts | 50 ++++++++++------- .../nlp/services/nlp-sample.service.spec.ts | 54 ++++++++++++++++++- api/src/nlp/services/nlp-sample.service.ts | 2 +- api/src/utils/generics/base-repository.ts | 4 +- 6 files changed, 99 insertions(+), 25 deletions(-) diff --git a/api/src/chat/services/block.service.ts b/api/src/chat/services/block.service.ts index d85b7bc1..af561a7e 100644 --- a/api/src/chat/services/block.service.ts +++ b/api/src/chat/services/block.service.ts @@ -95,8 +95,8 @@ export class BlockService extends BaseService< block: B, subscriber?: Subscriber, ) { - if (!subscriber) { - return block; + if (!subscriber || !subscriber.labels) { + return true; // No subscriber or labels to match against } const triggerLabels = block.trigger_labels.map((l: string | Label) => diff --git a/api/src/nlp/controllers/nlp-sample.controller.spec.ts b/api/src/nlp/controllers/nlp-sample.controller.spec.ts index 464f3276..54abd46d 100644 --- a/api/src/nlp/controllers/nlp-sample.controller.spec.ts +++ b/api/src/nlp/controllers/nlp-sample.controller.spec.ts @@ -207,12 +207,16 @@ describe('NlpSampleController', () => { const patterns: NlpValueMatchPattern[] = [ { entity: 'intent', match: 'value', value: 'nonexistent' }, ]; + jest.spyOn(nlpSampleService, 'findByPatternsAndPopulate'); const result = await nlpSampleController.findPage( pageQuery, ['language', 'entities'], {}, patterns, ); + expect(nlpSampleService.findByPatternsAndPopulate).toHaveBeenCalledTimes( + 1, + ); expect(Array.isArray(result)).toBe(true); expect(result).toHaveLength(0); }); @@ -220,7 +224,9 @@ 
describe('NlpSampleController', () => { describe('count', () => { it('should count the nlp samples', async () => { + jest.spyOn(nlpSampleService, 'count'); const result = await nlpSampleController.count({}); + expect(nlpSampleService.count).toHaveBeenCalledTimes(1); const count = nlpSampleFixtures.length; expect(result).toEqual({ count }); }); @@ -478,7 +484,9 @@ describe('NlpSampleController', () => { describe('filterCount', () => { it('should count the nlp samples without patterns', async () => { const filters = { text: 'Hello' }; + jest.spyOn(nlpSampleService, 'countByPatterns'); const result = await nlpSampleController.filterCount(filters, []); + expect(nlpSampleService.countByPatterns).toHaveBeenCalledTimes(1); expect(result).toEqual({ count: 1 }); }); @@ -487,7 +495,9 @@ describe('NlpSampleController', () => { const patterns: NlpValueMatchPattern[] = [ { entity: 'intent', match: 'value', value: 'greeting' }, ]; + jest.spyOn(nlpSampleService, 'countByPatterns'); const result = await nlpSampleController.filterCount(filters, patterns); + expect(nlpSampleService.countByPatterns).toHaveBeenCalledTimes(1); expect(result).toEqual({ count: 1 }); }); diff --git a/api/src/nlp/repositories/nlp-sample.repository.ts b/api/src/nlp/repositories/nlp-sample.repository.ts index 13c38544..37aa50db 100644 --- a/api/src/nlp/repositories/nlp-sample.repository.ts +++ b/api/src/nlp/repositories/nlp-sample.repository.ts @@ -52,6 +52,35 @@ export class NlpSampleRepository extends BaseRepository< super(model, NlpSample, NLP_SAMPLE_POPULATE, NlpSampleFull); } + /** + * Normalize the filter query. + * + * @param filters - The filters to normalize. + * @returns The normalized filters. 
+ */ + private normalizeFilters( + filters: TFilterQuery, + ): TFilterQuery { + if (filters?.$and) { + return { + ...filters, + $and: filters.$and.map((condition) => { + // @todo: think of a better way to handle language to objectId conversion + // This is a workaround for the fact that language is stored as an ObjectId + // in the database, but we want to filter by its string representation. + if ('language' in condition && condition.language) { + return { + ...condition, + language: new Types.ObjectId(condition.language as string), + }; + } + return condition; + }), + }; + } + return filters; + } + /** * Build the aggregation stages that restrict a *nlpSampleEntities* collection * to links which: @@ -77,27 +106,12 @@ export class NlpSampleRepository extends BaseRepository< value: new Types.ObjectId(id), })); + const normalizedFilters = this.normalizeFilters(filters); + return [ { $match: { - // @todo: think of a better way to handle language to objectId conversion - // This is a workaround for the fact that language is stored as an ObjectId - // in the database, but we want to filter by its string representation. - ...filters, - ...(filters?.$and - ? 
{ - $and: filters.$and?.map((condition) => { - if ('language' in condition && condition.language) { - return { - language: new Types.ObjectId( - condition.language as string, - ), - }; - } - return condition; - }), - } - : {}), + ...normalizedFilters, }, }, diff --git a/api/src/nlp/services/nlp-sample.service.spec.ts b/api/src/nlp/services/nlp-sample.service.spec.ts index 40e76b4e..e9ba31af 100644 --- a/api/src/nlp/services/nlp-sample.service.spec.ts +++ b/api/src/nlp/services/nlp-sample.service.spec.ts @@ -54,6 +54,7 @@ describe('NlpSampleService', () => { let nlpEntityService: NlpEntityService; let nlpSampleService: NlpSampleService; let nlpSampleEntityService: NlpSampleEntityService; + let nlpValueService: NlpValueService; let languageService: LanguageService; let nlpSampleEntityRepository: NlpSampleEntityRepository; let nlpSampleRepository: NlpSampleRepository; @@ -100,6 +101,7 @@ describe('NlpSampleService', () => { nlpEntityService, nlpSampleService, nlpSampleEntityService, + nlpValueService, nlpSampleRepository, nlpSampleEntityRepository, nlpSampleEntityRepository, @@ -109,6 +111,7 @@ describe('NlpSampleService', () => { NlpEntityService, NlpSampleService, NlpSampleEntityService, + NlpValueService, NlpSampleRepository, NlpSampleEntityRepository, NlpSampleEntityRepository, @@ -364,17 +367,29 @@ describe('NlpSampleService', () => { }); describe('findByPatterns', () => { + it('should return samples without providing patterns', async () => { + const result = await nlpSampleService.findByPatterns( + { filters: {}, patterns: [] }, + undefined, + ); + + expect(Array.isArray(result)).toBe(true); + expect(result.length).toBeGreaterThan(0); + }); + it('should return samples matching the given patterns', async () => { // Assume pattern: entity 'intent', value 'greeting' const patterns: NlpValueMatchPattern[] = [ { entity: 'intent', match: 'value', value: 'greeting' }, ]; - + jest.spyOn(nlpSampleRepository, 'findByEntities'); + jest.spyOn(nlpValueService, 
'findByPatterns'); const result = await nlpSampleService.findByPatterns( { filters: {}, patterns }, undefined, ); - + expect(nlpSampleRepository.findByEntities).toHaveBeenCalled(); + expect(nlpValueService.findByPatterns).toHaveBeenCalled(); expect(Array.isArray(result)).toBe(true); expect(result[0].text).toBe('Hello'); }); @@ -384,11 +399,15 @@ describe('NlpSampleService', () => { { entity: 'intent', match: 'value', value: 'nonexistent' }, ]; + jest.spyOn(nlpSampleRepository, 'findByEntities'); + jest.spyOn(nlpValueService, 'findByPatterns'); const result = await nlpSampleService.findByPatterns( { filters: {}, patterns }, undefined, ); + expect(nlpSampleRepository.findByEntities).not.toHaveBeenCalled(); + expect(nlpValueService.findByPatterns).toHaveBeenCalled(); expect(Array.isArray(result)).toBe(true); expect(result).toHaveLength(0); }); @@ -434,6 +453,19 @@ describe('NlpSampleService', () => { }); }); + it('should return populated NlpSampleFull without providing patterns', async () => { + const result = await nlpSampleService.findByPatternsAndPopulate( + { filters: { text: /Hello/gi }, patterns: [] }, + undefined, + ); + + expect(Array.isArray(result)).toBe(true); + expect(result.length).toBe(1); + expect(result[0]).toBeInstanceOf(NlpSampleFull); + expect(result[0].entities).toBeDefined(); + expect(Array.isArray(result[0].entities)).toBe(true); + }); + it('should return an empty array if no samples match the patterns', async () => { const patterns: NlpValueMatchPattern[] = [ { entity: 'intent', match: 'value', value: 'nonexistent' }, @@ -474,15 +506,33 @@ describe('NlpSampleService', () => { { entity: 'intent', match: 'value', value: 'greeting' }, ]; + jest.spyOn(nlpSampleRepository, 'countByEntities'); + jest.spyOn(nlpValueService, 'findByPatterns'); const count = await nlpSampleService.countByPatterns({ filters: {}, patterns, }); + expect(nlpSampleRepository.countByEntities).toHaveBeenCalled(); + expect(nlpValueService.findByPatterns).toHaveBeenCalled(); 
expect(typeof count).toBe('number'); expect(count).toBe(2); }); + it('should return the correct count without providing patterns', async () => { + jest.spyOn(nlpSampleRepository, 'findByEntities'); + jest.spyOn(nlpValueService, 'findByPatterns'); + const count = await nlpSampleService.countByPatterns({ + filters: {}, + patterns: [], + }); + + expect(nlpSampleRepository.findByEntities).not.toHaveBeenCalled(); + expect(nlpValueService.findByPatterns).not.toHaveBeenCalled(); + expect(typeof count).toBe('number'); + expect(count).toBeGreaterThan(2); + }); + it('should return 0 if no samples match the patterns', async () => { const patterns: NlpValueMatchPattern[] = [ { entity: 'intent', match: 'value', value: 'nonexistent' }, diff --git a/api/src/nlp/services/nlp-sample.service.ts b/api/src/nlp/services/nlp-sample.service.ts index 94de75fb..5f789643 100644 --- a/api/src/nlp/services/nlp-sample.service.ts +++ b/api/src/nlp/services/nlp-sample.service.ts @@ -104,7 +104,7 @@ export class NlpSampleService extends BaseService< * Same as `findByPatterns`, but also populates all relations declared * in the repository (`populatePaths`). * - * @param criteras `{ filters, patterns }` + * @param criteria `{ filters, patterns }` * @param page Optional paging / sorting descriptor. * @param projection Optional Mongo projection. * @returns Promise resolving to the populated samples. diff --git a/api/src/utils/generics/base-repository.ts b/api/src/utils/generics/base-repository.ts index 6258247e..18ef046f 100644 --- a/api/src/utils/generics/base-repository.ts +++ b/api/src/utils/generics/base-repository.ts @@ -378,7 +378,7 @@ export abstract class BaseRepository< criteria: string | TFilterQuery, options?: ClassTransformOptions, projection?: ProjectionType, - ) { + ): Promise { if (!criteria) { // @TODO : Issue a warning ? return null; @@ -768,7 +768,7 @@ export abstract class BaseRepository< * @param filter Mongo filter selecting the documents to update. * @param dto Update payload. 
* @param options `{ shouldFlatten?: boolean }`. - @returns MongoDB `UpdateWriteOpResult` describing the operation outcome. + @returns Promise that resolves to a MongoDB `UpdateWriteOpResult` describing the operation outcome. */ async updateMany>( filter: TFilterQuery,