diff --git a/api/src/nlp/controllers/nlp-sample.controller.ts b/api/src/nlp/controllers/nlp-sample.controller.ts
index 949e8e86..a94de961 100644
--- a/api/src/nlp/controllers/nlp-sample.controller.ts
+++ b/api/src/nlp/controllers/nlp-sample.controller.ts
@@ -29,17 +29,21 @@ import {
 import { FileInterceptor } from '@nestjs/platform-express';
 import { CsrfCheck } from '@tekuconcept/nestjs-csrf';
 import { Response } from 'express';
+import { z } from 'zod';
 
+import { NlpPattern, nlpPatternSchema } from '@/chat/schemas/types/pattern';
 import { HelperService } from '@/helper/helper.service';
 import { HelperType } from '@/helper/types';
 import { LanguageService } from '@/i18n/services/language.service';
 import { CsrfInterceptor } from '@/interceptors/csrf.interceptor';
+import { Roles } from '@/utils/decorators/roles.decorator';
 import { BaseController } from '@/utils/generics/base-controller';
 import { DeleteResult } from '@/utils/generics/base-repository';
 import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
 import { PageQueryPipe } from '@/utils/pagination/pagination-query.pipe';
 import { PopulatePipe } from '@/utils/pipes/populate.pipe';
 import { SearchFilterPipe } from '@/utils/pipes/search-filter.pipe';
+import { ZodQueryParamPipe } from '@/utils/pipes/zod.pipe';
 import { TFilterQuery } from '@/utils/types/filter.types';
 
 import { NlpSampleDto, TNlpSampleDto } from '../dto/nlp-sample.dto';
@@ -177,6 +181,7 @@ export class NlpSampleController extends BaseController<
    *
    * @returns The count of samples that match the filters.
    */
+  @Roles('public')
   @Get('count')
   async filterCount(
     @Query(
@@ -184,8 +189,18 @@ export class NlpSampleController extends BaseController<
         allowedFields: ['text', 'type', 'language'],
       }),
     )
-    filters?: TFilterQuery<NlpSample>,
+    filters: TFilterQuery<NlpSample> = {},
+    @Query(
+      new ZodQueryParamPipe(
+        z.array(nlpPatternSchema),
+        (q) => q?.where?.patterns,
+      ),
+    )
+    patterns: NlpPattern[] = [],
   ) {
+    if (patterns.length) {
+      return await this.nlpSampleService.countByPatterns({ filters, patterns });
+    }
     return await this.count(filters);
   }
 
@@ -276,6 +291,7 @@ export class NlpSampleController extends BaseController<
    * @returns A paginated list of NLP samples.
    */
   @Get()
+  @Roles('public')
   async findPage(
     @Query(PageQueryPipe) pageQuery: PageQueryDto<NlpSample>,
     @Query(PopulatePipe) populate: string[],
@@ -285,7 +301,25 @@ export class NlpSampleController extends BaseController<
       }),
     )
     filters: TFilterQuery<NlpSample>,
+    @Query(
+      new ZodQueryParamPipe(
+        z.array(nlpPatternSchema),
+        (q) => q?.where?.patterns,
+      ),
+    )
+    patterns: NlpPattern[] = [],
   ) {
+    if (patterns.length) {
+      return this.canPopulate(populate)
+        ? await this.nlpSampleService.findByPatternsAndPopulate(
+            { filters, patterns },
+            pageQuery,
+          )
+        : await this.nlpSampleService.findByPatterns(
+            { filters, patterns },
+            pageQuery,
+          );
+    }
     return this.canPopulate(populate)
       ? await this.nlpSampleService.findAndPopulate(filters, pageQuery)
       : await this.nlpSampleService.find(filters, pageQuery);
diff --git a/api/src/nlp/repositories/nlp-sample.repository.ts b/api/src/nlp/repositories/nlp-sample.repository.ts
index 9da6eab3..25e51fb9 100644
--- a/api/src/nlp/repositories/nlp-sample.repository.ts
+++ b/api/src/nlp/repositories/nlp-sample.repository.ts
@@ -8,15 +8,27 @@
 
 import { Injectable } from '@nestjs/common';
 import { InjectModel } from '@nestjs/mongoose';
-import { Document, Model, Query } from 'mongoose';
+import { plainToClass } from 'class-transformer';
+import {
+  Aggregate,
+  Document,
+  Model,
+  PipelineStage,
+  ProjectionType,
+  Query,
+  Types,
+} from 'mongoose';
 
 import { BaseRepository, DeleteResult } from '@/utils/generics/base-repository';
+import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
 import { TFilterQuery } from '@/utils/types/filter.types';
 
 import { TNlpSampleDto } from '../dto/nlp-sample.dto';
+import { NlpSampleEntity } from '../schemas/nlp-sample-entity.schema';
 import {
   NLP_SAMPLE_POPULATE,
   NlpSample,
+  NlpSampleDocument,
   NlpSampleFull,
   NlpSamplePopulate,
 } from '../schemas/nlp-sample.schema';
@@ -32,11 +44,167 @@ export class NlpSampleRepository extends BaseRepository<
 > {
   constructor(
     @InjectModel(NlpSample.name) readonly model: Model<NlpSample>,
+    @InjectModel(NlpSampleEntity.name)
+    readonly sampleEntityModel: Model<NlpSampleEntity>,
     private readonly nlpSampleEntityRepository: NlpSampleEntityRepository,
   ) {
     super(model, NlpSample, NLP_SAMPLE_POPULATE, NlpSampleFull);
   }
 
+  buildFindByEntitiesStages({
+    filters,
+    entityIds,
+    valueIds,
+  }: {
+    filters: TFilterQuery<NlpSample>;
+    entityIds: Types.ObjectId[];
+    valueIds: Types.ObjectId[];
+  }): PipelineStage[] {
+    return [
+      // pick link docs whose entity / value matches a pattern
+      {
+        $match: {
+          ...(entityIds.length && { entity: { $in: entityIds } }),
+          ...(valueIds.length && { value: { $in: valueIds } }),
+        },
+      },
+
+      // join to the real sample *and* apply sample-side filters early
+      {
+        $lookup: {
+          from: 'nlpsamples',
+          let: { sampleId: '$sample' },
+          pipeline: [
+            {
+              $match: {
+                $expr: { $eq: ['$_id', '$$sampleId'] },
+                ...(filters?.$and
+                  ? {
+                      $and: filters.$and?.map((condition) => {
+                        if ('language' in condition && condition.language) {
+                          return {
+                            language: new Types.ObjectId(condition.language),
+                          };
+                        }
+                        return condition;
+                      }),
+                    }
+                  : {}),
+              },
+            },
+          ],
+          as: 'sample',
+        },
+      },
+      { $unwind: '$sample' },
+    ];
+  }
+
+  findByEntitiesAggregation(
+    criterias: {
+      filters: TFilterQuery<NlpSample>;
+      entityIds: Types.ObjectId[];
+      valueIds: Types.ObjectId[];
+    },
+    page?: PageQueryDto<NlpSample>,
+    projection?: ProjectionType<NlpSample>,
+  ): Aggregate<NlpSample[]> {
+    return this.sampleEntityModel.aggregate<NlpSample>([
+      ...this.buildFindByEntitiesStages(criterias),
+
+      // promote the sample document
+      { $replaceRoot: { newRoot: '$sample' } },
+
+      // sort / skip / limit
+      ...this.buildPaginationPipelineStages(page),
+
+      // projection
+      ...(projection
+        ? [
+            {
+              $project:
+                typeof projection === 'string'
+                  ? { [projection]: 1 }
+                  : projection,
+            },
+          ]
+        : []),
+    ]);
+  }
+
+  async findByEntities(
+    criterias: {
+      filters: TFilterQuery<NlpSample>;
+      entityIds: Types.ObjectId[];
+      valueIds: Types.ObjectId[];
+    },
+    page?: PageQueryDto<NlpSample>,
+    projection?: ProjectionType<NlpSample>,
+  ): Promise<NlpSample[]> {
+    const aggregation = this.findByEntitiesAggregation(
+      criterias,
+      page,
+      projection,
+    );
+
+    const resultSet = await aggregation.exec();
+    return resultSet.map((doc) =>
+      plainToClass(NlpSample, doc, this.transformOpts),
+    );
+  }
+
+  async findByEntitiesAndPopulate(
+    criterias: {
+      filters: TFilterQuery<NlpSample>;
+      entityIds: Types.ObjectId[];
+      valueIds: Types.ObjectId[];
+    },
+    page?: PageQueryDto<NlpSample>,
+    projection?: ProjectionType<NlpSample>,
+  ): Promise<NlpSampleFull[]> {
+    const aggregation = this.findByEntitiesAggregation(
+      criterias,
+      page,
+      projection,
+    );
+
+    const docs = await aggregation.exec();
+
+    const populatedResultSet = await this.populate(docs);
+
+    return populatedResultSet.map((doc) =>
+      plainToClass(NlpSampleFull, doc, this.transformOpts),
+    );
+  }
+
+  countByEntitiesAggregation(criterias: {
+    filters: TFilterQuery<NlpSample>;
+    entityIds: Types.ObjectId[];
+    valueIds: Types.ObjectId[];
+  }): Aggregate<{ count: number }[]> {
+    return this.sampleEntityModel.aggregate<{ count: number }>([
+      ...this.buildFindByEntitiesStages(criterias),
+
+      // Collapse duplicates: one bucket per unique sample
+      { $group: { _id: '$sample._id' } },
+
+      // Final count
+      { $count: 'count' },
+    ]);
+  }
+
+  async countByEntities(criterias: {
+    filters: TFilterQuery<NlpSample>;
+    entityIds: Types.ObjectId[];
+    valueIds: Types.ObjectId[];
+  }): Promise<{ count: number }> {
+    const aggregation = this.countByEntitiesAggregation(criterias);
+
+    const [result] = await aggregation.exec();
+
+    return { count: result?.count || 0 };
+  }
+
   /**
    * Deletes NLP sample entities associated with the provided criteria before deleting the sample itself.
    *
diff --git a/api/src/nlp/services/nlp-entity.service.ts b/api/src/nlp/services/nlp-entity.service.ts
index 0876f3c1..897e841f 100644
--- a/api/src/nlp/services/nlp-entity.service.ts
+++ b/api/src/nlp/services/nlp-entity.service.ts
@@ -10,7 +10,9 @@ import { CACHE_MANAGER } from '@nestjs/cache-manager';
 import { BadRequestException, Inject, Injectable } from '@nestjs/common';
 import { OnEvent } from '@nestjs/event-emitter';
 import { Cache } from 'cache-manager';
+import { Types } from 'mongoose';
 
+import { NlpPattern } from '@/chat/schemas/types/pattern';
 import { NLP_MAP_CACHE_KEY } from '@/utils/constants/cache';
 import { Cacheable } from '@/utils/decorators/cacheable.decorator';
 import { BaseService } from '@/utils/generics/base-service';
@@ -72,6 +74,19 @@ export class NlpEntityService extends BaseService<
     return await this.repository.updateOne(id, { weight: updatedWeight });
   }
 
+  async findObjectIdsByPatterns(patterns: NlpPattern[]) {
+    // resolve pattern → ids (kept here because it uses other services)
+    return (
+      await this.find({
+        name: {
+          $in: patterns
+            .filter((p) => p.match === 'entity')
+            .map((p) => p.entity),
+        },
+      })
+    ).map((e) => new Types.ObjectId(e.id));
+  }
+
   /**
    * Stores new entities based on the sample text and sample entities.
    * Deletes all values relative to this entity before deleting the entity itself.
diff --git a/api/src/nlp/services/nlp-sample.service.ts b/api/src/nlp/services/nlp-sample.service.ts
index 665f8cc7..700e508b 100644
--- a/api/src/nlp/services/nlp-sample.service.ts
+++ b/api/src/nlp/services/nlp-sample.service.ts
@@ -12,14 +12,16 @@ import {
   NotFoundException,
 } from '@nestjs/common';
 import { OnEvent } from '@nestjs/event-emitter';
-import { Document, Query } from 'mongoose';
+import { Document, ProjectionType, Query } from 'mongoose';
 import Papa from 'papaparse';
 
 import { Message } from '@/chat/schemas/message.schema';
+import { NlpPattern } from '@/chat/schemas/types/pattern';
 import { Language } from '@/i18n/schemas/language.schema';
 import { LanguageService } from '@/i18n/services/language.service';
 import { DeleteResult } from '@/utils/generics/base-repository';
 import { BaseService } from '@/utils/generics/base-service';
+import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
 import { TFilterQuery, THydratedDocument } from '@/utils/types/filter.types';
 
 import { NlpSampleEntityCreateDto } from '../dto/nlp-sample-entity.dto';
@@ -35,6 +37,7 @@ import { NlpSampleEntityValue, NlpSampleState } from '../schemas/types';
 
 import { NlpEntityService } from './nlp-entity.service';
 import { NlpSampleEntityService } from './nlp-sample-entity.service';
+import { NlpValueService } from './nlp-value.service';
 
 @Injectable()
 export class NlpSampleService extends BaseService<
@@ -47,6 +50,7 @@ export class NlpSampleService extends BaseService<
     readonly repository: NlpSampleRepository,
     private readonly nlpSampleEntityService: NlpSampleEntityService,
     private readonly nlpEntityService: NlpEntityService,
+    private readonly nlpValueService: NlpValueService,
     private readonly languageService: LanguageService,
   ) {
     super(repository);
@@ -279,6 +283,66 @@ export class NlpSampleService extends BaseService<
     }
   }
 
+  async findByPatterns(
+    {
+      filters,
+      patterns,
+    }: {
+      filters: TFilterQuery<NlpSample>;
+      patterns: NlpPattern[];
+    },
+    page?: PageQueryDto<NlpSample>,
+    projection?: ProjectionType<NlpSample>,
+  ): Promise<NlpSample[]> {
+    return await this.repository.findByEntities(
+      {
+        filters,
+        entityIds:
+          await this.nlpEntityService.findObjectIdsByPatterns(patterns),
+        valueIds: await this.nlpValueService.findObjectIdsByPatterns(patterns),
+      },
+      page,
+      projection,
+    );
+  }
+
+  async findByPatternsAndPopulate(
+    {
+      filters,
+      patterns,
+    }: {
+      filters: TFilterQuery<NlpSample>;
+      patterns: NlpPattern[];
+    },
+    page?: PageQueryDto<NlpSample>,
+    projection?: ProjectionType<NlpSample>,
+  ): Promise<NlpSampleFull[]> {
+    return await this.repository.findByEntitiesAndPopulate(
+      {
+        filters,
+        entityIds:
+          await this.nlpEntityService.findObjectIdsByPatterns(patterns),
+        valueIds: await this.nlpValueService.findObjectIdsByPatterns(patterns),
+      },
+      page,
+      projection,
+    );
+  }
+
+  async countByPatterns({
+    filters,
+    patterns,
+  }: {
+    filters: TFilterQuery<NlpSample>;
+    patterns: NlpPattern[];
+  }): Promise<{ count: number }> {
+    return await this.repository.countByEntities({
+      filters,
+      entityIds: await this.nlpEntityService.findObjectIdsByPatterns(patterns),
+      valueIds: await this.nlpValueService.findObjectIdsByPatterns(patterns),
+    });
+  }
+
   @OnEvent('hook:message:preCreate')
   async handleNewMessage(doc: THydratedDocument<Message>) {
     // If message is sent by the user then add it as an inbox sample
diff --git a/api/src/nlp/services/nlp-value.service.ts b/api/src/nlp/services/nlp-value.service.ts
index 87988140..021ff3d1 100644
--- a/api/src/nlp/services/nlp-value.service.ts
+++ b/api/src/nlp/services/nlp-value.service.ts
@@ -7,7 +7,9 @@
  */
 
 import { forwardRef, Inject, Injectable } from '@nestjs/common';
+import { Types } from 'mongoose';
 
+import { NlpPattern } from '@/chat/schemas/types/pattern';
 import { DeleteResult } from '@/utils/generics/base-repository';
 import { BaseService } from '@/utils/generics/base-service';
 import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
@@ -42,6 +44,19 @@ export class NlpValueService extends BaseService<
     super(repository);
   }
 
+  async findObjectIdsByPatterns(patterns: NlpPattern[]) {
+    // resolve pattern → ids (kept here because it uses other services)
+    return (
+      await this.find({
+        value: {
+          $in: patterns
+            .map((p) => (p.match === 'value' ? p.value : null))
+            .filter(Boolean),
+        },
+      })
+    ).map((v) => new Types.ObjectId(v.id));
+  }
+
   /**
    * Deletes an NLP value by its ID, cascading any dependent data.
    *
diff --git a/api/src/utils/generics/base-repository.ts b/api/src/utils/generics/base-repository.ts
index 9244d13c..d73f58bb 100644
--- a/api/src/utils/generics/base-repository.ts
+++ b/api/src/utils/generics/base-repository.ts
@@ -19,6 +19,7 @@ import {
   FlattenMaps,
   HydratedDocument,
   Model,
+  PipelineStage,
   ProjectionType,
   Query,
   SortOrder,
@@ -31,6 +32,7 @@ import { LoggerService } from '@/logger/logger.service';
 import {
   TFilterQuery,
   TFlattenOption,
+  THydratedDocument,
   TQueryOptions,
 } from '@/utils/types/filter.types';
 
@@ -81,9 +83,13 @@ export abstract class BaseRepository<
   U extends Omit<T, keyof BaseSchema> = Omit<T, keyof BaseSchema>,
   D = Document<T>,
 > {
-  private readonly transformOpts = { excludePrefixes: ['_', 'password'] };
+  protected readonly transformOpts = { excludePrefixes: ['_', 'password'] };
 
-  private readonly leanOpts = { virtuals: true, defaults: true, getters: true };
+  protected readonly leanOpts = {
+    virtuals: true,
+    defaults: true,
+    getters: true,
+  };
 
   @Inject(EventEmitter2)
   readonly eventEmitter: EventEmitter2;
@@ -643,4 +649,39 @@ export abstract class BaseRepository<
   ): Promise<void> {
     // Nothing ...
   }
+
+  buildPaginationPipelineStages(page?: PageQueryDto<T>): PipelineStage[] {
+    if (!page) return [];
+
+    const stages: PipelineStage[] = [];
+
+    if (page.sort) {
+      const [field, dir] = page.sort;
+      stages.push({
+        $sort: {
+          [field]:
+            typeof dir === 'number'
+              ? dir
+              : ['asc', 'ascending'].includes(dir as string)
+                ? 1
+                : -1,
+        } as Record<string, 1 | -1>,
+      });
+    }
+
+    if (page.skip) stages.push({ $skip: page.skip });
+    if (page.limit) stages.push({ $limit: page.limit });
+
+    return stages;
+  }
+
+  async populate(docs: THydratedDocument<T>[]) {
+    return await this.model.populate(
+      docs,
+      this.populatePaths.map((path) => ({
+        path,
+        options: { lean: true },
+      })),
+    );
+  }
 }
diff --git a/frontend/src/app-components/inputs/NlpPatternSelect.tsx b/frontend/src/app-components/inputs/NlpPatternSelect.tsx
index 142a5d0f..39e4c4b9 100644
--- a/frontend/src/app-components/inputs/NlpPatternSelect.tsx
+++ b/frontend/src/app-components/inputs/NlpPatternSelect.tsx
@@ -1,5 +1,5 @@
 /*
- * Copyright © 2024 Hexastack. All rights reserved.
+ * Copyright © 2025 Hexastack. All rights reserved.
  *
  * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
  * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
@@ -17,7 +17,7 @@ import {
   Typography,
   useTheme,
 } from "@mui/material";
-import Autocomplete from "@mui/material/Autocomplete";
+import Autocomplete, { AutocompleteProps } from "@mui/material/Autocomplete";
 import { forwardRef, SyntheticEvent, useRef } from "react";
 
 import { Input } from "@/app-components/inputs/Input";
@@ -30,13 +30,24 @@ import { NlpPattern } from "@/types/block.types";
 import { INlpEntity } from "@/types/nlp-entity.types";
 import { INlpValue } from "@/types/nlp-value.types";
 
-type NlpPatternSelectProps = {
+interface NlpPatternSelectProps
+  extends Omit<
+    AutocompleteProps<INlpEntity, true, true, false>,
+    | "onChange"
+    | "value"
+    | "options"
+    | "multiple"
+    | "disabled"
+    | "renderTags"
+    | "renderOptions"
+    | "renderInput"
+  > {
   patterns: NlpPattern[];
   onChange: (patterns: NlpPattern[]) => void;
-};
+}
 
 const NlpPatternSelect = (
-  { patterns, onChange }: NlpPatternSelectProps,
+  { patterns, onChange, ...props }: NlpPatternSelectProps,
   ref,
 ) => {
   const inputRef = useRef<HTMLInputElement>(null);
@@ -116,8 +127,8 @@ const NlpPatternSelect = (
   return (
 
   const [type, setType] = useState<NlpSampleType | "all">("all");
   const [language, setLanguage] = useState<string | undefined>(undefined);
+  const [patterns, setPatterns] = useState<NlpPattern[]>([]);
   const hasPermission = useHasPermission();
   const getNlpEntityFromCache = useGetFromCache(EntityType.NLP_ENTITY);
   const getNlpValueFromCache = useGetFromCache(EntityType.NLP_VALUE);
@@ -86,11 +89,14 @@ export default function NlpSample() {
     EntityType.NLP_SAMPLE_ENTITY,
   );
   const getLanguageFromCache = useGetFromCache(EntityType.LANGUAGE);
-  const { onSearch, searchPayload, searchText } = useSearch<INlpSample>(
+  const { onSearch, searchPayload, searchText } = useSearch<
+    INlpSample & { patterns: NlpPattern[] }
+  >(
     {
       $eq: [
         ...(type !== "all" ? [{ type }] : []),
         ...(language ? [{ language }] : []),
+        ...(patterns ? [{ patterns }] : []),
       ],
       $iLike: ["text"],
     },
@@ -425,6 +431,22 @@ export default function NlpSample() {
+
+          <NlpPatternSelect
+            patterns={patterns}
+            onChange={(patterns: NlpPattern[]) => {
+              setPatterns(patterns);
+            }}
+            fullWidth={true}
+          />
+
diff --git a/frontend/src/components/visual-editor/form/inputs/triggers/PatternInput.tsx b/frontend/src/components/visual-editor/form/inputs/triggers/PatternInput.tsx
index 8d0b6b3f..2caa02d7 100644
--- a/frontend/src/components/visual-editor/form/inputs/triggers/PatternInput.tsx
+++ b/frontend/src/components/visual-editor/form/inputs/triggers/PatternInput.tsx
@@ -103,6 +103,7 @@ const PatternInput: FC<PatternInputProps> = ({
       )}
       {["payload", "content", "menu"].includes(patternType) ? (