feat: enhance search ux

This commit is contained in:
Mohamed Marrouchi 2025-06-05 14:56:36 +01:00
parent 5c2ecaf8fc
commit e89d948f37
16 changed files with 709 additions and 200 deletions

View File

@ -18,19 +18,27 @@ export const payloadPatternSchema = z.object({
export type PayloadPattern = z.infer<typeof payloadPatternSchema>;
export const nlpEntityMatchPatternSchema = z.object({
entity: z.string(),
match: z.literal('entity'),
});
export type NlpEntityMatchPattern = z.infer<typeof nlpEntityMatchPatternSchema>;
export const nlpValueMatchPatternSchema = z.object({
entity: z.string(),
match: z.literal('value'),
value: z.string(),
});
export type NlpValueMatchPattern = z.infer<typeof nlpValueMatchPatternSchema>;
export const nlpPatternSchema = z.discriminatedUnion('match', [
z.object({
entity: z.string(),
match: z.literal('entity'),
}),
z.object({
entity: z.string(),
match: z.literal('value'),
value: z.string(),
}),
nlpEntityMatchPatternSchema,
nlpValueMatchPatternSchema,
]);
export type NlpPattern = z.infer<typeof nlpPatternSchema>;
export type NlpPattern = NlpEntityMatchPattern | NlpValueMatchPattern;
export const stringRegexPatternSchema = z.string().refine(
(value) => {

View File

@ -10,6 +10,7 @@ import { CACHE_MANAGER } from '@nestjs/cache-manager';
import { BadRequestException, NotFoundException } from '@nestjs/common';
import { MongooseModule } from '@nestjs/mongoose';
import { NlpValueMatchPattern } from '@/chat/schemas/types/pattern';
import { HelperService } from '@/helper/helper.service';
import { LanguageRepository } from '@/i18n/repositories/language.repository';
import { Language, LanguageModel } from '@/i18n/schemas/language.schema';
@ -439,4 +440,30 @@ describe('NlpSampleController', () => {
).rejects.toThrow(NotFoundException);
});
});
describe('filterCount', () => {
it('should count the nlp samples without patterns', async () => {
const filters = { text: 'Hello' };
const result = await nlpSampleController.filterCount(filters, []);
expect(result).toEqual({ count: 1 });
});
it('should count the nlp samples with patterns', async () => {
const filters = { text: 'Hello' };
const patterns: NlpValueMatchPattern[] = [
{ entity: 'intent', match: 'value', value: 'greeting' },
];
const result = await nlpSampleController.filterCount(filters, patterns);
expect(result).toEqual({ count: 1 });
});
it('should return zero count when no samples match the filters and patterns', async () => {
const filters = { text: 'Nonexistent' };
const patterns: NlpValueMatchPattern[] = [
{ entity: 'intent', match: 'value', value: 'nonexistent' },
];
const result = await nlpSampleController.filterCount(filters, patterns);
expect(result).toEqual({ count: 0 });
});
});
});

View File

@ -31,7 +31,10 @@ import { CsrfCheck } from '@tekuconcept/nestjs-csrf';
import { Response } from 'express';
import { z } from 'zod';
import { NlpPattern, nlpPatternSchema } from '@/chat/schemas/types/pattern';
import {
NlpValueMatchPattern,
nlpValueMatchPatternSchema,
} from '@/chat/schemas/types/pattern';
import { HelperService } from '@/helper/helper.service';
import { HelperType } from '@/helper/types';
import { LanguageService } from '@/i18n/services/language.service';
@ -190,16 +193,19 @@ export class NlpSampleController extends BaseController<
filters: TFilterQuery<NlpSample> = {},
@Query(
new ZodQueryParamPipe(
z.array(nlpPatternSchema),
z.array(nlpValueMatchPatternSchema),
(q) => q?.where?.patterns,
),
)
patterns: NlpPattern[] = [],
patterns: NlpValueMatchPattern[] = [],
) {
if (patterns.length) {
return await this.nlpSampleService.countByPatterns({ filters, patterns });
}
return await this.count(filters);
const count = await this.nlpSampleService.countByPatterns({
filters,
patterns,
});
return {
count,
};
}
/**
@ -300,26 +306,21 @@ export class NlpSampleController extends BaseController<
filters: TFilterQuery<NlpSample>,
@Query(
new ZodQueryParamPipe(
z.array(nlpPatternSchema),
z.array(nlpValueMatchPatternSchema),
(q) => q?.where?.patterns,
),
)
patterns: NlpPattern[] = [],
patterns: NlpValueMatchPattern[] = [],
) {
if (patterns.length) {
return this.canPopulate(populate)
? await this.nlpSampleService.findByPatternsAndPopulate(
{ filters, patterns },
pageQuery,
)
: await this.nlpSampleService.findByPatterns(
{ filters, patterns },
pageQuery,
);
}
return this.canPopulate(populate)
? await this.nlpSampleService.findAndPopulate(filters, pageQuery)
: await this.nlpSampleService.find(filters, pageQuery);
? await this.nlpSampleService.findByPatternsAndPopulate(
{ filters, patterns },
pageQuery,
)
: await this.nlpSampleService.findByPatterns(
{ filters, patterns },
pageQuery,
);
}
/**

View File

@ -32,6 +32,7 @@ import {
NlpSampleFull,
NlpSamplePopulate,
} from '../schemas/nlp-sample.schema';
import { NlpValue } from '../schemas/nlp-value.schema';
import { NlpSampleEntityRepository } from './nlp-sample-entity.repository';
@ -51,70 +52,128 @@ export class NlpSampleRepository extends BaseRepository<
super(model, NlpSample, NLP_SAMPLE_POPULATE, NlpSampleFull);
}
/**
* Build the aggregation stages that restrict a *nlpSampleEntities* collection
* to links which:
* 1. Reference all of the supplied `values`, and
* 2. Whose document satisfies the optional `filters`.
*
* @param criterias Object with:
* @param criterias.filters Extra filters to be applied on *nlpsamples*.
* @param criterias.entities Entity documents whose IDs should match `entity`.
* @param criterias.values Value documents whose IDs should match `value`.
* @returns Array of aggregation `PipelineStage`s ready to be concatenated
* into a larger pipeline.
*/
buildFindByEntitiesStages({
filters,
entityIds,
valueIds,
values,
}: {
filters: TFilterQuery<NlpSample>;
entityIds: Types.ObjectId[];
valueIds: Types.ObjectId[];
values: NlpValue[];
}): PipelineStage[] {
const requiredPairs = values.map(({ id, entity }) => ({
entity: new Types.ObjectId(entity),
value: new Types.ObjectId(id),
}));
return [
// pick link docs whose entity / value matches a pattern
// Apply sample-side filters early
{
$match: {
...(entityIds.length && { entity: { $in: entityIds } }),
...(valueIds.length && { value: { $in: valueIds } }),
...(filters?.$and
? {
$and: filters.$and?.map((condition) => {
if ('language' in condition && condition.language) {
return {
language: new Types.ObjectId(
condition.language as string,
),
};
}
return condition;
}),
}
: {}),
},
},
// join to the real sample *and* apply sample-side filters early
// Fetch the entities for each sample
{
$lookup: {
from: 'nlpsamples',
let: { sampleId: '$sample' },
from: 'nlpsampleentities',
localField: '_id', // nlpsamples._id
foreignField: 'sample', // nlpsampleentities.sample
as: 'sampleentities',
pipeline: [
{
$match: {
$expr: { $eq: ['$_id', '$$sampleId'] },
...(filters?.$and
? {
$and: filters.$and?.map((condition) => {
if ('language' in condition && condition.language) {
return {
language: new Types.ObjectId(condition.language),
};
}
return condition;
}),
}
: {}),
$or: requiredPairs,
},
},
],
as: 'sample',
},
},
{ $unwind: '$sample' },
// Filter out empty or less matching
{
$match: {
$expr: {
$gte: [{ $size: '$sampleentities' }, requiredPairs.length],
},
},
},
// Collapse each link into an { entity, value } object
{
$addFields: {
entities: {
$ifNull: [
{
$map: {
input: '$sampleentities',
as: 's',
in: { entity: '$$s.entity', value: '$$s.value' },
},
},
[],
],
},
},
},
// Keep only the samples whose `entities` array ⊇ `requiredPairs`
{
$match: {
$expr: {
$eq: [
requiredPairs.length, // target size
{
$size: {
$setIntersection: ['$entities', requiredPairs],
},
},
],
},
},
},
// drop the helper array if it isn't needed downstream
{ $project: { entities: 0, sampleentities: 0 } },
];
}
findByEntitiesAggregation(
criterias: {
filters: TFilterQuery<NlpSample>;
entityIds: Types.ObjectId[];
valueIds: Types.ObjectId[];
values: NlpValue[];
},
page?: PageQueryDto<NlpSample>,
projection?: ProjectionType<NlpSample>,
): Aggregate<NlpSampleDocument[]> {
return this.sampleEntityModel.aggregate<NlpSampleDocument>([
return this.model.aggregate<NlpSampleDocument>([
...this.buildFindByEntitiesStages(criterias),
// promote the sample document
{ $replaceRoot: { newRoot: '$sample' } },
// sort / skip / limit
...this.buildPaginationPipelineStages(page),
@ -135,8 +194,7 @@ export class NlpSampleRepository extends BaseRepository<
async findByEntities(
criterias: {
filters: TFilterQuery<NlpSample>;
entityIds: Types.ObjectId[];
valueIds: Types.ObjectId[];
values: NlpValue[];
},
page?: PageQueryDto<NlpSample>,
projection?: ProjectionType<NlpSample>,
@ -153,11 +211,18 @@ export class NlpSampleRepository extends BaseRepository<
);
}
/**
* Find NLP samples by entities and populate them with their related data.
*
* @param criterias - Criteria containing filters and values to match.
* @param page - Optional pagination parameters.
* @param projection - Optional projection to limit fields returned.
* @returns Promise resolving to an array of populated NlpSampleFull objects.
*/
async findByEntitiesAndPopulate(
criterias: {
filters: TFilterQuery<NlpSample>;
entityIds: Types.ObjectId[];
valueIds: Types.ObjectId[];
values: NlpValue[];
},
page?: PageQueryDto<NlpSample>,
projection?: ProjectionType<NlpSample>,
@ -177,32 +242,41 @@ export class NlpSampleRepository extends BaseRepository<
);
}
/**
* Build an aggregation pipeline that counts NLP samples satisfying:
* the extra `filters` (passed to `$match` later on), and
* All of the supplied `entities` / `values`.
*
* @param criterias `{ filters, entities, values }`
* @returns Un-executed aggregation cursor.
*/
countByEntitiesAggregation(criterias: {
filters: TFilterQuery<NlpSample>;
entityIds: Types.ObjectId[];
valueIds: Types.ObjectId[];
values: NlpValue[];
}): Aggregate<{ count: number }[]> {
return this.sampleEntityModel.aggregate<{ count: number }>([
return this.model.aggregate<{ count: number }>([
...this.buildFindByEntitiesStages(criterias),
// Collapse duplicates: one bucket per unique sample
{ $group: { _id: '$sample._id' } },
// Final count
{ $count: 'count' },
]);
}
/**
* Returns the count of samples by filters, entities and/or values
*
* @param criterias `{ filters, entities, values }`
* @returns Promise resolving to the number of matching samples.
*/
async countByEntities(criterias: {
filters: TFilterQuery<NlpSample>;
entityIds: Types.ObjectId[];
valueIds: Types.ObjectId[];
}): Promise<{ count: number }> {
values: NlpValue[];
}): Promise<number> {
const aggregation = this.countByEntitiesAggregation(criterias);
const [result] = await aggregation.exec();
return { count: result?.count || 0 };
return result?.count || 0;
}
/**

View File

@ -10,9 +10,7 @@ import { CACHE_MANAGER } from '@nestjs/cache-manager';
import { BadRequestException, Inject, Injectable } from '@nestjs/common';
import { OnEvent } from '@nestjs/event-emitter';
import { Cache } from 'cache-manager';
import { Types } from 'mongoose';
import { NlpPattern } from '@/chat/schemas/types/pattern';
import { NLP_MAP_CACHE_KEY } from '@/utils/constants/cache';
import { Cacheable } from '@/utils/decorators/cacheable.decorator';
import { BaseService } from '@/utils/generics/base-service';
@ -74,19 +72,6 @@ export class NlpEntityService extends BaseService<
return await this.repository.updateOne(id, { weight: updatedWeight });
}
async findObjectIdsByPatterns(patterns: NlpPattern[]) {
// resolve pattern → ids (kept here because it uses other services)
return (
await this.find({
name: {
$in: patterns
.filter((p) => p.match === 'entity')
.map((p) => p.entity),
},
})
).map((e) => new Types.ObjectId(e.id));
}
/**
* Stores new entities based on the sample text and sample entities.
* Deletes all values relative to this entity before deleting the entity itself.

View File

@ -16,7 +16,7 @@ import { Document, ProjectionType, Query } from 'mongoose';
import Papa from 'papaparse';
import { Message } from '@/chat/schemas/message.schema';
import { NlpPattern } from '@/chat/schemas/types/pattern';
import { NlpValueMatchPattern } from '@/chat/schemas/types/pattern';
import { Language } from '@/i18n/schemas/language.schema';
import { LanguageService } from '@/i18n/services/language.service';
import { DeleteResult } from '@/utils/generics/base-repository';
@ -56,6 +56,117 @@ export class NlpSampleService extends BaseService<
super(repository);
}
/**
* Retrieve samples that satisfy `filters` **and** reference any entity / value
* contained in `patterns`.
*
* The pattern list is first resolved via `NlpValueService.findByPatterns`,
* then delegated to `repository.findByEntities`.
*
* @param criterias `{ filters, patterns }`
* @param page Optional paging / sorting descriptor.
* @param projection Optional Mongo projection.
* @returns Promise resolving to the matching samples.
*/
async findByPatterns(
{
filters,
patterns,
}: {
filters: TFilterQuery<NlpSample>;
patterns: NlpValueMatchPattern[];
},
page?: PageQueryDto<NlpSample>,
projection?: ProjectionType<NlpSample>,
): Promise<NlpSample[]> {
const values =
patterns.length > 0
? await this.nlpValueService.findByPatterns(patterns)
: [];
if (values.length === 0) {
return await this.repository.find(filters, page, projection);
}
return await this.repository.findByEntities(
{
filters,
values,
},
page,
projection,
);
}
/**
* Same as `findByPatterns`, but also populates all relations declared
* in the repository (`populatePaths`).
*
* @param criterias `{ filters, patterns }`
* @param page Optional paging / sorting descriptor.
* @param projection Optional Mongo projection.
* @returns Promise resolving to the populated samples.
*/
async findByPatternsAndPopulate(
{
filters,
patterns,
}: {
filters: TFilterQuery<NlpSample>;
patterns: NlpValueMatchPattern[];
},
page?: PageQueryDto<NlpSample>,
projection?: ProjectionType<NlpSample>,
): Promise<NlpSampleFull[]> {
const values =
patterns.length > 0
? await this.nlpValueService.findByPatterns(patterns)
: [];
if (values.length === 0) {
return await this.repository.findAndPopulate(filters, page, projection);
}
return await this.repository.findByEntitiesAndPopulate(
{
filters,
values,
},
page,
projection,
);
}
/**
* Count how many samples satisfy `filters` and reference any entity / value
* present in `patterns`.
*
* @param param0 `{ filters, patterns }`
* @returns Promise resolving to the count (a plain number).
*/
async countByPatterns({
filters,
patterns,
}: {
filters: TFilterQuery<NlpSample>;
patterns: NlpValueMatchPattern[];
}): Promise<number> {
const values =
patterns.length > 0
? await this.nlpValueService.findByPatterns(patterns)
: [];
if (values.length === 0) {
return await this.repository.count(filters);
}
return await this.repository.countByEntities({
filters,
values,
});
}
/**
* Fetches the samples and entities for a given sample type.
*
@ -283,66 +394,6 @@ export class NlpSampleService extends BaseService<
}
}
async findByPatterns(
{
filters,
patterns,
}: {
filters: TFilterQuery<NlpSample>;
patterns: NlpPattern[];
},
page?: PageQueryDto<NlpSample>,
projection?: ProjectionType<NlpSample>,
): Promise<NlpSample[]> {
return await this.repository.findByEntities(
{
filters,
entityIds:
await this.nlpEntityService.findObjectIdsByPatterns(patterns),
valueIds: await this.nlpValueService.findObjectIdsByPatterns(patterns),
},
page,
projection,
);
}
async findByPatternsAndPopulate(
{
filters,
patterns,
}: {
filters: TFilterQuery<NlpSample>;
patterns: NlpPattern[];
},
page?: PageQueryDto<NlpSample>,
projection?: ProjectionType<NlpSample>,
): Promise<NlpSampleFull[]> {
return await this.repository.findByEntitiesAndPopulate(
{
filters,
entityIds:
await this.nlpEntityService.findObjectIdsByPatterns(patterns),
valueIds: await this.nlpValueService.findObjectIdsByPatterns(patterns),
},
page,
projection,
);
}
async countByPatterns({
filters,
patterns,
}: {
filters: TFilterQuery<NlpSample>;
patterns: NlpPattern[];
}): Promise<{ count: number }> {
return await this.repository.countByEntities({
filters,
entityIds: await this.nlpEntityService.findObjectIdsByPatterns(patterns),
valueIds: await this.nlpValueService.findObjectIdsByPatterns(patterns),
});
}
@OnEvent('hook:message:preCreate')
async handleNewMessage(doc: THydratedDocument<Message>) {
// If message is sent by the user then add it as an inbox sample

View File

@ -7,9 +7,8 @@
*/
import { forwardRef, Inject, Injectable } from '@nestjs/common';
import { Types } from 'mongoose';
import { NlpPattern } from '@/chat/schemas/types/pattern';
import { NlpValueMatchPattern } from '@/chat/schemas/types/pattern';
import { DeleteResult } from '@/utils/generics/base-repository';
import { BaseService } from '@/utils/generics/base-service';
import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
@ -44,17 +43,18 @@ export class NlpValueService extends BaseService<
super(repository);
}
async findObjectIdsByPatterns(patterns: NlpPattern[]) {
// resolve pattern → ids (kept here because it uses other services)
return (
await this.find({
value: {
$in: patterns
.map((p) => (p.match === 'value' ? p.value : null))
.filter(Boolean),
},
})
).map((v) => new Types.ObjectId(v.id));
/**
* Fetch values whose `value` field matches the patterns provided.
*
* @param patterns Pattern list
* @returns Promise resolving to the matching values.
*/
async findByPatterns(patterns: NlpValueMatchPattern[]) {
return await this.find({
value: {
$in: patterns.map((p) => p.value),
},
});
}
/**

View File

@ -106,15 +106,50 @@ export abstract class BaseRepository<
this.registerLifeCycleHooks();
}
/**
* Determine whether at least one of the requested populate paths
* is supported by the repository.
*
* @param populate Array of path strings supplied by the caller.
* @returns `true` if any item appears in `this.populatePaths`, else `false`.
*/
canPopulate(populate: string[]): boolean {
return populate.some((p) => this.populatePaths.includes(p as P));
}
/**
* Build the canonical event name used by the repository's event-bus hooks.
*
* Format: `hook:<entity>:<suffix>`
* where `<entity>` is the lower-cased class name and `<suffix>` is an
* `EHook` value such as `"preCreate"` or `"postUpdate"`.
*
* @param suffix Lifecycle-hook suffix.
* @returns A type-safe event name string.
*/
getEventName(suffix: EHook) {
const entity = this.cls.name.toLocaleLowerCase();
return `hook:${entity}:${suffix}` as `hook:${IHookEntities}:${TNormalizedEvents}`;
}
/**
* Wire all Mongoose lifecycle hooks to the repository's overridable
* `pre-/post-*` methods **and** to the domain event bus.
*
* For the current repository (`this.cls.name`) the method:
* 1. Retrieves the hook definitions from `LifecycleHookManager`.
* 2. Registers handlers for:
* `validate.pre / validate.post` → `preCreateValidate` / `postCreateValidate`
* `save.pre / save.post` → `preCreate` / `postCreate`
* `deleteOne.* / deleteMany.*` → `preDelete` / `postDelete`
* `findOneAndUpdate.*` → `preUpdate` / `postUpdate`
* `updateMany.*` → `preUpdateMany` / `postUpdateMany`
* 3. Emits the corresponding domain events (`EHook.*`) via `eventEmitter`
* after each repository callback.
*
* If no hooks are registered for the current class, a console warning is
* issued and the method exits gracefully.
*/
private registerLifeCycleHooks(): void {
const repository = this;
const hooks = LifecycleHookManager.getHooks(this.cls.name);
@ -258,6 +293,19 @@ export abstract class BaseRepository<
});
}
/**
* Execute a `find`-style query, convert each lean result to `cls`, and return
* the transformed list.
*
* - The query is run with `lean(this.leanOpts)` for performance.
* - Each plain object is passed through `plainToClass` using
* `this.transformOpts`.
*
* @template R Result type — typically the populated or base DTO class.
* @param query Mongoose query returning an array of documents.
* @param cls Constructor used by `plainToClass` for transformation.
* @returns Promise resolving to an array of class instances.
*/
protected async execute<R extends Omit<T, P>>(
query: Query<T[], T>,
cls: new () => R,
@ -266,6 +314,19 @@ export abstract class BaseRepository<
return resultSet.map((doc) => plainToClass(cls, doc, this.transformOpts));
}
/**
* Execute a single-document query, convert the result to `cls`,
* and return it (or `null`).
*
* - Uses `lean(this.leanOpts)` for performance.
* - Falls back to `this.transformOpts` when `options` is not provided.
*
* @template R Result type — typically the populated or base DTO class.
* @param query Mongoose query expected to return one document.
* @param cls Constructor used by `plainToClass`.
* @param options Optional `ClassTransformOptions` overriding defaults.
* @returns Promise resolving to a class instance or `null`.
*/
protected async executeOne<R extends Omit<T, P>>(
query: Query<T | null, T>,
cls: new () => R,
@ -275,6 +336,18 @@ export abstract class BaseRepository<
return plainToClass(cls, doc, options ?? this.transformOpts);
}
/**
* Build a `findOne`/`findById` query.
*
* - `criteria` may be an `_id` string or any Mongo filter;
* an empty / falsy value is **not allowed** (throws).
* - Optional `projection` is forwarded unchanged.
*
* @param criteria Document `_id` **or** Mongo filter.
* @param projection Optional Mongo projection.
* @throws Error when `criteria` is empty.
* @returns Un-executed Mongoose query.
*/
protected findOneQuery(
criteria: string | TFilterQuery<T>,
projection?: ProjectionType<T>,
@ -289,6 +362,18 @@ export abstract class BaseRepository<
: this.model.findOne<HydratedDocument<T>>(criteria, projection);
}
/**
* Retrieve a single document and convert it to `this.cls`.
*
* - Returns `null` immediately when `criteria` is falsy.
* - Optional `options` are passed to `plainToClass`.
* - Optional `projection` limits returned fields.
*
* @param criteria Document `_id` **or** Mongo filter.
* @param options Class-transform options.
* @param projection Optional Mongo projection.
* @returns Promise resolving to the found entity or `null`.
*/
async findOne(
criteria: string | TFilterQuery<T>,
options?: ClassTransformOptions,
@ -303,6 +388,16 @@ export abstract class BaseRepository<
return await this.executeOne(query, this.cls, options);
}
/**
* Retrieve a single document with all `populatePaths` relations resolved.
*
* - Throws if population is not configured.
* - Returns `null` when nothing matches `criteria`.
*
* @param criteria Document `_id` **or** Mongo filter.
* @param projection Optional Mongo projection.
* @returns Promise resolving to the populated entity or `null`.
*/
async findOneAndPopulate(
criteria: string | TFilterQuery<T>,
projection?: ProjectionType<T>,
@ -329,6 +424,17 @@ export abstract class BaseRepository<
projection?: ProjectionType<T>,
): Query<T[], T, object, T, 'find', object>;
/**
* Build an un-executed `find` query with optional pagination, sorting,
* and projection.
*
* The returned query can be further chained or passed to `execute`.
*
* @param filter Mongo selector for the documents.
* @param pageQuery Sort tuple **or** paging object (optional).
* @param projection Mongo projection (optional).
* @returns A Mongoose `find` query with `skip`, `limit`, and `sort` applied.
*/
protected findQuery(
filter: TFilterQuery<T>,
pageQuery?: QuerySortDto<T> | PageQueryDto<T>,
@ -366,6 +472,20 @@ export abstract class BaseRepository<
projection?: ProjectionType<T>,
): Promise<T[]>;
/**
* Find documents matching `filter`.
*
* - `pageQuery` may be:
* * a **sort descriptor** (`QuerySortDto`) — an array of `[field, dir]`
* * a **paging object** (`PageQueryDto`) — `{ limit, skip, sort }`
* - Optional `projection` is forwarded to `findQuery`.
* - Delegates execution to `this.execute`, mapping raw docs to `this.cls`.
*
* @param filter Mongo filter selecting documents.
* @param pageQuery Sort descriptor **or** paging object.
* @param projection Optional Mongo projection.
* @returns Promise resolving to the found documents.
*/
async find(
filter: TFilterQuery<T>,
pageQuery?: QuerySortDto<T> | PageQueryDto<T>,
@ -380,6 +500,14 @@ export abstract class BaseRepository<
return await this.execute(query, this.cls);
}
/**
* Ensure that population is possible for the current repository.
*
* Throws when either `populatePaths` or `clsPopulate` is not configured,
* preventing accidental calls to population-aware methods.
*
* @throws Error if population cannot be performed.
*/
private ensureCanPopulate(): void {
if (!this.populatePaths || !this.clsPopulate) {
throw new Error('Cannot populate query');
@ -401,6 +529,20 @@ export abstract class BaseRepository<
projection?: ProjectionType<T>,
): Promise<TFull[]>;
/**
* Find documents that match `filters` and return them with the relations
* in `populatePaths` resolved.
*
* - `pageQuery` can be either a sort descriptor (`QuerySortDto`) or a full
* paging object (`PageQueryDto`).
* - Optional `projection` is forwarded to `findQuery`.
* - Throws if the repository is not configured for population.
*
* @param filters Mongo filter.
* @param pageQuery Sort or paging information.
* @param projection Optional Mongo projection.
* @returns Promise resolving to the populated documents.
*/
async findAndPopulate(
filters: TFilterQuery<T>,
pageQuery?: QuerySortDto<T> | PageQueryDto<T>,
@ -420,16 +562,37 @@ export abstract class BaseRepository<
return await this.execute(query, this.clsPopulate!);
}
/**
* Build an un-executed query that selects **all** documents,
* applies `sort`, and disables pagination (`limit` / `skip` = 0).
*
* @param sort Optional sort descriptor.
* @returns Mongoose `find` query.
*/
protected findAllQuery(
sort?: QuerySortDto<T>,
): Query<T[], T, object, T, 'find', object> {
return this.findQuery({}, { limit: 0, skip: 0, sort });
}
/**
* Retrieve every document in the collection, optionally sorted.
*
* @param sort Optional sort descriptor.
* @returns Promise resolving to the documents.
*/
async findAll(sort?: QuerySortDto<T>): Promise<T[]> {
return await this.find({}, { limit: 0, skip: 0, sort });
}
/**
* Retrieve every document with all `populatePaths` relations resolved.
*
* - Throws if population is not configured.
*
* @param sort Optional sort descriptor.
* @returns Promise resolving to the populated documents.
*/
async findAllAndPopulate(sort?: QuerySortDto<T>): Promise<TFull[]> {
this.ensureCanPopulate();
const query = this.findAllQuery(sort).populate(this.populatePaths);
@ -474,14 +637,38 @@ export abstract class BaseRepository<
return await this.execute(query, this.clsPopulate!);
}
/**
* Return the total number of documents in the collection
* (uses MongoDB's `estimatedDocumentCount` for speed).
*
* @returns Promise resolving to the estimated document count.
*/
async countAll(): Promise<number> {
return await this.model.estimatedDocumentCount().exec();
}
/**
* Count documents that match the given criteria
* (falls back to all documents when `criteria` is omitted).
*
* @param criteria Optional Mongo filter.
* @returns Promise resolving to the exact document count.
*/
async count(criteria?: TFilterQuery<T>): Promise<number> {
return await this.model.countDocuments(criteria).exec();
}
/**
* Persist a single document and return it as an instance of `this.cls`.
*
* Internally:
* 1. `model.create()` inserts the raw DTO.
* 2. The Mongoose document is converted to a plain object with `leanOpts`.
* 3. `plainToClass()` transforms that object into the domain class.
*
* @param dto Data-transfer object describing the new record.
* @returns A hydrated instance of the domain class.
*/
async create(dto: DtoInfer<DtoAction.Create, Dto, U>): Promise<T> {
const doc = await this.model.create(dto);
@ -492,6 +679,12 @@ export abstract class BaseRepository<
);
}
/**
* Persist an array of documents at once and map each result to `this.cls`.
*
* @param dtoArray Array of DTOs to insert.
* @returns Array of domain-class instances in the same order as `dtoArray`.
*/
async createMany(
dtoArray: DtoInfer<DtoAction.Create, Dto, U>[],
): Promise<T[]> {
@ -502,6 +695,21 @@ export abstract class BaseRepository<
);
}
/**
* Update a **single** document and return the modified version.
*
* Behaviour :
* - `criteria` may be an `_id` string or any Mongo filter object.
* - `dto` is applied via `$set`; when `options.shouldFlatten` is true the
* payload is flattened (e.g. `"a.b": value`) before the update.
* - Fires the `pre|postUpdateValidate` hooks + events.
* - Throws if nothing matches the criteria or if `dto` is empty.
*
* @param criteria `_id` or filter selecting the target document.
* @param dto Partial update payload.
* @param options `new`, `upsert`, `shouldFlatten` (forwarded to Mongoose).
* @returns The updated document (with `new: true` by default).
*/
async updateOne<D extends Partial<U>>(
criteria: string | TFilterQuery<T>,
dto: UpdateQuery<DtoInfer<DtoAction.Update, Dto, D>>,
@ -550,6 +758,18 @@ export abstract class BaseRepository<
return result;
}
/**
* Update **many** documents at once.
*
* - Applies `$set` with the supplied `dto`.
* - When `options.shouldFlatten` is true, flattens the payload first.
* - Does **not** run the validation / event hooks (use `updateOne` for that).
*
* @param filter Mongo filter selecting the documents to update.
* @param dto Update payload.
* @param options `{ shouldFlatten?: boolean }`.
* @returns MongoDB `UpdateWriteOpResult` describing the operation outcome.
*/
async updateMany<D extends Partial<U>>(
filter: TFilterQuery<T>,
dto: UpdateQuery<D>,
@ -560,6 +780,17 @@ export abstract class BaseRepository<
});
}
/**
* Remove **one** document, unless it is marked as `builtin: true`.
*
* If `criteria` is a string, it is treated as the document's `_id`;
* otherwise it is used as a full Mongo filter.
* The filter is automatically augmented with `{ builtin: { $ne: true } }`
* to protect built-in records from deletion.
*
* @param criteria Document `_id` or Mongo filter.
* @returns Promise that resolves to Mongo's `DeleteResult`.
*/
async deleteOne(criteria: string | TFilterQuery<T>): Promise<DeleteResult> {
const filter = typeof criteria === 'string' ? { _id: criteria } : criteria;
@ -568,10 +799,25 @@ export abstract class BaseRepository<
.exec();
}
/**
* Remove **many** documents that match `criteria`, excluding those flagged
* with `builtin: true`.
*
* @param criteria Mongo filter describing the set to delete.
* @returns Promise that resolves to Mongo's `DeleteResult`.
*/
async deleteMany(criteria: TFilterQuery<T>): Promise<DeleteResult> {
return await this.model.deleteMany({ ...criteria, builtin: { $ne: true } });
}
/**
* Runs *before* create-validation logic.
* Override to perform domain-specific checks; throw to abort.
*
* @param _doc The document that will be created.
* @param _filterCriteria Optional additional criteria (e.g. conditional create).
* @param _updates Optional update pipeline when upserting.
*/
async preCreateValidate(
_doc: HydratedDocument<T>,
_filterCriteria?: FilterQuery<T>,
@ -580,10 +826,23 @@ export abstract class BaseRepository<
// Nothing ...
}
/**
* Called *after* create-validation passes,
* but before persistence. Override for side-effects (audit logs, events, etc.).
*
* @param _validated The validated (not yet saved) document.
*/
async postCreateValidate(_validated: HydratedDocument<T>): Promise<void> {
// Nothing ...
}
/**
* Runs *before* validating a single-document update.
* Override to enforce custom rules; throw to abort.
*
* @param _filterCriteria Query criteria used to locate the document.
* @param _updates Update payload or aggregation pipeline.
*/
async preUpdateValidate(
_filterCriteria: FilterQuery<T>,
_updates: UpdateWithAggregationPipeline | UpdateQuery<T>,
@ -591,6 +850,13 @@ export abstract class BaseRepository<
// Nothing ...
}
/**
* Called *after* an update payload is validated,
* just before it is applied.
*
* @param _filterCriteria Same criteria passed to the update.
* @param _updates The validated update payload.
*/
async postUpdateValidate(
_filterCriteria: FilterQuery<T>,
_updates: UpdateWithAggregationPipeline | UpdateQuery<T>,
@ -598,14 +864,33 @@ export abstract class BaseRepository<
// Nothing ...
}
/**
* Executes immediately before persisting a new document.
* Use to inject defaults, timestamps, or derive fields.
*
* @param _doc The document about to be saved.
*/
async preCreate(_doc: HydratedDocument<T>): Promise<void> {
// Nothing ...
}
/**
 * Fires right after a document is saved.
 * Useful for emitting events or refreshing caches.
 *
 * @param _created The newly created document.
 */
async postCreate(_created: HydratedDocument<T>): Promise<void> {
// Intentional no-op: subclasses override this hook when needed.
}
/**
* Runs before a `findOneAndUpdate` operation.
*
* @param _query The Mongoose query object.
* @param _criteria Original filter criteria.
* @param _updates Update payload or pipeline.
*/
async preUpdate(
_query: Query<D, D, unknown, T, 'findOneAndUpdate'>,
_criteria: TFilterQuery<T>,
@ -614,6 +899,13 @@ export abstract class BaseRepository<
// Nothing ...
}
/**
* Runs before an `updateMany` operation.
*
* @param _query The Mongoose query object.
* @param _criteria Filter criteria.
* @param _updates Update payload or pipeline.
*/
async preUpdateMany(
_query: Query<D, D, unknown, T, 'updateMany'>,
_criteria: TFilterQuery<T>,
@ -622,6 +914,12 @@ export abstract class BaseRepository<
// Nothing ...
}
/**
* Fires after an `updateMany` completes.
*
* @param _query The originating query.
* @param _updated Mongoose result object.
*/
async postUpdateMany(
_query: Query<D, D, unknown, T, 'updateMany'>,
_updated: any,
@ -629,6 +927,12 @@ export abstract class BaseRepository<
// Nothing ...
}
/**
* Fires after a `findOneAndUpdate` completes.
*
* @param _query The originating query.
* @param _updated The updated document.
*/
async postUpdate(
_query: Query<D, D, unknown, T, 'findOneAndUpdate'>,
_updated: T,
@ -636,6 +940,12 @@ export abstract class BaseRepository<
// Nothing ...
}
/**
* Runs before a `deleteOne` or `deleteMany`.
*
* @param _query The Mongoose query object.
* @param _criteria Filter criteria.
*/
async preDelete(
_query: Query<DeleteResult, D, unknown, T, 'deleteOne' | 'deleteMany'>,
_criteria: TFilterQuery<T>,
@ -643,6 +953,12 @@ export abstract class BaseRepository<
// Nothing ...
}
/**
* Fires after a `deleteOne` or `deleteMany` completes.
*
* @param _query The originating query.
* @param _result MongoDB `DeleteResult`.
*/
async postDelete(
_query: Query<DeleteResult, D, unknown, T, 'deleteOne' | 'deleteMany'>,
_result: DeleteResult,
@ -650,6 +966,21 @@ export abstract class BaseRepository<
// Nothing ...
}
/**
* Translate a `PageQueryDto` into MongoDB aggregation stages.
*
* Creates, in order:
* 1. **$sort** when `page.sort` is provided. Accepts `1 | -1 | 'asc' | 'desc'`
* (plus `'ascending' | 'descending'`) and normalises them to `1` or `-1`.
* 2. **$skip** when `page.skip` > 0.
* 3. **$limit** when `page.limit` > 0.
*
* If `page` is omitted, an empty array is returned so callers can safely
* spread the result into a pipeline without extra checks.
*
* @param page Optional pagination/sort descriptor.
* @returns Array of `$sort`, `$skip`, and `$limit` stages in the correct order.
*/
buildPaginationPipelineStages<T>(page?: PageQueryDto<T>): PipelineStage[] {
if (!page) return [];
@ -675,6 +1006,13 @@ export abstract class BaseRepository<
return stages;
}
/**
* Populates the provided Mongoose documents with the relations listed in
* `this.populatePaths`, returning lean (plain) objects.
*
* @param docs Hydrated documents to enrich.
* @returns Promise resolving to the populated docs.
*/
async populate(docs: THydratedDocument<T>[]) {
return await this.model.populate(
docs,

View File

@ -1,5 +1,5 @@
/*
* Copyright © 2024 Hexastack. All rights reserved.
* Copyright © 2025 Hexastack. All rights reserved.
*
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
@ -28,7 +28,7 @@ export const nlpSampleEntityFixtures: NlpSampleEntityCreateDto[] = [
{
sample: '2',
entity: '0',
value: '2',
value: '3',
},
{
sample: '3',

View File

@ -491,7 +491,7 @@
"original_text": "Original Text",
"inputs": "Inputs",
"outputs": "Outputs",
"any": "- Any -",
"any": "Any",
"full_name": "First and last name",
"password": "Password"
},

View File

@ -492,7 +492,7 @@
"original_text": "Texte par défaut",
"inputs": "Ports d'entrée",
"outputs": "Ports de sortie",
"any": "- Toutes -",
"any": "Toutes",
"full_name": "Nom et Prénom",
"password": "Mot de passe"
},

View File

@ -44,10 +44,11 @@ interface NlpPatternSelectProps
> {
patterns: NlpPattern[];
onChange: (patterns: NlpPattern[]) => void;
noneLabel?: string;
}
const NlpPatternSelect = (
{ patterns, onChange, ...props }: NlpPatternSelectProps,
{ patterns, onChange, noneLabel = "", ...props }: NlpPatternSelectProps,
ref,
) => {
const inputRef = useRef(null);
@ -91,23 +92,29 @@ const NlpPatternSelect = (
valueId: string,
): void => {
const newSelection = patterns.slice(0);
const update = newSelection.find(({ entity: e }) => e === name);
const idx = newSelection.findIndex(({ entity: e }) => e === name);
if (!update) {
if (idx === -1) {
throw new Error("Unable to find nlp entity");
}
if (valueId === id) {
update.match = "entity";
update.value = name;
newSelection[idx] = {
entity: newSelection[idx].entity,
match: "entity",
};
} else {
const value = getNlpValueFromCache(valueId);
if (!value) {
throw new Error("Unable to find nlp value in cache");
}
update.match = "value";
update.value = value.value;
newSelection[idx] = {
entity: newSelection[idx].entity,
match: "value",
value: value.value,
};
}
onChange(newSelection);
@ -119,10 +126,11 @@ const NlpPatternSelect = (
);
}
const defaultValue =
options.filter(({ name }) =>
patterns.find(({ entity: entityName }) => entityName === name),
) || {};
const defaultValue = patterns
.map(({ entity: entityName }) =>
options.find(({ name }) => entityName === name),
)
.filter(Boolean) as INlpEntity[];
return (
<Autocomplete
@ -183,9 +191,9 @@ const NlpPatternSelect = (
const nlpValues = values.map((vId) =>
getNlpValueFromCache(vId),
) as INlpValue[];
const selectedValue = patterns.find(
(e) => e.entity === name,
)?.value;
const currentPattern = patterns.find((e) => e.entity === name);
const selectedValue =
currentPattern?.match === "value" ? currentPattern.value : null;
const { id: selectedId = id } =
nlpValues.find(({ value }) => value === selectedValue) || {};
@ -204,7 +212,7 @@ const NlpPatternSelect = (
}
if (option === id) {
return t("label.any");
return `- ${noneLabel || t("label.any")} -`;
}
return option;

View File

@ -75,7 +75,7 @@ export const DataGrid = <T extends GridValidRowModel = any>({
slots={slots}
slotProps={{
loadingOverlay: {
variant: "linear-progress",
variant: "skeleton",
noRowsVariant: "skeleton",
},
}}

View File

@ -96,7 +96,10 @@ export default function NlpSample() {
$eq: [
...(type !== "all" ? [{ type }] : []),
...(language ? [{ language }] : []),
...(patterns ? [{ patterns }] : []),
// We send only value match patterns
...(patterns
? [{ patterns: patterns.filter(({ match }) => match === "value") }]
: []),
],
$iLike: ["text"],
},
@ -218,6 +221,7 @@ export default function NlpSample() {
{row.entities
.map((e) => getSampleEntityFromCache(e) as INlpSampleEntity)
.filter((e) => !!e)
.sort((a, b) => String(a.entity).localeCompare(String(b.entity)))
.map((entity) => (
<ChipEntity
key={entity.id}
@ -441,10 +445,9 @@ export default function NlpSample() {
>
<NlpPatternSelect
patterns={patterns}
onChange={(patterns: NlpPattern[]) => {
setPatterns(patterns);
}}
onChange={setPatterns}
fullWidth={true}
noneLabel={t("label.select")}
/>
</Grid>
</Grid>

View File

@ -56,24 +56,27 @@ export const useFind = <
entity,
);
const getFromCache = useGetFromCache(entity);
const { data: total } = useCount(entity, params["where"], {
const countQuery = useCount(entity, params["where"], {
enabled: hasCount,
});
const { dataGridPaginationProps, pageQueryPayload } = usePagination(
total?.count,
countQuery.data?.count,
initialPaginationState,
initialSortState,
hasCount,
);
const normalizedParams = { ...pageQueryPayload, ...(params || {}) };
const enabled = !!total || !hasCount;
const enabled = !!countQuery.data || !hasCount;
const { data: ids, ...normalizedQuery } = useQuery({
enabled,
queryFn: async () => {
const data = await api.find(
normalizedParams,
format === Format.FULL && (POPULATE_BY_TYPE[entity] as P),
);
const data =
!hasCount || (hasCount && !!countQuery.data?.count)
? await api.find(
normalizedParams,
format === Format.FULL && (POPULATE_BY_TYPE[entity] as P),
)
: [];
const { result } = normalizeAndCache(data);
return result;
@ -100,7 +103,11 @@ export const useFind = <
dataGridProps: {
...dataGridPaginationProps,
rows: data || [],
loading: normalizedQuery.isLoading || normalizedQuery.isFetching,
loading:
normalizedQuery.isLoading ||
normalizedQuery.isFetching ||
countQuery.isLoading ||
countQuery.isFetching,
},
};
};

View File

@ -68,12 +68,19 @@ export interface PayloadPattern {
type?: PayloadType;
}
export type NlpPattern = {
export type NlpEntityMatchPattern = {
entity: string;
match: "value" | "entity";
match: "entity";
};
export type NlpValueMatchPattern = {
entity: string;
match: "value";
value: string;
};
export type NlpPattern = NlpEntityMatchPattern | NlpValueMatchPattern;
export type Pattern = null | string | PayloadPattern | NlpPattern[];
export type PatternType =