mirror of
https://github.com/hexastack/hexabot
synced 2025-06-26 18:27:28 +00:00
Merge pull request #1097 from Hexastack/feat/nlp-sample-filter-by-entities
Feat/nlp sample filter by entities
This commit is contained in:
@@ -18,19 +18,27 @@ export const payloadPatternSchema = z.object({
|
||||
|
||||
export type PayloadPattern = z.infer<typeof payloadPatternSchema>;
|
||||
|
||||
export const nlpEntityMatchPatternSchema = z.object({
|
||||
entity: z.string(),
|
||||
match: z.literal('entity'),
|
||||
});
|
||||
|
||||
export type NlpEntityMatchPattern = z.infer<typeof nlpEntityMatchPatternSchema>;
|
||||
|
||||
export const nlpValueMatchPatternSchema = z.object({
|
||||
entity: z.string(),
|
||||
match: z.literal('value'),
|
||||
value: z.string(),
|
||||
});
|
||||
|
||||
export type NlpValueMatchPattern = z.infer<typeof nlpValueMatchPatternSchema>;
|
||||
|
||||
export const nlpPatternSchema = z.discriminatedUnion('match', [
|
||||
z.object({
|
||||
entity: z.string(),
|
||||
match: z.literal('entity'),
|
||||
}),
|
||||
z.object({
|
||||
entity: z.string(),
|
||||
match: z.literal('value'),
|
||||
value: z.string(),
|
||||
}),
|
||||
nlpEntityMatchPatternSchema,
|
||||
nlpValueMatchPatternSchema,
|
||||
]);
|
||||
|
||||
export type NlpPattern = z.infer<typeof nlpPatternSchema>;
|
||||
export type NlpPattern = NlpEntityMatchPattern | NlpValueMatchPattern;
|
||||
|
||||
export const stringRegexPatternSchema = z.string().refine(
|
||||
(value) => {
|
||||
|
||||
@@ -65,6 +65,7 @@ import {
|
||||
mockNlpGreetingNamePatterns,
|
||||
mockNlpGreetingPatterns,
|
||||
mockNlpGreetingWrongNamePatterns,
|
||||
mockWebChannelData,
|
||||
} from '@/utils/test/mocks/block';
|
||||
import {
|
||||
contextBlankInstance,
|
||||
@@ -288,11 +289,7 @@ describe('BlockService', () => {
|
||||
text: 'Hello',
|
||||
},
|
||||
},
|
||||
{
|
||||
isSocket: true,
|
||||
ipAddress: '1.1.1.1',
|
||||
agent: 'Chromium',
|
||||
},
|
||||
mockWebChannelData,
|
||||
);
|
||||
const webEventGetStarted = new WebEventWrapper(
|
||||
handlerMock,
|
||||
@@ -303,11 +300,18 @@ describe('BlockService', () => {
|
||||
payload: 'GET_STARTED',
|
||||
},
|
||||
},
|
||||
mockWebChannelData,
|
||||
);
|
||||
|
||||
const webEventAmbiguous = new WebEventWrapper(
|
||||
handlerMock,
|
||||
{
|
||||
isSocket: true,
|
||||
ipAddress: '1.1.1.1',
|
||||
agent: 'Chromium',
|
||||
type: Web.IncomingMessageType.text,
|
||||
data: {
|
||||
text: "It's not a yes or no answer!",
|
||||
},
|
||||
},
|
||||
mockWebChannelData,
|
||||
);
|
||||
|
||||
it('should return undefined when no blocks are provided', async () => {
|
||||
@@ -332,6 +336,24 @@ describe('BlockService', () => {
|
||||
expect(result).toEqual(blockGetStarted);
|
||||
});
|
||||
|
||||
it('should return undefined when multiple matches are not allowed', async () => {
|
||||
const result = await blockService.match(
|
||||
[
|
||||
{
|
||||
...blockEmpty,
|
||||
patterns: ['/yes/'],
|
||||
},
|
||||
{
|
||||
...blockEmpty,
|
||||
patterns: ['/no/'],
|
||||
},
|
||||
],
|
||||
webEventAmbiguous,
|
||||
false,
|
||||
);
|
||||
expect(result).toEqual(undefined);
|
||||
});
|
||||
|
||||
it('should match block with payload', async () => {
|
||||
webEventGetStarted.setSender(subscriberWithLabels);
|
||||
const result = await blockService.match(blocks, webEventGetStarted);
|
||||
|
||||
@@ -64,68 +64,66 @@ export class BlockService extends BaseService<
|
||||
}
|
||||
|
||||
/**
|
||||
* Filters an array of blocks based on the specified channel.
|
||||
* Checks if block is supported on the specified channel.
|
||||
*
|
||||
* This function ensures that only blocks that are either:
|
||||
* - Not restricted to specific trigger channels (`trigger_channels` is undefined or empty), or
|
||||
* - Explicitly allow the given channel
|
||||
*
|
||||
* are included in the returned array.
|
||||
*
|
||||
* @param blocks - The list of blocks to be filtered.
|
||||
* @param block - The block
|
||||
* @param channel - The name of the channel to filter blocks by.
|
||||
*
|
||||
* @returns The filtered array of blocks that are allowed for the given channel.
|
||||
* @returns Whether the block is supported on the given channel.
|
||||
*/
|
||||
filterBlocksByChannel<B extends Block | BlockFull>(
|
||||
blocks: B[],
|
||||
isChannelSupported<B extends Block | BlockFull>(
|
||||
block: B,
|
||||
channel: ChannelName,
|
||||
) {
|
||||
return blocks.filter((b) => {
|
||||
return (
|
||||
!b.trigger_channels ||
|
||||
b.trigger_channels.length === 0 ||
|
||||
b.trigger_channels.includes(channel)
|
||||
);
|
||||
});
|
||||
return (
|
||||
!block.trigger_channels ||
|
||||
block.trigger_channels.length === 0 ||
|
||||
block.trigger_channels.includes(channel)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Filters an array of blocks based on subscriber labels.
|
||||
* Checks if the block matches the subscriber labels, allowing for two scenarios:
|
||||
* - Has no trigger labels (making it applicable to all subscribers), or
|
||||
* - Contains at least one trigger label that matches a label from the provided list.
|
||||
*
|
||||
* This function selects blocks that either:
|
||||
* - Have no trigger labels (making them applicable to all subscribers), or
|
||||
* - Contain at least one trigger label that matches a label from the provided list.
|
||||
*
|
||||
* The filtered blocks are then **sorted** in descending order by the number of trigger labels,
|
||||
* ensuring that blocks with more specific targeting (more trigger labels) are prioritized.
|
||||
*
|
||||
* @param blocks - The list of blocks to be filtered.
|
||||
* @param block - The block to check.
|
||||
* @param labels - The list of subscriber labels to match against.
|
||||
* @returns The filtered and sorted list of blocks.
|
||||
* @returns True if the block matches the subscriber labels, false otherwise.
|
||||
*/
|
||||
filterBlocksBySubscriberLabels<B extends Block | BlockFull>(
|
||||
blocks: B[],
|
||||
profile?: Subscriber,
|
||||
matchesSubscriberLabels<B extends Block | BlockFull>(
|
||||
block: B,
|
||||
subscriber?: Subscriber,
|
||||
) {
|
||||
if (!profile) {
|
||||
return blocks;
|
||||
if (!subscriber || !subscriber.labels) {
|
||||
return true; // No subscriber or labels to match against
|
||||
}
|
||||
|
||||
return (
|
||||
blocks
|
||||
.filter((b) => {
|
||||
const triggerLabels = b.trigger_labels.map((l) =>
|
||||
typeof l === 'string' ? l : l.id,
|
||||
);
|
||||
return (
|
||||
triggerLabels.length === 0 ||
|
||||
triggerLabels.some((l) => profile.labels.includes(l))
|
||||
);
|
||||
})
|
||||
// Priority goes to block who target users with labels
|
||||
.sort((a, b) => b.trigger_labels.length - a.trigger_labels.length)
|
||||
const triggerLabels = block.trigger_labels.map((l: string | Label) =>
|
||||
typeof l === 'string' ? l : l.id,
|
||||
);
|
||||
return (
|
||||
triggerLabels.length === 0 ||
|
||||
triggerLabels.some((l) => subscriber.labels.includes(l))
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the configured NLU penalty factor from settings, or falls back to a default value.
|
||||
*
|
||||
* @returns The NLU penalty factor as a number.
|
||||
*/
|
||||
private async getPenaltyFactor(): Promise<number> {
|
||||
const settings = await this.settingService.getSettings();
|
||||
const configured = settings.chatbot_settings?.default_nlu_penalty_factor;
|
||||
|
||||
if (configured == null) {
|
||||
this.logger.warn(
|
||||
'Using fallback NLU penalty factor value: %s',
|
||||
FALLBACK_DEFAULT_NLU_PENALTY_FACTOR,
|
||||
);
|
||||
}
|
||||
return configured ?? FALLBACK_DEFAULT_NLU_PENALTY_FACTOR;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -133,75 +131,88 @@ export class BlockService extends BaseService<
|
||||
*
|
||||
* @param filteredBlocks blocks Starting/Next blocks in the conversation flow
|
||||
* @param event Received channel's message
|
||||
* @param canHaveMultipleMatches Whether to allow multiple matches for the same event
|
||||
* (eg. Yes/No question to which the answer is ambiguous "Sometimes yes, sometimes no")
|
||||
*
|
||||
* @returns The block that matches
|
||||
*/
|
||||
async match(
|
||||
blocks: BlockFull[],
|
||||
event: EventWrapper<any, any>,
|
||||
canHaveMultipleMatches = true,
|
||||
): Promise<BlockFull | undefined> {
|
||||
if (!blocks.length) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// Search for block matching a given event
|
||||
let block: BlockFull | undefined = undefined;
|
||||
const payload = event.getPayload();
|
||||
// Narrow the search space
|
||||
const channelName = event.getHandler().getName();
|
||||
const sender = event.getSender();
|
||||
const candidates = blocks.filter(
|
||||
(b) =>
|
||||
this.isChannelSupported(b, channelName) &&
|
||||
this.matchesSubscriberLabels(b, sender),
|
||||
);
|
||||
|
||||
// Perform a filter to get the candidates blocks
|
||||
const filteredBlocks = this.filterBlocksBySubscriberLabels(
|
||||
this.filterBlocksByChannel(blocks, event.getHandler().getName()),
|
||||
event.getSender(),
|
||||
if (!candidates.length) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// Priority goes to block who target users with labels
|
||||
const prioritizedCandidates = candidates.sort(
|
||||
(a, b) => b.trigger_labels.length - a.trigger_labels.length,
|
||||
);
|
||||
|
||||
// Perform a payload match & pick last createdAt
|
||||
const payload = event.getPayload();
|
||||
if (payload) {
|
||||
block = filteredBlocks
|
||||
.filter((b) => {
|
||||
return this.matchPayload(payload, b);
|
||||
})
|
||||
.shift();
|
||||
}
|
||||
|
||||
if (!block) {
|
||||
// Perform a text match (Text or Quick reply)
|
||||
const text = event.getText().trim();
|
||||
|
||||
// Perform a text pattern match
|
||||
block = filteredBlocks
|
||||
.filter((b) => {
|
||||
return this.matchText(text, b);
|
||||
})
|
||||
.shift();
|
||||
|
||||
// Perform an NLP Match
|
||||
const nlp = event.getNLP();
|
||||
if (!block && nlp) {
|
||||
const scoredEntities =
|
||||
await this.nlpService.computePredictionScore(nlp);
|
||||
|
||||
const settings = await this.settingService.getSettings();
|
||||
let penaltyFactor =
|
||||
settings.chatbot_settings?.default_nlu_penalty_factor;
|
||||
if (!penaltyFactor) {
|
||||
this.logger.warn(
|
||||
'Using fallback NLU penalty factor value: %s',
|
||||
FALLBACK_DEFAULT_NLU_PENALTY_FACTOR,
|
||||
);
|
||||
penaltyFactor = FALLBACK_DEFAULT_NLU_PENALTY_FACTOR;
|
||||
}
|
||||
|
||||
if (scoredEntities.entities.length > 0) {
|
||||
block = this.matchBestNLP(
|
||||
filteredBlocks,
|
||||
scoredEntities,
|
||||
penaltyFactor,
|
||||
);
|
||||
}
|
||||
const payloadMatches = prioritizedCandidates.filter((b) => {
|
||||
return this.matchPayload(payload, b);
|
||||
});
|
||||
if (payloadMatches.length > 1 && !canHaveMultipleMatches) {
|
||||
// If the payload matches multiple blocks ,
|
||||
// we return undefined so that we trigger the local fallback
|
||||
return undefined;
|
||||
} else if (payloadMatches.length > 0) {
|
||||
// If we have a payload match, we return the first one
|
||||
// (which is the most recent one due to the sort)
|
||||
// and we don't check for text or NLP matches
|
||||
return payloadMatches[0];
|
||||
}
|
||||
}
|
||||
|
||||
return block;
|
||||
// Perform a text match (Text or Quick reply)
|
||||
const text = event.getText().trim();
|
||||
if (text) {
|
||||
const textMatches = prioritizedCandidates.filter((b) => {
|
||||
return this.matchText(text, b);
|
||||
});
|
||||
|
||||
if (textMatches.length > 1 && !canHaveMultipleMatches) {
|
||||
// If the text matches multiple blocks (especially regex),
|
||||
// we return undefined so that we trigger the local fallback
|
||||
return undefined;
|
||||
} else if (textMatches.length > 0) {
|
||||
return textMatches[0];
|
||||
}
|
||||
}
|
||||
|
||||
// Perform an NLP Match
|
||||
const nlp = event.getNLP();
|
||||
if (nlp) {
|
||||
const scoredEntities = await this.nlpService.computePredictionScore(nlp);
|
||||
|
||||
if (scoredEntities.entities.length) {
|
||||
const penaltyFactor = await this.getPenaltyFactor();
|
||||
return this.matchBestNLP(
|
||||
prioritizedCandidates,
|
||||
scoredEntities,
|
||||
penaltyFactor,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -500,11 +511,19 @@ export class BlockService extends BaseService<
|
||||
envelope: StdOutgoingSystemEnvelope,
|
||||
) {
|
||||
// Perform a filter to get the candidates blocks
|
||||
const filteredBlocks = this.filterBlocksBySubscriberLabels(
|
||||
this.filterBlocksByChannel(blocks, event.getHandler().getName()),
|
||||
event.getSender(),
|
||||
const handlerName = event.getHandler().getName();
|
||||
const sender = event.getSender();
|
||||
const candidates = blocks.filter(
|
||||
(b) =>
|
||||
this.isChannelSupported(b, handlerName) &&
|
||||
this.matchesSubscriberLabels(b, sender),
|
||||
);
|
||||
return filteredBlocks.find((b) => {
|
||||
|
||||
if (!candidates.length) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return candidates.find((b) => {
|
||||
return b.patterns
|
||||
.filter(
|
||||
(p) => typeof p === 'object' && 'type' in p && p.type === 'outcome',
|
||||
|
||||
@@ -293,7 +293,7 @@ describe('BotService', () => {
|
||||
event.setSender(webSubscriber);
|
||||
|
||||
const clearMock = jest
|
||||
.spyOn(botService, 'handleIncomingMessage')
|
||||
.spyOn(botService, 'handleOngoingConversationMessage')
|
||||
.mockImplementation(
|
||||
async (
|
||||
actualConversation: ConversationFull,
|
||||
|
||||
@@ -253,7 +253,7 @@ export class BotService {
|
||||
*
|
||||
* @returns A promise that resolves with a boolean indicating whether the conversation is active and a matching block was found.
|
||||
*/
|
||||
async handleIncomingMessage(
|
||||
async handleOngoingConversationMessage(
|
||||
convo: ConversationFull,
|
||||
event: EventWrapper<any, any>,
|
||||
) {
|
||||
@@ -272,8 +272,15 @@ export class BotService {
|
||||
max_attempts: 0,
|
||||
};
|
||||
|
||||
// We will avoid having multiple matches when we are not at the start of a conversation
|
||||
// and only if local fallback is enabled
|
||||
const canHaveMultipleMatches = !fallbackOptions.active;
|
||||
// Find the next block that matches
|
||||
const matchedBlock = await this.blockService.match(nextBlocks, event);
|
||||
const matchedBlock = await this.blockService.match(
|
||||
nextBlocks,
|
||||
event,
|
||||
canHaveMultipleMatches,
|
||||
);
|
||||
// If there is no match in next block then loopback (current fallback)
|
||||
// This applies only to text messages + there's a max attempt to be specified
|
||||
let fallbackBlock: BlockFull | undefined;
|
||||
@@ -376,7 +383,7 @@ export class BotService {
|
||||
'Existing conversations',
|
||||
);
|
||||
this.logger.debug('Conversation has been captured! Responding ...');
|
||||
return await this.handleIncomingMessage(conversation, event);
|
||||
return await this.handleOngoingConversationMessage(conversation, event);
|
||||
} catch (err) {
|
||||
this.logger.error(
|
||||
'An error occurred when searching for a conversation ',
|
||||
|
||||
@@ -10,6 +10,7 @@ import { CACHE_MANAGER } from '@nestjs/cache-manager';
|
||||
import { BadRequestException, NotFoundException } from '@nestjs/common';
|
||||
import { MongooseModule } from '@nestjs/mongoose';
|
||||
|
||||
import { NlpValueMatchPattern } from '@/chat/schemas/types/pattern';
|
||||
import { HelperService } from '@/helper/helper.service';
|
||||
import { LanguageRepository } from '@/i18n/repositories/language.repository';
|
||||
import { Language, LanguageModel } from '@/i18n/schemas/language.schema';
|
||||
@@ -181,11 +182,51 @@ describe('NlpSampleController', () => {
|
||||
})),
|
||||
);
|
||||
});
|
||||
|
||||
it('should find nlp samples with patterns', async () => {
|
||||
const pageQuery = getPageQuery<NlpSample>({ sort: ['text', 'desc'] });
|
||||
const patterns: NlpValueMatchPattern[] = [
|
||||
{ entity: 'intent', match: 'value', value: 'greeting' },
|
||||
];
|
||||
const result = await nlpSampleController.findPage(
|
||||
pageQuery,
|
||||
['language', 'entities'],
|
||||
{},
|
||||
patterns,
|
||||
);
|
||||
// Should only return samples matching the pattern
|
||||
const nlpSamples = await nlpSampleService.findByPatternsAndPopulate(
|
||||
{ filters: {}, patterns },
|
||||
pageQuery,
|
||||
);
|
||||
expect(result).toEqualPayload(nlpSamples);
|
||||
});
|
||||
|
||||
it('should return empty array if no samples match the patterns', async () => {
|
||||
const pageQuery = getPageQuery<NlpSample>({ sort: ['text', 'desc'] });
|
||||
const patterns: NlpValueMatchPattern[] = [
|
||||
{ entity: 'intent', match: 'value', value: 'nonexistent' },
|
||||
];
|
||||
jest.spyOn(nlpSampleService, 'findByPatternsAndPopulate');
|
||||
const result = await nlpSampleController.findPage(
|
||||
pageQuery,
|
||||
['language', 'entities'],
|
||||
{},
|
||||
patterns,
|
||||
);
|
||||
expect(nlpSampleService.findByPatternsAndPopulate).toHaveBeenCalledTimes(
|
||||
1,
|
||||
);
|
||||
expect(Array.isArray(result)).toBe(true);
|
||||
expect(result).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('count', () => {
|
||||
it('should count the nlp samples', async () => {
|
||||
jest.spyOn(nlpSampleService, 'count');
|
||||
const result = await nlpSampleController.count({});
|
||||
expect(nlpSampleService.count).toHaveBeenCalledTimes(1);
|
||||
const count = nlpSampleFixtures.length;
|
||||
expect(result).toEqual({ count });
|
||||
});
|
||||
@@ -439,4 +480,34 @@ describe('NlpSampleController', () => {
|
||||
).rejects.toThrow(NotFoundException);
|
||||
});
|
||||
});
|
||||
|
||||
describe('filterCount', () => {
|
||||
it('should count the nlp samples without patterns', async () => {
|
||||
const filters = { text: 'Hello' };
|
||||
jest.spyOn(nlpSampleService, 'countByPatterns');
|
||||
const result = await nlpSampleController.filterCount(filters, []);
|
||||
expect(nlpSampleService.countByPatterns).toHaveBeenCalledTimes(1);
|
||||
expect(result).toEqual({ count: 1 });
|
||||
});
|
||||
|
||||
it('should count the nlp samples with patterns', async () => {
|
||||
const filters = { text: 'Hello' };
|
||||
const patterns: NlpValueMatchPattern[] = [
|
||||
{ entity: 'intent', match: 'value', value: 'greeting' },
|
||||
];
|
||||
jest.spyOn(nlpSampleService, 'countByPatterns');
|
||||
const result = await nlpSampleController.filterCount(filters, patterns);
|
||||
expect(nlpSampleService.countByPatterns).toHaveBeenCalledTimes(1);
|
||||
expect(result).toEqual({ count: 1 });
|
||||
});
|
||||
|
||||
it('should return zero count when no samples match the filters and patterns', async () => {
|
||||
const filters = { text: 'Nonexistent' };
|
||||
const patterns: NlpValueMatchPattern[] = [
|
||||
{ entity: 'intent', match: 'value', value: 'nonexistent' },
|
||||
];
|
||||
const result = await nlpSampleController.filterCount(filters, patterns);
|
||||
expect(result).toEqual({ count: 0 });
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -29,7 +29,12 @@ import {
|
||||
import { FileInterceptor } from '@nestjs/platform-express';
|
||||
import { CsrfCheck } from '@tekuconcept/nestjs-csrf';
|
||||
import { Response } from 'express';
|
||||
import { z } from 'zod';
|
||||
|
||||
import {
|
||||
NlpValueMatchPattern,
|
||||
nlpValueMatchPatternSchema,
|
||||
} from '@/chat/schemas/types/pattern';
|
||||
import { HelperService } from '@/helper/helper.service';
|
||||
import { HelperType } from '@/helper/types';
|
||||
import { LanguageService } from '@/i18n/services/language.service';
|
||||
@@ -40,6 +45,7 @@ import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
|
||||
import { PageQueryPipe } from '@/utils/pagination/pagination-query.pipe';
|
||||
import { PopulatePipe } from '@/utils/pipes/populate.pipe';
|
||||
import { SearchFilterPipe } from '@/utils/pipes/search-filter.pipe';
|
||||
import { ZodQueryParamPipe } from '@/utils/pipes/zod.pipe';
|
||||
import { TFilterQuery } from '@/utils/types/filter.types';
|
||||
|
||||
import { NlpSampleDto, TNlpSampleDto } from '../dto/nlp-sample.dto';
|
||||
@@ -184,9 +190,22 @@ export class NlpSampleController extends BaseController<
|
||||
allowedFields: ['text', 'type', 'language'],
|
||||
}),
|
||||
)
|
||||
filters?: TFilterQuery<NlpSample>,
|
||||
filters: TFilterQuery<NlpSample> = {},
|
||||
@Query(
|
||||
new ZodQueryParamPipe(
|
||||
z.array(nlpValueMatchPatternSchema),
|
||||
(q) => q?.where?.patterns,
|
||||
),
|
||||
)
|
||||
patterns: NlpValueMatchPattern[] = [],
|
||||
) {
|
||||
return await this.count(filters);
|
||||
const count = await this.nlpSampleService.countByPatterns({
|
||||
filters,
|
||||
patterns,
|
||||
});
|
||||
return {
|
||||
count,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -285,10 +304,23 @@ export class NlpSampleController extends BaseController<
|
||||
}),
|
||||
)
|
||||
filters: TFilterQuery<NlpSample>,
|
||||
@Query(
|
||||
new ZodQueryParamPipe(
|
||||
z.array(nlpValueMatchPatternSchema),
|
||||
(q) => q?.where?.patterns,
|
||||
),
|
||||
)
|
||||
patterns: NlpValueMatchPattern[] = [],
|
||||
) {
|
||||
return this.canPopulate(populate)
|
||||
? await this.nlpSampleService.findAndPopulate(filters, pageQuery)
|
||||
: await this.nlpSampleService.find(filters, pageQuery);
|
||||
? await this.nlpSampleService.findByPatternsAndPopulate(
|
||||
{ filters, patterns },
|
||||
pageQuery,
|
||||
)
|
||||
: await this.nlpSampleService.findByPatterns(
|
||||
{ filters, patterns },
|
||||
pageQuery,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -7,9 +7,11 @@
|
||||
*/
|
||||
|
||||
import { MongooseModule } from '@nestjs/mongoose';
|
||||
import { Types } from 'mongoose';
|
||||
|
||||
import { LanguageRepository } from '@/i18n/repositories/language.repository';
|
||||
import { Language, LanguageModel } from '@/i18n/schemas/language.schema';
|
||||
import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
|
||||
import { nlpSampleFixtures } from '@/utils/test/fixtures/nlpsample';
|
||||
import { installNlpSampleEntityFixtures } from '@/utils/test/fixtures/nlpsampleentity';
|
||||
import { getPageQuery } from '@/utils/test/pagination';
|
||||
@@ -29,13 +31,16 @@ import {
|
||||
NlpSampleFull,
|
||||
NlpSampleModel,
|
||||
} from '../schemas/nlp-sample.schema';
|
||||
import { NlpValue, NlpValueModel } from '../schemas/nlp-value.schema';
|
||||
|
||||
import { NlpSampleEntityRepository } from './nlp-sample-entity.repository';
|
||||
import { NlpSampleRepository } from './nlp-sample.repository';
|
||||
import { NlpValueRepository } from './nlp-value.repository';
|
||||
|
||||
describe('NlpSampleRepository', () => {
|
||||
let nlpSampleRepository: NlpSampleRepository;
|
||||
let nlpSampleEntityRepository: NlpSampleEntityRepository;
|
||||
let nlpValueRepository: NlpValueRepository;
|
||||
let languageRepository: LanguageRepository;
|
||||
let nlpSampleEntity: NlpSampleEntity | null;
|
||||
let noNlpSample: NlpSample | null;
|
||||
@@ -48,21 +53,28 @@ describe('NlpSampleRepository', () => {
|
||||
MongooseModule.forFeature([
|
||||
NlpSampleModel,
|
||||
NlpSampleEntityModel,
|
||||
NlpValueModel,
|
||||
LanguageModel,
|
||||
]),
|
||||
],
|
||||
providers: [
|
||||
NlpSampleRepository,
|
||||
NlpSampleEntityRepository,
|
||||
NlpValueRepository,
|
||||
LanguageRepository,
|
||||
],
|
||||
});
|
||||
[nlpSampleRepository, nlpSampleEntityRepository, languageRepository] =
|
||||
await getMocks([
|
||||
NlpSampleRepository,
|
||||
NlpSampleEntityRepository,
|
||||
LanguageRepository,
|
||||
]);
|
||||
[
|
||||
nlpSampleRepository,
|
||||
nlpSampleEntityRepository,
|
||||
nlpValueRepository,
|
||||
languageRepository,
|
||||
] = await getMocks([
|
||||
NlpSampleRepository,
|
||||
NlpSampleEntityRepository,
|
||||
NlpValueRepository,
|
||||
LanguageRepository,
|
||||
]);
|
||||
noNlpSample = await nlpSampleRepository.findOne({ text: 'No' });
|
||||
nlpSampleEntity = await nlpSampleEntityRepository.findOne({
|
||||
sample: noNlpSample!.id,
|
||||
@@ -141,4 +153,149 @@ describe('NlpSampleRepository', () => {
|
||||
expect(sampleEntities.length).toEqual(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('findByEntities', () => {
|
||||
it('should return mapped NlpSample instances for matching entities', async () => {
|
||||
const filters = {};
|
||||
const values = await nlpValueRepository.find({ value: 'greeting' });
|
||||
|
||||
const result = await nlpSampleRepository.findByEntities({
|
||||
filters,
|
||||
values,
|
||||
});
|
||||
expect(result).toHaveLength(2);
|
||||
expect(result[0]).toBeInstanceOf(NlpSample);
|
||||
expect(result[0].text).toBe('Hello');
|
||||
});
|
||||
|
||||
it('should return an empty array if no samples match', async () => {
|
||||
const filters = {};
|
||||
const values = [
|
||||
{
|
||||
id: new Types.ObjectId().toHexString(),
|
||||
entity: new Types.ObjectId().toHexString(),
|
||||
value: 'nonexistent',
|
||||
},
|
||||
] as NlpValue[];
|
||||
|
||||
const result = await nlpSampleRepository.findByEntities({
|
||||
filters,
|
||||
values,
|
||||
});
|
||||
|
||||
expect(Array.isArray(result)).toBe(true);
|
||||
expect(result).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('findByEntitiesAndPopulate', () => {
|
||||
it('should return populated NlpSampleFull instances for matching entities', async () => {
|
||||
const filters = {};
|
||||
const values = await nlpValueRepository.find({ value: 'greeting' });
|
||||
|
||||
const result = await nlpSampleRepository.findByEntitiesAndPopulate({
|
||||
filters,
|
||||
values,
|
||||
});
|
||||
|
||||
expect(result.length).toBe(2);
|
||||
result.forEach((sample) => {
|
||||
expect(sample).toBeInstanceOf(NlpSampleFull);
|
||||
expect(sample.entities).toBeDefined();
|
||||
expect(Array.isArray(sample.entities)).toBe(true);
|
||||
expect(sample.language).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
it('should return an empty array if no samples match', async () => {
|
||||
const filters = {};
|
||||
const values = [
|
||||
{
|
||||
id: new Types.ObjectId().toHexString(),
|
||||
entity: new Types.ObjectId().toHexString(),
|
||||
value: 'nonexistent',
|
||||
},
|
||||
] as NlpValue[];
|
||||
|
||||
const result = await nlpSampleRepository.findByEntitiesAndPopulate({
|
||||
filters,
|
||||
values,
|
||||
});
|
||||
|
||||
expect(Array.isArray(result)).toBe(true);
|
||||
expect(result).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should support pagination and projection', async () => {
|
||||
const filters = {};
|
||||
const values = await nlpValueRepository.find({ value: 'greeting' });
|
||||
const page = {
|
||||
limit: 1,
|
||||
skip: 0,
|
||||
sort: ['text', 'asc'],
|
||||
} as PageQueryDto<NlpSample>;
|
||||
const projection = { text: 1 };
|
||||
|
||||
const result = await nlpSampleRepository.findByEntitiesAndPopulate(
|
||||
{ filters, values },
|
||||
page,
|
||||
projection,
|
||||
);
|
||||
|
||||
expect(Array.isArray(result)).toBe(true);
|
||||
expect(result.length).toBe(1);
|
||||
if (result.length > 0) {
|
||||
expect(result[0]).toHaveProperty('text');
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('countByEntities', () => {
|
||||
it('should return the correct count for matching entities', async () => {
|
||||
const filters = {};
|
||||
const values = await nlpValueRepository.find({ value: 'greeting' });
|
||||
|
||||
const count = await nlpSampleRepository.countByEntities({
|
||||
filters,
|
||||
values,
|
||||
});
|
||||
|
||||
expect(typeof count).toBe('number');
|
||||
expect(count).toBe(2);
|
||||
});
|
||||
|
||||
it('should return 0 if no samples match', async () => {
|
||||
const filters = {};
|
||||
const values = [
|
||||
{
|
||||
id: new Types.ObjectId().toHexString(),
|
||||
entity: new Types.ObjectId().toHexString(),
|
||||
value: 'nonexistent',
|
||||
},
|
||||
] as NlpValue[];
|
||||
|
||||
const count = await nlpSampleRepository.countByEntities({
|
||||
filters,
|
||||
values,
|
||||
});
|
||||
|
||||
expect(count).toBe(0);
|
||||
});
|
||||
|
||||
it('should respect filters (e.g. language)', async () => {
|
||||
const values = await nlpValueRepository.find({ value: 'greeting' });
|
||||
const language = languages[0];
|
||||
const filters = { language: language.id };
|
||||
|
||||
const count = await nlpSampleRepository.countByEntities({
|
||||
filters,
|
||||
values,
|
||||
});
|
||||
|
||||
// Should be <= total greeting samples, and >= 0
|
||||
expect(typeof count).toBe('number');
|
||||
expect(count).toBeGreaterThanOrEqual(0);
|
||||
expect(count).toBeLessThanOrEqual(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -8,18 +8,31 @@
|
||||
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { InjectModel } from '@nestjs/mongoose';
|
||||
import { Document, Model, Query } from 'mongoose';
|
||||
import { plainToClass } from 'class-transformer';
|
||||
import {
|
||||
Aggregate,
|
||||
Document,
|
||||
Model,
|
||||
PipelineStage,
|
||||
ProjectionType,
|
||||
Query,
|
||||
Types,
|
||||
} from 'mongoose';
|
||||
|
||||
import { BaseRepository, DeleteResult } from '@/utils/generics/base-repository';
|
||||
import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
|
||||
import { TFilterQuery } from '@/utils/types/filter.types';
|
||||
|
||||
import { TNlpSampleDto } from '../dto/nlp-sample.dto';
|
||||
import { NlpSampleEntity } from '../schemas/nlp-sample-entity.schema';
|
||||
import {
|
||||
NLP_SAMPLE_POPULATE,
|
||||
NlpSample,
|
||||
NlpSampleDocument,
|
||||
NlpSampleFull,
|
||||
NlpSamplePopulate,
|
||||
} from '../schemas/nlp-sample.schema';
|
||||
import { NlpValue } from '../schemas/nlp-value.schema';
|
||||
|
||||
import { NlpSampleEntityRepository } from './nlp-sample-entity.repository';
|
||||
|
||||
@@ -32,11 +45,257 @@ export class NlpSampleRepository extends BaseRepository<
|
||||
> {
|
||||
constructor(
|
||||
@InjectModel(NlpSample.name) readonly model: Model<NlpSample>,
|
||||
@InjectModel(NlpSampleEntity.name)
|
||||
readonly sampleEntityModel: Model<NlpSampleEntity>,
|
||||
private readonly nlpSampleEntityRepository: NlpSampleEntityRepository,
|
||||
) {
|
||||
super(model, NlpSample, NLP_SAMPLE_POPULATE, NlpSampleFull);
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize the filter query.
|
||||
*
|
||||
* @param filters - The filters to normalize.
|
||||
* @returns The normalized filters.
|
||||
*/
|
||||
private normalizeFilters(
|
||||
filters: TFilterQuery<NlpSample>,
|
||||
): TFilterQuery<NlpSample> {
|
||||
if (filters?.$and) {
|
||||
return {
|
||||
...filters,
|
||||
$and: filters.$and.map((condition) => {
|
||||
// @todo: think of a better way to handle language to objectId conversion
|
||||
// This is a workaround for the fact that language is stored as an ObjectId
|
||||
// in the database, but we want to filter by its string representation.
|
||||
if ('language' in condition && condition.language) {
|
||||
return {
|
||||
...condition,
|
||||
language: new Types.ObjectId(condition.language as string),
|
||||
};
|
||||
}
|
||||
return condition;
|
||||
}),
|
||||
};
|
||||
}
|
||||
return filters;
|
||||
}
|
||||
|
||||
/**
 * Build the aggregation stages that restrict the *nlpsamples* collection to
 * samples which:
 * 1. Are linked (through *nlpsampleentities*) to **all** of the supplied
 *    `values`, and
 * 2. Satisfy the optional `filters`.
 *
 * NOTE(review): `values` is assumed to be non-empty — an empty list would
 * produce a `$match: { $or: [] }` stage, which MongoDB rejects. Callers
 * (e.g. `findByPatterns`) guard against this — TODO confirm all call sites do.
 *
 * @param criterias Object with:
 * @param criterias.filters Extra filters to be applied on *nlpsamples*.
 * @param criterias.values Value documents whose IDs should match `value`
 *                         (their `entity` field should match `entity`).
 * @returns Array of aggregation `PipelineStage`s ready to be concatenated
 * into a larger pipeline.
 */
buildFindByEntitiesStages({
  filters,
  values,
}: {
  filters: TFilterQuery<NlpSample>;
  values: NlpValue[];
}): PipelineStage[] {
  // One { entity, value } ObjectId pair per required value; a sample must
  // reference every one of these pairs to be kept.
  const requiredPairs = values.map(({ id, entity }) => ({
    entity: new Types.ObjectId(entity),
    value: new Types.ObjectId(id),
  }));

  const normalizedFilters = this.normalizeFilters(filters);

  return [
    // Apply the caller-supplied filters first to shrink the working set.
    {
      $match: {
        ...normalizedFilters,
      },
    },

    // Fetch the entities for each sample (only links matching a required pair)
    {
      $lookup: {
        from: 'nlpsampleentities',
        localField: '_id', // nlpsamples._id
        foreignField: 'sample', // nlpsampleentities.sample
        as: 'sampleentities',
        pipeline: [
          {
            $match: {
              $or: requiredPairs,
            },
          },
        ],
      },
    },

    // Filter out empty or less matching
    {
      $match: {
        $expr: {
          $gte: [{ $size: '$sampleentities' }, requiredPairs.length],
        },
      },
    },

    // Collapse each link into an { entity, value } object
    // (key order matters: it must mirror requiredPairs for $setIntersection)
    {
      $addFields: {
        entities: {
          $ifNull: [
            {
              $map: {
                input: '$sampleentities',
                as: 's',
                in: { entity: '$$s.entity', value: '$$s.value' },
              },
            },
            [],
          ],
        },
      },
    },

    // Keep only the samples whose `entities` array ⊇ `requiredPairs`
    {
      $match: {
        $expr: {
          $eq: [
            requiredPairs.length, // target size
            {
              $size: {
                $setIntersection: ['$entities', requiredPairs],
              },
            },
          ],
        },
      },
    },

    // Drop the helper arrays if you don’t need them downstream
    { $project: { entities: 0, sampleentities: 0 } },
  ];
}
|
||||
|
||||
findByEntitiesAggregation(
|
||||
criterias: {
|
||||
filters: TFilterQuery<NlpSample>;
|
||||
values: NlpValue[];
|
||||
},
|
||||
page?: PageQueryDto<NlpSample>,
|
||||
projection?: ProjectionType<NlpSample>,
|
||||
): Aggregate<NlpSampleDocument[]> {
|
||||
return this.model.aggregate<NlpSampleDocument>([
|
||||
...this.buildFindByEntitiesStages(criterias),
|
||||
|
||||
// sort / skip / limit
|
||||
...this.buildPaginationPipelineStages(page),
|
||||
|
||||
// projection
|
||||
...(projection
|
||||
? [
|
||||
{
|
||||
$project:
|
||||
typeof projection === 'string'
|
||||
? { [projection]: 1 }
|
||||
: projection,
|
||||
},
|
||||
]
|
||||
: []),
|
||||
]);
|
||||
}
|
||||
|
||||
async findByEntities(
|
||||
criterias: {
|
||||
filters: TFilterQuery<NlpSample>;
|
||||
values: NlpValue[];
|
||||
},
|
||||
page?: PageQueryDto<NlpSample>,
|
||||
projection?: ProjectionType<NlpSample>,
|
||||
): Promise<NlpSample[]> {
|
||||
const aggregation = this.findByEntitiesAggregation(
|
||||
criterias,
|
||||
page,
|
||||
projection,
|
||||
);
|
||||
|
||||
const resultSet = await aggregation.exec();
|
||||
return resultSet.map((doc) =>
|
||||
plainToClass(NlpSample, doc, this.transformOpts),
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Find NLP samples by entities and populate them with their related data.
|
||||
*
|
||||
* @param criterias - Criteria containing filters and values to match.
|
||||
* @param page - Optional pagination parameters.
|
||||
* @param projection - Optional projection to limit fields returned.
|
||||
* @returns Promise resolving to an array of populated NlpSampleFull objects.
|
||||
*/
|
||||
async findByEntitiesAndPopulate(
|
||||
criterias: {
|
||||
filters: TFilterQuery<NlpSample>;
|
||||
values: NlpValue[];
|
||||
},
|
||||
page?: PageQueryDto<NlpSample>,
|
||||
projection?: ProjectionType<NlpSample>,
|
||||
): Promise<NlpSampleFull[]> {
|
||||
const aggregation = this.findByEntitiesAggregation(
|
||||
criterias,
|
||||
page,
|
||||
projection,
|
||||
);
|
||||
|
||||
const docs = await aggregation.exec();
|
||||
|
||||
const populatedResultSet = await this.populate(docs);
|
||||
|
||||
return populatedResultSet.map((doc) =>
|
||||
plainToClass(NlpSampleFull, doc, this.transformOpts),
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Build an aggregation pipeline that counts NLP samples satisfying:
|
||||
* – the extra `filters` (passed to `$match` later on), and
|
||||
* – All of the supplied `entities` / `values`.
|
||||
*
|
||||
* @param criterias `{ filters, entities, values }`
|
||||
* @returns Un-executed aggregation cursor.
|
||||
*/
|
||||
countByEntitiesAggregation(criterias: {
|
||||
filters: TFilterQuery<NlpSample>;
|
||||
values: NlpValue[];
|
||||
}): Aggregate<{ count: number }[]> {
|
||||
return this.model.aggregate<{ count: number }>([
|
||||
...this.buildFindByEntitiesStages(criterias),
|
||||
|
||||
// Final count
|
||||
{ $count: 'count' },
|
||||
]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the count of samples by filters, entities and/or values
|
||||
*
|
||||
* @param criterias `{ filters, entities, values }`
|
||||
* @returns Promise resolving to the count.
|
||||
*/
|
||||
async countByEntities(criterias: {
|
||||
filters: TFilterQuery<NlpSample>;
|
||||
values: NlpValue[];
|
||||
}): Promise<number> {
|
||||
const aggregation = this.countByEntitiesAggregation(criterias);
|
||||
|
||||
const [result] = await aggregation.exec();
|
||||
|
||||
return result?.count || 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes NLP sample entities associated with the provided criteria before deleting the sample itself.
|
||||
*
|
||||
|
||||
@@ -10,9 +10,11 @@ import { CACHE_MANAGER } from '@nestjs/cache-manager';
|
||||
import { BadRequestException, NotFoundException } from '@nestjs/common';
|
||||
import { MongooseModule } from '@nestjs/mongoose';
|
||||
|
||||
import { NlpValueMatchPattern } from '@/chat/schemas/types/pattern';
|
||||
import { LanguageRepository } from '@/i18n/repositories/language.repository';
|
||||
import { Language, LanguageModel } from '@/i18n/schemas/language.schema';
|
||||
import { LanguageService } from '@/i18n/services/language.service';
|
||||
import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
|
||||
import { nlpSampleFixtures } from '@/utils/test/fixtures/nlpsample';
|
||||
import { installNlpSampleEntityFixtures } from '@/utils/test/fixtures/nlpsampleentity';
|
||||
import { getPageQuery } from '@/utils/test/pagination';
|
||||
@@ -52,6 +54,7 @@ describe('NlpSampleService', () => {
|
||||
let nlpEntityService: NlpEntityService;
|
||||
let nlpSampleService: NlpSampleService;
|
||||
let nlpSampleEntityService: NlpSampleEntityService;
|
||||
let nlpValueService: NlpValueService;
|
||||
let languageService: LanguageService;
|
||||
let nlpSampleEntityRepository: NlpSampleEntityRepository;
|
||||
let nlpSampleRepository: NlpSampleRepository;
|
||||
@@ -98,6 +101,7 @@ describe('NlpSampleService', () => {
|
||||
nlpEntityService,
|
||||
nlpSampleService,
|
||||
nlpSampleEntityService,
|
||||
nlpValueService,
|
||||
nlpSampleRepository,
|
||||
nlpSampleEntityRepository,
|
||||
nlpSampleEntityRepository,
|
||||
@@ -107,6 +111,7 @@ describe('NlpSampleService', () => {
|
||||
NlpEntityService,
|
||||
NlpSampleService,
|
||||
NlpSampleEntityService,
|
||||
NlpValueService,
|
||||
NlpSampleRepository,
|
||||
NlpSampleEntityRepository,
|
||||
NlpSampleEntityRepository,
|
||||
@@ -360,4 +365,200 @@ describe('NlpSampleService', () => {
|
||||
expect(extractSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
// Relies on the nlpSample fixtures installed in beforeAll: the 'greeting'
// intent is attached to at least one sample whose text is 'Hello'.
describe('findByPatterns', () => {
  it('should return samples without providing patterns', async () => {
    // Empty patterns fall back to a plain repository.find on the filters.
    const result = await nlpSampleService.findByPatterns(
      { filters: {}, patterns: [] },
      undefined,
    );

    expect(Array.isArray(result)).toBe(true);
    expect(result.length).toBeGreaterThan(0);
  });

  it('should return samples matching the given patterns', async () => {
    // Assume pattern: entity 'intent', value 'greeting'
    const patterns: NlpValueMatchPattern[] = [
      { entity: 'intent', match: 'value', value: 'greeting' },
    ];
    jest.spyOn(nlpSampleRepository, 'findByEntities');
    jest.spyOn(nlpValueService, 'findByPatterns');
    const result = await nlpSampleService.findByPatterns(
      { filters: {}, patterns },
      undefined,
    );
    // Both the value resolution and the entity lookup must have run.
    expect(nlpSampleRepository.findByEntities).toHaveBeenCalled();
    expect(nlpValueService.findByPatterns).toHaveBeenCalled();
    expect(Array.isArray(result)).toBe(true);
    expect(result[0].text).toBe('Hello');
  });

  it('should return an empty array if no samples match the patterns', async () => {
    const patterns: NlpValueMatchPattern[] = [
      { entity: 'intent', match: 'value', value: 'nonexistent' },
    ];

    jest.spyOn(nlpSampleRepository, 'findByEntities');
    jest.spyOn(nlpValueService, 'findByPatterns');
    const result = await nlpSampleService.findByPatterns(
      { filters: {}, patterns },
      undefined,
    );

    // No values resolve for 'nonexistent', so the entity lookup is skipped.
    expect(nlpSampleRepository.findByEntities).not.toHaveBeenCalled();
    expect(nlpValueService.findByPatterns).toHaveBeenCalled();
    expect(Array.isArray(result)).toBe(true);
    expect(result).toHaveLength(0);
  });

  it('should support pagination', async () => {
    const patterns: NlpValueMatchPattern[] = [
      { entity: 'intent', match: 'value', value: 'greeting' },
    ];
    const page: PageQueryDto<NlpSample> = {
      limit: 1,
      skip: 0,
      sort: ['text', 'asc'],
    };

    const result = await nlpSampleService.findByPatterns(
      { filters: {}, patterns },
      page,
    );

    // limit: 1 caps the result set regardless of how many samples match.
    expect(Array.isArray(result)).toBe(true);
    expect(result.length).toBe(1);
  });
});
|
||||
|
||||
// Same scenarios as `findByPatterns`, but asserting that results come back
// as populated NlpSampleFull instances (entities + language resolved).
describe('findByPatternsAndPopulate', () => {
  it('should return populated NlpSampleFull instances for matching patterns', async () => {
    const patterns: NlpValueMatchPattern[] = [
      { entity: 'intent', match: 'value', value: 'greeting' },
    ];

    const result = await nlpSampleService.findByPatternsAndPopulate(
      { filters: {}, patterns },
      undefined,
    );

    expect(Array.isArray(result)).toBe(true);
    expect(result.length).toBeGreaterThan(0);
    // Every returned sample must carry its populated relations.
    result.forEach((sample) => {
      expect(sample).toBeInstanceOf(NlpSampleFull);
      expect(sample.entities).toBeDefined();
      expect(Array.isArray(sample.entities)).toBe(true);
      expect(sample.language).toBeDefined();
    });
  });

  it('should return populated NlpSampleFull without providing patterns', async () => {
    // Empty patterns fall back to repository.findAndPopulate on the filters.
    const result = await nlpSampleService.findByPatternsAndPopulate(
      { filters: { text: /Hello/gi }, patterns: [] },
      undefined,
    );

    expect(Array.isArray(result)).toBe(true);
    expect(result.length).toBe(1);
    expect(result[0]).toBeInstanceOf(NlpSampleFull);
    expect(result[0].entities).toBeDefined();
    expect(Array.isArray(result[0].entities)).toBe(true);
  });

  it('should return an empty array if no samples match the patterns', async () => {
    const patterns: NlpValueMatchPattern[] = [
      { entity: 'intent', match: 'value', value: 'nonexistent' },
    ];

    const result = await nlpSampleService.findByPatternsAndPopulate(
      { filters: {}, patterns },
      undefined,
    );

    expect(Array.isArray(result)).toBe(true);
    expect(result).toHaveLength(0);
  });

  it('should support pagination and projection', async () => {
    const patterns: NlpValueMatchPattern[] = [
      { entity: 'intent', match: 'value', value: 'greeting' },
    ];
    const page: PageQueryDto<NlpSample> = {
      limit: 1,
      skip: 0,
      sort: ['text', 'asc'],
    };

    const result = await nlpSampleService.findByPatternsAndPopulate(
      { filters: {}, patterns },
      page,
    );

    expect(Array.isArray(result)).toBe(true);
    expect(result.length).toBe(1);
  });
});
|
||||
|
||||
// Count variants of the pattern-matching flow; expected numbers come from
// the installed nlpSample fixtures (two 'greeting' samples, one of them
// with text 'Hello').
describe('countByPatterns', () => {
  it('should return the correct count for matching patterns', async () => {
    const patterns: NlpValueMatchPattern[] = [
      { entity: 'intent', match: 'value', value: 'greeting' },
    ];

    jest.spyOn(nlpSampleRepository, 'countByEntities');
    jest.spyOn(nlpValueService, 'findByPatterns');
    const count = await nlpSampleService.countByPatterns({
      filters: {},
      patterns,
    });

    expect(nlpSampleRepository.countByEntities).toHaveBeenCalled();
    expect(nlpValueService.findByPatterns).toHaveBeenCalled();
    expect(typeof count).toBe('number');
    expect(count).toBe(2);
  });

  it('should return the correct count without providing patterns', async () => {
    jest.spyOn(nlpSampleRepository, 'findByEntities');
    jest.spyOn(nlpValueService, 'findByPatterns');
    const count = await nlpSampleService.countByPatterns({
      filters: {},
      patterns: [],
    });

    // With no patterns the service goes straight to repository.count,
    // so neither pattern-resolution path is exercised.
    expect(nlpSampleRepository.findByEntities).not.toHaveBeenCalled();
    expect(nlpValueService.findByPatterns).not.toHaveBeenCalled();
    expect(typeof count).toBe('number');
    expect(count).toBeGreaterThan(2);
  });

  it('should return 0 if no samples match the patterns', async () => {
    const patterns: NlpValueMatchPattern[] = [
      { entity: 'intent', match: 'value', value: 'nonexistent' },
    ];

    const count = await nlpSampleService.countByPatterns({
      filters: {},
      patterns,
    });

    expect(count).toBe(0);
  });

  it('should respect filters (e.g. language)', async () => {
    // NOTE(review): the title mentions language but the filter actually used
    // is `text` — consider renaming the test or filtering by language.
    const patterns: NlpValueMatchPattern[] = [
      { entity: 'intent', match: 'value', value: 'greeting' },
    ];
    const filters = { text: 'Hello' };

    const count = await nlpSampleService.countByPatterns({
      filters,
      patterns,
    });

    expect(typeof count).toBe('number');
    expect(count).toBe(1);
  });
});
|
||||
});
|
||||
|
||||
@@ -12,14 +12,16 @@ import {
|
||||
NotFoundException,
|
||||
} from '@nestjs/common';
|
||||
import { OnEvent } from '@nestjs/event-emitter';
|
||||
import { Document, Query } from 'mongoose';
|
||||
import { Document, ProjectionType, Query } from 'mongoose';
|
||||
import Papa from 'papaparse';
|
||||
|
||||
import { Message } from '@/chat/schemas/message.schema';
|
||||
import { NlpValueMatchPattern } from '@/chat/schemas/types/pattern';
|
||||
import { Language } from '@/i18n/schemas/language.schema';
|
||||
import { LanguageService } from '@/i18n/services/language.service';
|
||||
import { DeleteResult } from '@/utils/generics/base-repository';
|
||||
import { BaseService } from '@/utils/generics/base-service';
|
||||
import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
|
||||
import { TFilterQuery, THydratedDocument } from '@/utils/types/filter.types';
|
||||
|
||||
import { NlpSampleEntityCreateDto } from '../dto/nlp-sample-entity.dto';
|
||||
@@ -35,6 +37,7 @@ import { NlpSampleEntityValue, NlpSampleState } from '../schemas/types';
|
||||
|
||||
import { NlpEntityService } from './nlp-entity.service';
|
||||
import { NlpSampleEntityService } from './nlp-sample-entity.service';
|
||||
import { NlpValueService } from './nlp-value.service';
|
||||
|
||||
@Injectable()
|
||||
export class NlpSampleService extends BaseService<
|
||||
@@ -47,11 +50,126 @@ export class NlpSampleService extends BaseService<
|
||||
readonly repository: NlpSampleRepository,
|
||||
private readonly nlpSampleEntityService: NlpSampleEntityService,
|
||||
private readonly nlpEntityService: NlpEntityService,
|
||||
private readonly nlpValueService: NlpValueService,
|
||||
private readonly languageService: LanguageService,
|
||||
) {
|
||||
super(repository);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve samples that satisfy `filters` **and** reference any entity / value
|
||||
* contained in `patterns`.
|
||||
*
|
||||
* The pattern list is first resolved via `NlpEntityService.findByPatterns`
|
||||
* and `NlpValueService.findByPatterns`, then delegated to
|
||||
* `repository.findByEntities`.
|
||||
*
|
||||
* @param criterias `{ filters, patterns }`
|
||||
* @param page Optional paging / sorting descriptor.
|
||||
* @param projection Optional Mongo projection.
|
||||
* @returns Promise resolving to the matching samples.
|
||||
*/
|
||||
async findByPatterns(
|
||||
{
|
||||
filters,
|
||||
patterns,
|
||||
}: {
|
||||
filters: TFilterQuery<NlpSample>;
|
||||
patterns: NlpValueMatchPattern[];
|
||||
},
|
||||
page?: PageQueryDto<NlpSample>,
|
||||
projection?: ProjectionType<NlpSample>,
|
||||
): Promise<NlpSample[]> {
|
||||
if (!patterns.length) {
|
||||
return await this.repository.find(filters, page, projection);
|
||||
}
|
||||
|
||||
const values = await this.nlpValueService.findByPatterns(patterns);
|
||||
|
||||
if (!values.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return await this.repository.findByEntities(
|
||||
{
|
||||
filters,
|
||||
values,
|
||||
},
|
||||
page,
|
||||
projection,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as `findByPatterns`, but also populates all relations declared
|
||||
* in the repository (`populatePaths`).
|
||||
*
|
||||
* @param criteria `{ filters, patterns }`
|
||||
* @param page Optional paging / sorting descriptor.
|
||||
* @param projection Optional Mongo projection.
|
||||
* @returns Promise resolving to the populated samples.
|
||||
*/
|
||||
async findByPatternsAndPopulate(
|
||||
{
|
||||
filters,
|
||||
patterns,
|
||||
}: {
|
||||
filters: TFilterQuery<NlpSample>;
|
||||
patterns: NlpValueMatchPattern[];
|
||||
},
|
||||
page?: PageQueryDto<NlpSample>,
|
||||
projection?: ProjectionType<NlpSample>,
|
||||
): Promise<NlpSampleFull[]> {
|
||||
if (!patterns.length) {
|
||||
return await this.repository.findAndPopulate(filters, page, projection);
|
||||
}
|
||||
|
||||
const values = await this.nlpValueService.findByPatterns(patterns);
|
||||
|
||||
if (!values.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return await this.repository.findByEntitiesAndPopulate(
|
||||
{
|
||||
filters,
|
||||
values,
|
||||
},
|
||||
page,
|
||||
projection,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Count how many samples satisfy `filters` and reference any entity / value
|
||||
* present in `patterns`.
|
||||
*
|
||||
* @param param0 `{ filters, patterns }`
|
||||
* @returns Promise resolving to the count.
|
||||
*/
|
||||
async countByPatterns({
|
||||
filters,
|
||||
patterns,
|
||||
}: {
|
||||
filters: TFilterQuery<NlpSample>;
|
||||
patterns: NlpValueMatchPattern[];
|
||||
}): Promise<number> {
|
||||
if (!patterns.length) {
|
||||
return await this.repository.count(filters);
|
||||
}
|
||||
|
||||
const values = await this.nlpValueService.findByPatterns(patterns);
|
||||
|
||||
if (!values.length) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return await this.repository.countByEntities({
|
||||
filters,
|
||||
values,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches the samples and entities for a given sample type.
|
||||
*
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
|
||||
import { forwardRef, Inject, Injectable } from '@nestjs/common';
|
||||
|
||||
import { NlpValueMatchPattern } from '@/chat/schemas/types/pattern';
|
||||
import { DeleteResult } from '@/utils/generics/base-repository';
|
||||
import { BaseService } from '@/utils/generics/base-service';
|
||||
import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
|
||||
@@ -42,6 +43,20 @@ export class NlpValueService extends BaseService<
|
||||
super(repository);
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch values whose `value` field matches the patterns provided.
|
||||
*
|
||||
* @param patterns Pattern list
|
||||
* @returns Promise resolving to the matching values.
|
||||
*/
|
||||
async findByPatterns(patterns: NlpValueMatchPattern[]) {
|
||||
return await this.find({
|
||||
value: {
|
||||
$in: patterns.map((p) => p.value),
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes an NLP value by its ID, cascading any dependent data.
|
||||
*
|
||||
|
||||
@@ -19,6 +19,7 @@ import {
|
||||
FlattenMaps,
|
||||
HydratedDocument,
|
||||
Model,
|
||||
PipelineStage,
|
||||
ProjectionType,
|
||||
Query,
|
||||
SortOrder,
|
||||
@@ -31,6 +32,7 @@ import { LoggerService } from '@/logger/logger.service';
|
||||
import {
|
||||
TFilterQuery,
|
||||
TFlattenOption,
|
||||
THydratedDocument,
|
||||
TQueryOptions,
|
||||
} from '@/utils/types/filter.types';
|
||||
|
||||
@@ -81,9 +83,13 @@ export abstract class BaseRepository<
|
||||
U extends Omit<T, keyof BaseSchema> = Omit<T, keyof BaseSchema>,
|
||||
D = Document<T>,
|
||||
> {
|
||||
private readonly transformOpts = { excludePrefixes: ['_', 'password'] };
|
||||
protected readonly transformOpts = { excludePrefixes: ['_', 'password'] };
|
||||
|
||||
private readonly leanOpts = { virtuals: true, defaults: true, getters: true };
|
||||
protected readonly leanOpts = {
|
||||
virtuals: true,
|
||||
defaults: true,
|
||||
getters: true,
|
||||
};
|
||||
|
||||
@Inject(EventEmitter2)
|
||||
readonly eventEmitter: EventEmitter2;
|
||||
@@ -100,15 +106,50 @@ export abstract class BaseRepository<
|
||||
this.registerLifeCycleHooks();
|
||||
}
|
||||
|
||||
/**
 * Determine whether at least one of the requested populate paths
 * is supported by the repository.
 *
 * @param populate Array of path strings supplied by the caller.
 * @returns `true` if any item appears in `this.populatePaths`, else `false`.
 */
canPopulate(populate: string[]): boolean {
  const supportedPaths = new Set<string>(this.populatePaths);
  return populate.some((path) => supportedPaths.has(path));
}
|
||||
|
||||
/**
 * Build the canonical event name used by the repository’s event-bus hooks.
 *
 * Format: `hook:<entity>:<suffix>` where `<entity>` is the lower-cased
 * class name and `<suffix>` is an `EHook` value such as `"preCreate"`.
 *
 * @param suffix Lifecycle-hook suffix.
 * @returns A type-safe event name string.
 */
getEventName(suffix: EHook) {
  return `hook:${this.cls.name.toLocaleLowerCase()}:${suffix}` as `hook:${IHookEntities}:${TNormalizedEvents}`;
}
|
||||
|
||||
/**
|
||||
* Wire all Mongoose lifecycle hooks to the repository’s overridable
|
||||
* `pre-/post-*` methods **and** to the domain event bus.
|
||||
*
|
||||
* For the current repository (`this.cls.name`) the method:
|
||||
* 1. Retrieves the hook definitions from `LifecycleHookManager`.
|
||||
* 2. Registers handlers for:
|
||||
* • `validate.pre / validate.post` → `preCreateValidate` / `postCreateValidate`
|
||||
* • `save.pre / save.post` → `preCreate` / `postCreate`
|
||||
* • `deleteOne.* deleteMany.*` → `preDelete` / `postDelete`
|
||||
* • `findOneAndUpdate.*` → `preUpdate` / `postUpdate`
|
||||
* • `updateMany.*` → `preUpdateMany` / `postUpdateMany`
|
||||
* 3. Emits the corresponding domain events (`EHook.*`) via `eventEmitter`
|
||||
* after each repository callback.
|
||||
*
|
||||
* If no hooks are registered for the current class, a console warning is
|
||||
* issued and the method exits gracefully.
|
||||
*/
|
||||
private registerLifeCycleHooks(): void {
|
||||
const repository = this;
|
||||
const hooks = LifecycleHookManager.getHooks(this.cls.name);
|
||||
@@ -252,6 +293,19 @@ export abstract class BaseRepository<
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a `find`-style query, convert each lean result to `cls`, and return
|
||||
* the transformed list.
|
||||
*
|
||||
* - The query is run with `lean(this.leanOpts)` for performance.
|
||||
* - Each plain object is passed through `plainToClass` using
|
||||
* `this.transformOpts`.
|
||||
*
|
||||
* @template R Result type – typically the populated or base DTO class.
|
||||
* @param query Mongoose query returning an array of documents.
|
||||
* @param cls Constructor used by `plainToClass` for transformation.
|
||||
* @returns Promise resolving to an array of class instances.
|
||||
*/
|
||||
protected async execute<R extends Omit<T, P>>(
|
||||
query: Query<T[], T>,
|
||||
cls: new () => R,
|
||||
@@ -260,6 +314,19 @@ export abstract class BaseRepository<
|
||||
return resultSet.map((doc) => plainToClass(cls, doc, this.transformOpts));
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a single-document query, convert the result to `cls`,
|
||||
* and return it (or `null`).
|
||||
*
|
||||
* - Uses `lean(this.leanOpts)` for performance.
|
||||
* - Falls back to `this.transformOpts` when `options` is not provided.
|
||||
*
|
||||
* @template R Result type – typically the populated or base DTO class.
|
||||
* @param query Mongoose query expected to return one document.
|
||||
* @param cls Constructor used by `plainToClass`.
|
||||
* @param options Optional `ClassTransformOptions` overriding defaults.
|
||||
* @returns Promise resolving to a class instance or `null`.
|
||||
*/
|
||||
protected async executeOne<R extends Omit<T, P>>(
|
||||
query: Query<T | null, T>,
|
||||
cls: new () => R,
|
||||
@@ -269,6 +336,18 @@ export abstract class BaseRepository<
|
||||
return plainToClass(cls, doc, options ?? this.transformOpts);
|
||||
}
|
||||
|
||||
/**
|
||||
* Build a `findOne`/`findById` query.
|
||||
*
|
||||
* - `criteria` may be an `_id` string or any Mongo filter;
|
||||
* an empty / falsy value is **not allowed** (throws).
|
||||
* - Optional `projection` is forwarded unchanged.
|
||||
*
|
||||
* @param criteria Document `_id` **or** Mongo filter.
|
||||
* @param projection Optional Mongo projection.
|
||||
* @throws Error when `criteria` is empty.
|
||||
* @returns Un-executed Mongoose query.
|
||||
*/
|
||||
protected findOneQuery(
|
||||
criteria: string | TFilterQuery<T>,
|
||||
projection?: ProjectionType<T>,
|
||||
@@ -283,11 +362,23 @@ export abstract class BaseRepository<
|
||||
: this.model.findOne<HydratedDocument<T>>(criteria, projection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve a single document and convert it to `this.cls`.
|
||||
*
|
||||
* - Returns `null` immediately when `criteria` is falsy.
|
||||
* - Optional `options` are passed to `plainToClass`.
|
||||
* - Optional `projection` limits returned fields.
|
||||
*
|
||||
* @param criteria Document `_id` **or** Mongo filter.
|
||||
* @param options Class-transform options.
|
||||
* @param projection Optional Mongo projection.
|
||||
* @returns Promise resolving to the found entity or `null`.
|
||||
*/
|
||||
async findOne(
|
||||
criteria: string | TFilterQuery<T>,
|
||||
options?: ClassTransformOptions,
|
||||
projection?: ProjectionType<T>,
|
||||
) {
|
||||
): Promise<T | null> {
|
||||
if (!criteria) {
|
||||
// @TODO : Issue a warning ?
|
||||
return null;
|
||||
@@ -297,6 +388,16 @@ export abstract class BaseRepository<
|
||||
return await this.executeOne(query, this.cls, options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve a single document with all `populatePaths` relations resolved.
|
||||
*
|
||||
* - Throws if population is not configured.
|
||||
* - Returns `null` when nothing matches `criteria`.
|
||||
*
|
||||
* @param criteria Document `_id` **or** Mongo filter.
|
||||
* @param projection Optional Mongo projection.
|
||||
* @returns Promise resolving to the populated entity or `null`.
|
||||
*/
|
||||
async findOneAndPopulate(
|
||||
criteria: string | TFilterQuery<T>,
|
||||
projection?: ProjectionType<T>,
|
||||
@@ -323,6 +424,17 @@ export abstract class BaseRepository<
|
||||
projection?: ProjectionType<T>,
|
||||
): Query<T[], T, object, T, 'find', object>;
|
||||
|
||||
/**
|
||||
* Build an un-executed `find` query with optional pagination, sorting,
|
||||
* and projection.
|
||||
*
|
||||
* The returned query can be further chained or passed to `execute`.
|
||||
*
|
||||
* @param filter Mongo selector for the documents.
|
||||
* @param pageQuery Sort tuple **or** paging object (optional).
|
||||
* @param projection Mongo projection (optional).
|
||||
* @returns A Mongoose `find` query with `skip`, `limit`, and `sort` applied.
|
||||
*/
|
||||
protected findQuery(
|
||||
filter: TFilterQuery<T>,
|
||||
pageQuery?: QuerySortDto<T> | PageQueryDto<T>,
|
||||
@@ -360,6 +472,20 @@ export abstract class BaseRepository<
|
||||
projection?: ProjectionType<T>,
|
||||
): Promise<T[]>;
|
||||
|
||||
/**
|
||||
* Find documents matching `filter`.
|
||||
*
|
||||
* - `pageQuery` may be:
|
||||
* * a **sort descriptor** (`QuerySortDto`) ‒ an array of `[field, dir]`
|
||||
* * a **paging object** (`PageQueryDto`) ‒ `{ limit, skip, sort }`
|
||||
* - Optional `projection` is forwarded to `findQuery`.
|
||||
* - Delegates execution to `this.execute`, mapping raw docs to `this.cls`.
|
||||
*
|
||||
* @param filter Mongo filter selecting documents.
|
||||
* @param pageQuery Sort descriptor **or** paging object.
|
||||
* @param projection Optional Mongo projection.
|
||||
* @returns Promise resolving to the found documents.
|
||||
*/
|
||||
async find(
|
||||
filter: TFilterQuery<T>,
|
||||
pageQuery?: QuerySortDto<T> | PageQueryDto<T>,
|
||||
@@ -374,6 +500,14 @@ export abstract class BaseRepository<
|
||||
return await this.execute(query, this.cls);
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure that population is possible for the current repository.
|
||||
*
|
||||
* Throws when either `populatePaths` or `clsPopulate` is not configured,
|
||||
* preventing accidental calls to population-aware methods.
|
||||
*
|
||||
* @throws Error if population cannot be performed.
|
||||
*/
|
||||
private ensureCanPopulate(): void {
|
||||
if (!this.populatePaths || !this.clsPopulate) {
|
||||
throw new Error('Cannot populate query');
|
||||
@@ -395,6 +529,20 @@ export abstract class BaseRepository<
|
||||
projection?: ProjectionType<T>,
|
||||
): Promise<TFull[]>;
|
||||
|
||||
/**
|
||||
* Find documents that match `filters` and return them with the relations
|
||||
* in `populatePaths` resolved.
|
||||
*
|
||||
* - `pageQuery` can be either a sort descriptor (`QuerySortDto`) or a full
|
||||
* paging object (`PageQueryDto`).
|
||||
* - Optional `projection` is forwarded to `findQuery`.
|
||||
* - Throws if the repository is not configured for population.
|
||||
*
|
||||
* @param filters Mongo filter.
|
||||
* @param pageQuery Sort or paging information.
|
||||
* @param projection Optional Mongo projection.
|
||||
* @returns Promise resolving to the populated documents.
|
||||
*/
|
||||
async findAndPopulate(
|
||||
filters: TFilterQuery<T>,
|
||||
pageQuery?: QuerySortDto<T> | PageQueryDto<T>,
|
||||
@@ -414,16 +562,37 @@ export abstract class BaseRepository<
|
||||
return await this.execute(query, this.clsPopulate!);
|
||||
}
|
||||
|
||||
/**
|
||||
* Build an un-executed query that selects **all** documents,
|
||||
* applies `sort`, and disables pagination (`limit` / `skip` = 0).
|
||||
*
|
||||
* @param sort Optional sort descriptor.
|
||||
* @returns Mongoose `find` query.
|
||||
*/
|
||||
protected findAllQuery(
|
||||
sort?: QuerySortDto<T>,
|
||||
): Query<T[], T, object, T, 'find', object> {
|
||||
return this.findQuery({}, { limit: 0, skip: 0, sort });
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve every document in the collection, optionally sorted.
|
||||
*
|
||||
* @param sort Optional sort descriptor.
|
||||
* @returns Promise resolving to the documents.
|
||||
*/
|
||||
async findAll(sort?: QuerySortDto<T>): Promise<T[]> {
|
||||
return await this.find({}, { limit: 0, skip: 0, sort });
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve every document with all `populatePaths` relations resolved.
|
||||
*
|
||||
* - Throws if population is not configured.
|
||||
*
|
||||
* @param sort Optional sort descriptor.
|
||||
* @returns Promise resolving to the populated documents.
|
||||
*/
|
||||
async findAllAndPopulate(sort?: QuerySortDto<T>): Promise<TFull[]> {
|
||||
this.ensureCanPopulate();
|
||||
const query = this.findAllQuery(sort).populate(this.populatePaths);
|
||||
@@ -468,14 +637,38 @@ export abstract class BaseRepository<
|
||||
return await this.execute(query, this.clsPopulate!);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the total number of documents in the collection
|
||||
* (uses MongoDB’s `estimatedDocumentCount` for speed).
|
||||
*
|
||||
* @returns Promise resolving to the estimated document count.
|
||||
*/
|
||||
async countAll(): Promise<number> {
|
||||
return await this.model.estimatedDocumentCount().exec();
|
||||
}
|
||||
|
||||
/**
|
||||
* Count documents that match the given criteria
|
||||
* (falls back to all documents when `criteria` is omitted).
|
||||
*
|
||||
* @param criteria Optional Mongo filter.
|
||||
* @returns Promise resolving to the exact document count.
|
||||
*/
|
||||
async count(criteria?: TFilterQuery<T>): Promise<number> {
|
||||
return await this.model.countDocuments(criteria).exec();
|
||||
}
|
||||
|
||||
/**
|
||||
* Persist a single document and return it as an instance of `this.cls`.
|
||||
*
|
||||
* Internally:
|
||||
* 1. `model.create()` inserts the raw DTO.
|
||||
* 2. The Mongoose document is converted to a plain object with `leanOpts`.
|
||||
* 3. `plainToClass()` transforms that object into the domain class.
|
||||
*
|
||||
* @param dto Data-transfer object describing the new record.
|
||||
* @returns A hydrated instance of the domain class.
|
||||
*/
|
||||
async create(dto: DtoInfer<DtoAction.Create, Dto, U>): Promise<T> {
|
||||
const doc = await this.model.create(dto);
|
||||
|
||||
@@ -486,6 +679,12 @@ export abstract class BaseRepository<
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Persist an array of documents at once and map each result to `this.cls`.
|
||||
*
|
||||
* @param dtoArray Array of DTOs to insert.
|
||||
* @returns Array of domain-class instances in the same order as `dtoArray`.
|
||||
*/
|
||||
async createMany(
|
||||
dtoArray: DtoInfer<DtoAction.Create, Dto, U>[],
|
||||
): Promise<T[]> {
|
||||
@@ -496,6 +695,21 @@ export abstract class BaseRepository<
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a **single** document and return the modified version.
|
||||
*
|
||||
* Behaviour :
|
||||
* - `criteria` may be an `_id` string or any Mongo filter object.
|
||||
* - `dto` is applied via `$set`; when `options.shouldFlatten` is true the
|
||||
* payload is flattened (e.g. `"a.b": value`) before the update.
|
||||
* - Fires the `pre|postUpdateValidate` hooks + events.
|
||||
* - Throws if nothing matches the criteria or if `dto` is empty.
|
||||
*
|
||||
* @param criteria `_id` or filter selecting the target document.
|
||||
* @param dto Partial update payload.
|
||||
* @param options `new`, `upsert`, `shouldFlatten`, … (forwarded to Mongoose).
|
||||
* @returns The updated document (with `new: true` by default).
|
||||
*/
|
||||
async updateOne<D extends Partial<U>>(
|
||||
criteria: string | TFilterQuery<T>,
|
||||
dto: UpdateQuery<DtoInfer<DtoAction.Update, Dto, D>>,
|
||||
@@ -544,6 +758,18 @@ export abstract class BaseRepository<
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update **many** documents at once.
|
||||
*
|
||||
* - Applies `$set` with the supplied `dto`.
|
||||
* - When `options.shouldFlatten` is true, flattens the payload first.
|
||||
* - Does **not** run the validation / event hooks (use `updateOne` for that).
|
||||
*
|
||||
* @param filter Mongo filter selecting the documents to update.
|
||||
* @param dto Update payload.
|
||||
* @param options `{ shouldFlatten?: boolean }`.
|
||||
* @returns Promise that resolves a MongoDB `UpdateWriteOpResult` describing the operation outcome.
|
||||
*/
|
||||
async updateMany<D extends Partial<U>>(
|
||||
filter: TFilterQuery<T>,
|
||||
dto: UpdateQuery<D>,
|
||||
@@ -554,6 +780,17 @@ export abstract class BaseRepository<
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove **one** document, unless it is marked as `builtin: true`.
|
||||
*
|
||||
* If `criteria` is a string, it is treated as the document’s `_id`;
|
||||
* otherwise it is used as a full Mongo filter.
|
||||
* The filter is automatically augmented with `{ builtin: { $ne: true } }`
|
||||
* to protect built-in records from deletion.
|
||||
*
|
||||
* @param criteria Document `_id` or Mongo filter.
|
||||
* @returns Promise that resolves to Mongo’s `DeleteResult`.
|
||||
*/
|
||||
async deleteOne(criteria: string | TFilterQuery<T>): Promise<DeleteResult> {
|
||||
const filter = typeof criteria === 'string' ? { _id: criteria } : criteria;
|
||||
|
||||
@@ -562,10 +799,25 @@ export abstract class BaseRepository<
|
||||
.exec();
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove **many** documents that match `criteria`, excluding those flagged
|
||||
* with `builtin: true`.
|
||||
*
|
||||
* @param criteria Mongo filter describing the set to delete.
|
||||
* @returns Promise that resolves to Mongo’s `DeleteResult`.
|
||||
*/
|
||||
async deleteMany(criteria: TFilterQuery<T>): Promise<DeleteResult> {
|
||||
return await this.model.deleteMany({ ...criteria, builtin: { $ne: true } });
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs *before* create-validation logic.
|
||||
* Override to perform domain-specific checks; throw to abort.
|
||||
*
|
||||
* @param _doc The document that will be created.
|
||||
* @param _filterCriteria Optional additional criteria (e.g. conditional create).
|
||||
* @param _updates Optional update pipeline when upserting.
|
||||
*/
|
||||
async preCreateValidate(
|
||||
_doc: HydratedDocument<T>,
|
||||
_filterCriteria?: FilterQuery<T>,
|
||||
@@ -574,10 +826,23 @@ export abstract class BaseRepository<
|
||||
// Nothing ...
|
||||
}
|
||||
|
||||
/**
|
||||
* Called *after* create-validation passes,
|
||||
* but before persistence. Override for side-effects (audit logs, events, …).
|
||||
*
|
||||
* @param _validated The validated (not yet saved) document.
|
||||
*/
|
||||
async postCreateValidate(_validated: HydratedDocument<T>): Promise<void> {
|
||||
// Nothing ...
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs *before* validating a single-document update.
|
||||
* Override to enforce custom rules; throw to abort.
|
||||
*
|
||||
* @param _filterCriteria Query criteria used to locate the document.
|
||||
* @param _updates Update payload or aggregation pipeline.
|
||||
*/
|
||||
async preUpdateValidate(
|
||||
_filterCriteria: FilterQuery<T>,
|
||||
_updates: UpdateWithAggregationPipeline | UpdateQuery<T>,
|
||||
@@ -585,6 +850,13 @@ export abstract class BaseRepository<
|
||||
// Nothing ...
|
||||
}
|
||||
|
||||
/**
|
||||
* Called *after* an update payload is validated,
|
||||
* just before it is applied.
|
||||
*
|
||||
* @param _filterCriteria Same criteria passed to the update.
|
||||
* @param _updates The validated update payload.
|
||||
*/
|
||||
async postUpdateValidate(
|
||||
_filterCriteria: FilterQuery<T>,
|
||||
_updates: UpdateWithAggregationPipeline | UpdateQuery<T>,
|
||||
@@ -592,14 +864,33 @@ export abstract class BaseRepository<
|
||||
// Nothing ...
|
||||
}
|
||||
|
||||
/**
|
||||
* Rxecutes immediately before persisting a new document.
|
||||
* Use to inject defaults, timestamps, or derive fields.
|
||||
*
|
||||
* @param _doc The document about to be saved.
|
||||
*/
|
||||
async preCreate(_doc: HydratedDocument<T>): Promise<void> {
|
||||
// Nothing ...
|
||||
}
|
||||
|
||||
/**
|
||||
* Fires right after a document is saved.
|
||||
* Useful for emitting events or refreshing caches.
|
||||
*
|
||||
* @param _created The newly created document.
|
||||
*/
|
||||
async postCreate(_created: HydratedDocument<T>): Promise<void> {
|
||||
// Nothing ...
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs before a `findOneAndUpdate` operation.
|
||||
*
|
||||
* @param _query The Mongoose query object.
|
||||
* @param _criteria Original filter criteria.
|
||||
* @param _updates Update payload or pipeline.
|
||||
*/
|
||||
async preUpdate(
|
||||
_query: Query<D, D, unknown, T, 'findOneAndUpdate'>,
|
||||
_criteria: TFilterQuery<T>,
|
||||
@@ -608,6 +899,13 @@ export abstract class BaseRepository<
|
||||
// Nothing ...
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs before an `updateMany` operation.
|
||||
*
|
||||
* @param _query The Mongoose query object.
|
||||
* @param _criteria Filter criteria.
|
||||
* @param _updates Update payload or pipeline.
|
||||
*/
|
||||
async preUpdateMany(
|
||||
_query: Query<D, D, unknown, T, 'updateMany'>,
|
||||
_criteria: TFilterQuery<T>,
|
||||
@@ -616,6 +914,12 @@ export abstract class BaseRepository<
|
||||
// Nothing ...
|
||||
}
|
||||
|
||||
/**
|
||||
* Fires after an `updateMany` completes.
|
||||
*
|
||||
* @param _query The originating query.
|
||||
* @param _updated Mongoose result object.
|
||||
*/
|
||||
async postUpdateMany(
|
||||
_query: Query<D, D, unknown, T, 'updateMany'>,
|
||||
_updated: any,
|
||||
@@ -623,6 +927,12 @@ export abstract class BaseRepository<
|
||||
// Nothing ...
|
||||
}
|
||||
|
||||
/**
|
||||
* Fires after a `findOneAndUpdate` completes.
|
||||
*
|
||||
* @param _query The originating query.
|
||||
* @param _updated The updated document.
|
||||
*/
|
||||
async postUpdate(
|
||||
_query: Query<D, D, unknown, T, 'findOneAndUpdate'>,
|
||||
_updated: T,
|
||||
@@ -630,6 +940,12 @@ export abstract class BaseRepository<
|
||||
// Nothing ...
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs before a `deleteOne` or `deleteMany`.
|
||||
*
|
||||
* @param _query The Mongoose query object.
|
||||
* @param _criteria Filter criteria.
|
||||
*/
|
||||
async preDelete(
|
||||
_query: Query<DeleteResult, D, unknown, T, 'deleteOne' | 'deleteMany'>,
|
||||
_criteria: TFilterQuery<T>,
|
||||
@@ -637,10 +953,73 @@ export abstract class BaseRepository<
|
||||
// Nothing ...
|
||||
}
|
||||
|
||||
/**
|
||||
* Fires after a `deleteOne` or `deleteMany` completes.
|
||||
*
|
||||
* @param _query The originating query.
|
||||
* @param _result MongoDB `DeleteResult`.
|
||||
*/
|
||||
async postDelete(
|
||||
_query: Query<DeleteResult, D, unknown, T, 'deleteOne' | 'deleteMany'>,
|
||||
_result: DeleteResult,
|
||||
): Promise<void> {
|
||||
// Nothing ...
|
||||
}
|
||||
|
||||
/**
|
||||
* Translate a `PageQueryDto` into MongoDB aggregation stages.
|
||||
*
|
||||
* Creates, in order:
|
||||
* 1. **$sort** – when `page.sort` is provided. Accepts `1 | -1 | 'asc' | 'desc'`
|
||||
* (plus `'ascending' | 'descending'`) and normalises them to `1` or `-1`.
|
||||
* 2. **$skip** – when `page.skip` > 0.
|
||||
* 3. **$limit** – when `page.limit` > 0.
|
||||
*
|
||||
* If `page` is omitted, an empty array is returned so callers can safely
|
||||
* spread the result into a pipeline without extra checks.
|
||||
*
|
||||
* @param page Optional pagination/sort descriptor.
|
||||
* @returns Array of `$sort`, `$skip`, and `$limit` stages in the correct order.
|
||||
*/
|
||||
buildPaginationPipelineStages<T>(page?: PageQueryDto<T>): PipelineStage[] {
|
||||
if (!page) return [];
|
||||
|
||||
const stages: PipelineStage[] = [];
|
||||
|
||||
if (page.sort) {
|
||||
const [field, dir] = page.sort;
|
||||
stages.push({
|
||||
$sort: {
|
||||
[field]:
|
||||
typeof dir === 'number'
|
||||
? dir
|
||||
: ['asc', 'ascending'].includes(dir as string)
|
||||
? 1
|
||||
: -1,
|
||||
} as Record<string, 1 | -1>,
|
||||
});
|
||||
}
|
||||
|
||||
if (page.skip) stages.push({ $skip: page.skip });
|
||||
if (page.limit) stages.push({ $limit: page.limit });
|
||||
|
||||
return stages;
|
||||
}
|
||||
|
||||
/**
|
||||
* Populates the provided Mongoose documents with the relations listed in
|
||||
* `this.populatePaths`, returning lean (plain) objects.
|
||||
*
|
||||
* @param docs Hydrated documents to enrich.
|
||||
* @returns Promise resolving to the populated docs.
|
||||
*/
|
||||
async populate(docs: THydratedDocument<T>[]) {
|
||||
return await this.model.populate(
|
||||
docs,
|
||||
this.populatePaths.map((path) => ({
|
||||
path,
|
||||
options: { lean: true },
|
||||
})),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright © 2024 Hexastack. All rights reserved.
|
||||
* Copyright © 2025 Hexastack. All rights reserved.
|
||||
*
|
||||
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
|
||||
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
|
||||
@@ -28,7 +28,7 @@ export const nlpSampleEntityFixtures: NlpSampleEntityCreateDto[] = [
|
||||
{
|
||||
sample: '2',
|
||||
entity: '0',
|
||||
value: '2',
|
||||
value: '3',
|
||||
},
|
||||
{
|
||||
sample: '3',
|
||||
|
||||
@@ -18,6 +18,7 @@ import { OutgoingMessageFormat } from '@/chat/schemas/types/message';
|
||||
import { BlockOptions, ContentOptions } from '@/chat/schemas/types/options';
|
||||
import { NlpPattern, Pattern } from '@/chat/schemas/types/pattern';
|
||||
import { QuickReplyType } from '@/chat/schemas/types/quick-reply';
|
||||
import { WEB_CHANNEL_NAME } from '@/extensions/channels/web/settings';
|
||||
|
||||
import { modelInstance } from './misc';
|
||||
|
||||
@@ -391,3 +392,10 @@ export const blockCarouselMock = {
|
||||
} as unknown as BlockFull;
|
||||
|
||||
export const blocks: BlockFull[] = [blockGetStarted, blockEmpty];
|
||||
|
||||
export const mockWebChannelData: SubscriberChannelDict[typeof WEB_CHANNEL_NAME] =
|
||||
{
|
||||
isSocket: true,
|
||||
ipAddress: '1.1.1.1',
|
||||
agent: 'Chromium',
|
||||
};
|
||||
|
||||
@@ -491,7 +491,7 @@
|
||||
"original_text": "Original Text",
|
||||
"inputs": "Inputs",
|
||||
"outputs": "Outputs",
|
||||
"any": "- Any -",
|
||||
"any": "Any",
|
||||
"full_name": "First and last name",
|
||||
"password": "Password"
|
||||
},
|
||||
|
||||
@@ -492,7 +492,7 @@
|
||||
"original_text": "Texte par défaut",
|
||||
"inputs": "Ports d'entrée",
|
||||
"outputs": "Ports de sortie",
|
||||
"any": "- Toutes -",
|
||||
"any": "Toutes",
|
||||
"full_name": "Nom et Prénom",
|
||||
"password": "Mot de passe"
|
||||
},
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright © 2024 Hexastack. All rights reserved.
|
||||
* Copyright © 2025 Hexastack. All rights reserved.
|
||||
*
|
||||
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
|
||||
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
|
||||
@@ -17,7 +17,7 @@ import {
|
||||
Typography,
|
||||
useTheme,
|
||||
} from "@mui/material";
|
||||
import Autocomplete from "@mui/material/Autocomplete";
|
||||
import Autocomplete, { AutocompleteProps } from "@mui/material/Autocomplete";
|
||||
import { forwardRef, SyntheticEvent, useRef } from "react";
|
||||
|
||||
import { Input } from "@/app-components/inputs/Input";
|
||||
@@ -30,13 +30,25 @@ import { NlpPattern } from "@/types/block.types";
|
||||
import { INlpEntity } from "@/types/nlp-entity.types";
|
||||
import { INlpValue } from "@/types/nlp-value.types";
|
||||
|
||||
type NlpPatternSelectProps = {
|
||||
interface NlpPatternSelectProps
|
||||
extends Omit<
|
||||
AutocompleteProps<INlpEntity, true, true, false>,
|
||||
| "onChange"
|
||||
| "value"
|
||||
| "options"
|
||||
| "multiple"
|
||||
| "disabled"
|
||||
| "renderTags"
|
||||
| "renderOptions"
|
||||
| "renderInput"
|
||||
> {
|
||||
patterns: NlpPattern[];
|
||||
onChange: (patterns: NlpPattern[]) => void;
|
||||
};
|
||||
noneLabel?: string;
|
||||
}
|
||||
|
||||
const NlpPatternSelect = (
|
||||
{ patterns, onChange }: NlpPatternSelectProps,
|
||||
{ patterns, onChange, noneLabel = "", ...props }: NlpPatternSelectProps,
|
||||
ref,
|
||||
) => {
|
||||
const inputRef = useRef(null);
|
||||
@@ -80,23 +92,29 @@ const NlpPatternSelect = (
|
||||
valueId: string,
|
||||
): void => {
|
||||
const newSelection = patterns.slice(0);
|
||||
const update = newSelection.find(({ entity: e }) => e === name);
|
||||
const idx = newSelection.findIndex(({ entity: e }) => e === name);
|
||||
|
||||
if (!update) {
|
||||
if (idx === -1) {
|
||||
throw new Error("Unable to find nlp entity");
|
||||
}
|
||||
|
||||
if (valueId === id) {
|
||||
update.match = "entity";
|
||||
update.value = name;
|
||||
newSelection[idx] = {
|
||||
entity: newSelection[idx].entity,
|
||||
match: "entity",
|
||||
};
|
||||
} else {
|
||||
const value = getNlpValueFromCache(valueId);
|
||||
|
||||
if (!value) {
|
||||
throw new Error("Unable to find nlp value in cache");
|
||||
}
|
||||
update.match = "value";
|
||||
update.value = value.value;
|
||||
|
||||
newSelection[idx] = {
|
||||
entity: newSelection[idx].entity,
|
||||
match: "value",
|
||||
value: value.value,
|
||||
};
|
||||
}
|
||||
|
||||
onChange(newSelection);
|
||||
@@ -108,16 +126,17 @@ const NlpPatternSelect = (
|
||||
);
|
||||
}
|
||||
|
||||
const defaultValue =
|
||||
options.filter(({ name }) =>
|
||||
patterns.find(({ entity: entityName }) => entityName === name),
|
||||
) || {};
|
||||
const defaultValue = patterns
|
||||
.map(({ entity: entityName }) =>
|
||||
options.find(({ name }) => entityName === name),
|
||||
)
|
||||
.filter(Boolean) as INlpEntity[];
|
||||
|
||||
return (
|
||||
<Autocomplete
|
||||
ref={ref}
|
||||
{...props}
|
||||
size="medium"
|
||||
fullWidth={true}
|
||||
disabled={options.length === 0}
|
||||
value={defaultValue}
|
||||
multiple={true}
|
||||
@@ -172,9 +191,9 @@ const NlpPatternSelect = (
|
||||
const nlpValues = values.map((vId) =>
|
||||
getNlpValueFromCache(vId),
|
||||
) as INlpValue[];
|
||||
const selectedValue = patterns.find(
|
||||
(e) => e.entity === name,
|
||||
)?.value;
|
||||
const currentPattern = patterns.find((e) => e.entity === name);
|
||||
const selectedValue =
|
||||
currentPattern?.match === "value" ? currentPattern.value : null;
|
||||
const { id: selectedId = id } =
|
||||
nlpValues.find(({ value }) => value === selectedValue) || {};
|
||||
|
||||
@@ -193,7 +212,7 @@ const NlpPatternSelect = (
|
||||
}
|
||||
|
||||
if (option === id) {
|
||||
return t("label.any");
|
||||
return `- ${noneLabel || t("label.any")} -`;
|
||||
}
|
||||
|
||||
return option;
|
||||
|
||||
@@ -75,7 +75,7 @@ export const DataGrid = <T extends GridValidRowModel = any>({
|
||||
slots={slots}
|
||||
slotProps={{
|
||||
loadingOverlay: {
|
||||
variant: "linear-progress",
|
||||
variant: "skeleton",
|
||||
noRowsVariant: "skeleton",
|
||||
},
|
||||
}}
|
||||
|
||||
@@ -32,6 +32,7 @@ import AutoCompleteEntitySelect from "@/app-components/inputs/AutoCompleteEntity
|
||||
import FileUploadButton from "@/app-components/inputs/FileInput";
|
||||
import { FilterTextfield } from "@/app-components/inputs/FilterTextfield";
|
||||
import { Input } from "@/app-components/inputs/Input";
|
||||
import NlpPatternSelect from "@/app-components/inputs/NlpPatternSelect";
|
||||
import {
|
||||
ActionColumnLabel,
|
||||
getActionsColumn,
|
||||
@@ -51,6 +52,7 @@ import { useSearch } from "@/hooks/useSearch";
|
||||
import { useToast } from "@/hooks/useToast";
|
||||
import { useTranslate } from "@/hooks/useTranslate";
|
||||
import { EntityType, Format } from "@/services/types";
|
||||
import { NlpPattern } from "@/types/block.types";
|
||||
import { ILanguage } from "@/types/language.types";
|
||||
import {
|
||||
INlpDatasetSample,
|
||||
@@ -79,6 +81,7 @@ export default function NlpSample() {
|
||||
const queryClient = useQueryClient();
|
||||
const [type, setType] = useState<NlpSampleType | "all">("all");
|
||||
const [language, setLanguage] = useState<string | undefined>(undefined);
|
||||
const [patterns, setPatterns] = useState<NlpPattern[]>([]);
|
||||
const hasPermission = useHasPermission();
|
||||
const getNlpEntityFromCache = useGetFromCache(EntityType.NLP_ENTITY);
|
||||
const getNlpValueFromCache = useGetFromCache(EntityType.NLP_VALUE);
|
||||
@@ -86,11 +89,17 @@ export default function NlpSample() {
|
||||
EntityType.NLP_SAMPLE_ENTITY,
|
||||
);
|
||||
const getLanguageFromCache = useGetFromCache(EntityType.LANGUAGE);
|
||||
const { onSearch, searchPayload, searchText } = useSearch<INlpSample>(
|
||||
const { onSearch, searchPayload, searchText } = useSearch<
|
||||
INlpSample & { patterns: NlpPattern[] }
|
||||
>(
|
||||
{
|
||||
$eq: [
|
||||
...(type !== "all" ? [{ type }] : []),
|
||||
...(language ? [{ language }] : []),
|
||||
// We send only value match patterns
|
||||
...(patterns
|
||||
? [{ patterns: patterns.filter(({ match }) => match === "value") }]
|
||||
: []),
|
||||
],
|
||||
$iLike: ["text"],
|
||||
},
|
||||
@@ -212,6 +221,7 @@ export default function NlpSample() {
|
||||
{row.entities
|
||||
.map((e) => getSampleEntityFromCache(e) as INlpSampleEntity)
|
||||
.filter((e) => !!e)
|
||||
.sort((a, b) => String(a.entity).localeCompare(String(b.entity)))
|
||||
.map((entity) => (
|
||||
<ChipEntity
|
||||
key={entity.id}
|
||||
@@ -425,6 +435,21 @@ export default function NlpSample() {
|
||||
</Button>
|
||||
</ButtonGroup>
|
||||
</Grid>
|
||||
<Grid
|
||||
container
|
||||
display="flex"
|
||||
flexDirection="row"
|
||||
gap={2}
|
||||
direction="row"
|
||||
mt={2}
|
||||
>
|
||||
<NlpPatternSelect
|
||||
patterns={patterns}
|
||||
onChange={setPatterns}
|
||||
fullWidth={true}
|
||||
noneLabel={t("label.select")}
|
||||
/>
|
||||
</Grid>
|
||||
</Grid>
|
||||
|
||||
<Grid mt={3}>
|
||||
|
||||
@@ -103,6 +103,7 @@ const PatternInput: FC<PatternInputProps> = ({
|
||||
<NlpPatternSelect
|
||||
patterns={pattern as NlpPattern[]}
|
||||
onChange={setPattern}
|
||||
fullWidth={true}
|
||||
/>
|
||||
)}
|
||||
{["payload", "content", "menu"].includes(patternType) ? (
|
||||
|
||||
@@ -56,24 +56,27 @@ export const useFind = <
|
||||
entity,
|
||||
);
|
||||
const getFromCache = useGetFromCache(entity);
|
||||
const { data: total } = useCount(entity, params["where"], {
|
||||
const countQuery = useCount(entity, params["where"], {
|
||||
enabled: hasCount,
|
||||
});
|
||||
const { dataGridPaginationProps, pageQueryPayload } = usePagination(
|
||||
total?.count,
|
||||
countQuery.data?.count,
|
||||
initialPaginationState,
|
||||
initialSortState,
|
||||
hasCount,
|
||||
);
|
||||
const normalizedParams = { ...pageQueryPayload, ...(params || {}) };
|
||||
const enabled = !!total || !hasCount;
|
||||
const enabled = !!countQuery.data || !hasCount;
|
||||
const { data: ids, ...normalizedQuery } = useQuery({
|
||||
enabled,
|
||||
queryFn: async () => {
|
||||
const data = await api.find(
|
||||
normalizedParams,
|
||||
format === Format.FULL && (POPULATE_BY_TYPE[entity] as P),
|
||||
);
|
||||
const data =
|
||||
!hasCount || (hasCount && !!countQuery.data?.count)
|
||||
? await api.find(
|
||||
normalizedParams,
|
||||
format === Format.FULL && (POPULATE_BY_TYPE[entity] as P),
|
||||
)
|
||||
: [];
|
||||
const { result } = normalizeAndCache(data);
|
||||
|
||||
return result;
|
||||
@@ -100,7 +103,11 @@ export const useFind = <
|
||||
dataGridProps: {
|
||||
...dataGridPaginationProps,
|
||||
rows: data || [],
|
||||
loading: normalizedQuery.isLoading || normalizedQuery.isFetching,
|
||||
loading:
|
||||
normalizedQuery.isLoading ||
|
||||
normalizedQuery.isFetching ||
|
||||
countQuery.isLoading ||
|
||||
countQuery.isFetching,
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
@@ -68,12 +68,19 @@ export interface PayloadPattern {
|
||||
type?: PayloadType;
|
||||
}
|
||||
|
||||
export type NlpPattern = {
|
||||
export type NlpEntityMatchPattern = {
|
||||
entity: string;
|
||||
match: "value" | "entity";
|
||||
match: "entity";
|
||||
};
|
||||
|
||||
export type NlpValueMatchPattern = {
|
||||
entity: string;
|
||||
match: "value";
|
||||
value: string;
|
||||
};
|
||||
|
||||
export type NlpPattern = NlpEntityMatchPattern | NlpValueMatchPattern;
|
||||
|
||||
export type Pattern = null | string | PayloadPattern | NlpPattern[];
|
||||
|
||||
export type PatternType =
|
||||
|
||||
Reference in New Issue
Block a user