Merge branch 'main' into 754-issue---frontend-packages-vulnerabilities-issue

This commit is contained in:
yassinedorbozgithub
2025-06-12 18:38:05 +01:00
69 changed files with 2667 additions and 620 deletions

4
api/package-lock.json generated
View File

@@ -1,12 +1,12 @@
{
"name": "hexabot",
"version": "2.2.8",
"version": "2.2.9",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "hexabot",
"version": "2.2.8",
"version": "2.2.9",
"hasInstallScript": true,
"license": "AGPL-3.0-only",
"dependencies": {

View File

@@ -1,6 +1,6 @@
{
"name": "hexabot",
"version": "2.2.8",
"version": "2.2.9",
"description": "Hexabot is a solution for creating and managing chatbots across multiple channels, leveraging AI for advanced conversational capabilities. It provides a user-friendly interface for building, training, and deploying chatbots with integrated support for various messaging platforms.",
"author": "Hexastack",
"license": "AGPL-3.0-only",

View File

@@ -1,5 +1,5 @@
/*
* Copyright © 2024 Hexastack. All rights reserved.
* Copyright © 2025 Hexastack. All rights reserved.
*
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
@@ -22,6 +22,7 @@ export enum BotStatsType {
new_conversations = 'new_conversations',
returning_users = 'returning_users',
retention = 'retention',
echo = 'echo',
}
export type ToLinesType = {

View File

@@ -1,19 +1,16 @@
/*
* Copyright © 2024 Hexastack. All rights reserved.
* Copyright © 2025 Hexastack. All rights reserved.
*
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
*/
import { SettingCreateDto } from '@/setting/dto/setting.dto';
import { ExtensionSetting } from '@/setting/schemas/types';
import { HyphenToUnderscore } from '@/utils/types/extension';
export type ChannelName = `${string}-channel`;
export type ChannelSetting<N extends string = string> = Omit<
SettingCreateDto,
'group' | 'weight'
> & {
export type ChannelSetting<N extends string = string> = ExtensionSetting<{
group: HyphenToUnderscore<N>;
};
}>;

View File

@@ -0,0 +1,17 @@
/*
* Copyright © 2025 Hexastack. All rights reserved.
*
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
*/
import { FallbackOptions } from '../schemas/types/options';
/**
 * Builds the default fallback configuration applied when a block does not
 * define its own `options.fallback`.
 *
 * @returns A disabled fallback: inactive, no messages, zero allowed attempts.
 */
export function getDefaultFallbackOptions(): FallbackOptions {
  const defaults: FallbackOptions = {
    active: false,
    message: [],
    max_attempts: 0,
  };

  return defaults;
}

View File

@@ -0,0 +1,28 @@
/*
* Copyright © 2025 Hexastack. All rights reserved.
*
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
*/
import { Subscriber } from '../schemas/subscriber.schema';
import { Context } from '../schemas/types/context';
/**
 * Builds a fresh, empty conversation context used when a new conversation
 * is started.
 *
 * @returns A context with no captured vars, a blank subscriber placeholder,
 * a zeroed geolocation, empty pagination state and a reset attempt counter.
 */
export function getDefaultConversationContext(): Context {
  // Minimal subscriber placeholder
  // @TODO: Typing is not correct
  const blankUser = {
    first_name: '',
    last_name: '',
  } as Subscriber;

  return {
    vars: {}, // Captured vars from user entries
    user: blankUser,
    user_location: { lat: 0.0, lon: 0.0 }, // Geolocation captured from QR
    skip: {}, // List pagination state
    attempt: 0, // Local fallback attempt counter
  };
}

View File

@@ -139,7 +139,7 @@ export class SubscriberRepository extends BaseRepository<
* @returns The found subscriber entity with populated fields.
*/
async findOneByForeignIdAndPopulate(id: string): Promise<SubscriberFull> {
const query = this.findByForeignIdQuery(id).populate(this.populate);
const query = this.findByForeignIdQuery(id).populate(this.populatePaths);
const [result] = await this.execute(query, SubscriberFull);
return result;
}

View File

@@ -17,27 +17,12 @@ import {
THydratedDocument,
} from '@/utils/types/filter.types';
import { getDefaultConversationContext } from '../constants/conversation';
import { Block } from './block.schema';
import { Subscriber } from './subscriber.schema';
import { Context } from './types/context';
export function getDefaultConversationContext(): Context {
return {
vars: {}, // Used for capturing vars from user entries
user: {
first_name: '',
last_name: '',
} as Subscriber,
user_location: {
// Used for capturing geolocation from QR
lat: 0.0,
lon: 0.0,
},
skip: {}, // Used for list pagination
attempt: 0, // Used to track fallback max attempts
};
}
@Schema({ timestamps: true, minimize: false })
class ConversationStub extends BaseSchema {
@Prop({

View File

@@ -29,16 +29,18 @@ export const contentOptionsSchema = z.object({
export type ContentOptions = z.infer<typeof contentOptionsSchema>;
export const fallbackOptionsSchema = z.object({
active: z.boolean(),
message: z.array(z.string()),
max_attempts: z.number().finite(),
});
export type FallbackOptions = z.infer<typeof fallbackOptionsSchema>;
export const BlockOptionsSchema = z.object({
typing: z.number().optional(),
content: contentOptionsSchema.optional(),
fallback: z
.object({
active: z.boolean(),
message: z.array(z.string()),
max_attempts: z.number().finite(),
})
.optional(),
fallback: fallbackOptionsSchema.optional(),
assignTo: z.string().optional(),
effects: z.array(z.string()).optional(),
});

View File

@@ -18,19 +18,27 @@ export const payloadPatternSchema = z.object({
export type PayloadPattern = z.infer<typeof payloadPatternSchema>;
export const nlpEntityMatchPatternSchema = z.object({
entity: z.string(),
match: z.literal('entity'),
});
export type NlpEntityMatchPattern = z.infer<typeof nlpEntityMatchPatternSchema>;
export const nlpValueMatchPatternSchema = z.object({
entity: z.string(),
match: z.literal('value'),
value: z.string(),
});
export type NlpValueMatchPattern = z.infer<typeof nlpValueMatchPatternSchema>;
export const nlpPatternSchema = z.discriminatedUnion('match', [
z.object({
entity: z.string(),
match: z.literal('entity'),
}),
z.object({
entity: z.string(),
match: z.literal('value'),
value: z.string(),
}),
nlpEntityMatchPatternSchema,
nlpValueMatchPatternSchema,
]);
export type NlpPattern = z.infer<typeof nlpPatternSchema>;
export type NlpPattern = NlpEntityMatchPattern | NlpValueMatchPattern;
export const stringRegexPatternSchema = z.string().refine(
(value) => {

View File

@@ -65,6 +65,7 @@ import {
mockNlpGreetingNamePatterns,
mockNlpGreetingPatterns,
mockNlpGreetingWrongNamePatterns,
mockWebChannelData,
} from '@/utils/test/mocks/block';
import {
contextBlankInstance,
@@ -288,11 +289,7 @@ describe('BlockService', () => {
text: 'Hello',
},
},
{
isSocket: true,
ipAddress: '1.1.1.1',
agent: 'Chromium',
},
mockWebChannelData,
);
const webEventGetStarted = new WebEventWrapper(
handlerMock,
@@ -303,11 +300,18 @@ describe('BlockService', () => {
payload: 'GET_STARTED',
},
},
mockWebChannelData,
);
const webEventAmbiguous = new WebEventWrapper(
handlerMock,
{
isSocket: true,
ipAddress: '1.1.1.1',
agent: 'Chromium',
type: Web.IncomingMessageType.text,
data: {
text: "It's not a yes or no answer!",
},
},
mockWebChannelData,
);
it('should return undefined when no blocks are provided', async () => {
@@ -332,6 +336,24 @@ describe('BlockService', () => {
expect(result).toEqual(blockGetStarted);
});
it('should return undefined when multiple matches are not allowed', async () => {
const result = await blockService.match(
[
{
...blockEmpty,
patterns: ['/yes/'],
},
{
...blockEmpty,
patterns: ['/no/'],
},
],
webEventAmbiguous,
false,
);
expect(result).toEqual(undefined);
});
it('should match block with payload', async () => {
webEventGetStarted.setSender(subscriberWithLabels);
const result = await blockService.match(blocks, webEventGetStarted);

View File

@@ -12,7 +12,6 @@ import { OnEvent } from '@nestjs/event-emitter';
import EventWrapper from '@/channel/lib/EventWrapper';
import { ChannelName } from '@/channel/types';
import { ContentService } from '@/cms/services/content.service';
import { CONSOLE_CHANNEL_NAME } from '@/extensions/channels/console/settings';
import { NLU } from '@/helper/types';
import { I18nService } from '@/i18n/services/i18n.service';
import { LanguageService } from '@/i18n/services/language.service';
@@ -24,6 +23,7 @@ import { FALLBACK_DEFAULT_NLU_PENALTY_FACTOR } from '@/utils/constants/nlp';
import { BaseService } from '@/utils/generics/base-service';
import { getRandomElement } from '@/utils/helpers/safeRandom';
import { getDefaultFallbackOptions } from '../constants/block';
import { BlockDto } from '../dto/block.dto';
import { EnvelopeFactory } from '../helpers/envelope-factory';
import { BlockRepository } from '../repositories/block.repository';
@@ -41,6 +41,7 @@ import {
StdOutgoingEnvelope,
StdOutgoingSystemEnvelope,
} from '../schemas/types/message';
import { FallbackOptions } from '../schemas/types/options';
import { NlpPattern, PayloadPattern } from '../schemas/types/pattern';
import { Payload } from '../schemas/types/quick-reply';
import { SubscriberContext } from '../schemas/types/subscriberContext';
@@ -65,68 +66,66 @@ export class BlockService extends BaseService<
}
/**
* Filters an array of blocks based on the specified channel.
* Checks if block is supported on the specified channel.
*
* This function ensures that only blocks that are either:
* - Not restricted to specific trigger channels (`trigger_channels` is undefined or empty), or
* - Explicitly allow the given channel (or the console channel)
*
* are included in the returned array.
*
* @param blocks - The list of blocks to be filtered.
* @param block - The block
* @param channel - The name of the channel to filter blocks by.
*
* @returns The filtered array of blocks that are allowed for the given channel.
* @returns Whether the block is supported on the given channel.
*/
filterBlocksByChannel<B extends Block | BlockFull>(
blocks: B[],
isChannelSupported<B extends Block | BlockFull>(
block: B,
channel: ChannelName,
) {
return blocks.filter((b) => {
return (
!b.trigger_channels ||
b.trigger_channels.length === 0 ||
[...b.trigger_channels, CONSOLE_CHANNEL_NAME].includes(channel)
);
});
return (
!block.trigger_channels ||
block.trigger_channels.length === 0 ||
block.trigger_channels.includes(channel)
);
}
/**
* Filters an array of blocks based on subscriber labels.
* Checks if the block matches the subscriber labels, allowing for two scenarios:
* - Has no trigger labels (making it applicable to all subscribers), or
* - Contains at least one trigger label that matches a label from the provided list.
*
* This function selects blocks that either:
* - Have no trigger labels (making them applicable to all subscribers), or
* - Contain at least one trigger label that matches a label from the provided list.
*
* The filtered blocks are then **sorted** in descending order by the number of trigger labels,
* ensuring that blocks with more specific targeting (more trigger labels) are prioritized.
*
* @param blocks - The list of blocks to be filtered.
* @param block - The block to check.
* @param labels - The list of subscriber labels to match against.
* @returns The filtered and sorted list of blocks.
* @returns True if the block matches the subscriber labels, false otherwise.
*/
filterBlocksBySubscriberLabels<B extends Block | BlockFull>(
blocks: B[],
profile?: Subscriber,
matchesSubscriberLabels<B extends Block | BlockFull>(
block: B,
subscriber?: Subscriber,
) {
if (!profile) {
return blocks;
if (!subscriber || !subscriber.labels) {
return true; // No subscriber or labels to match against
}
return (
blocks
.filter((b) => {
const triggerLabels = b.trigger_labels.map((l) =>
typeof l === 'string' ? l : l.id,
);
return (
triggerLabels.length === 0 ||
triggerLabels.some((l) => profile.labels.includes(l))
);
})
// Priority goes to block who target users with labels
.sort((a, b) => b.trigger_labels.length - a.trigger_labels.length)
const triggerLabels = block.trigger_labels.map((l: string | Label) =>
typeof l === 'string' ? l : l.id,
);
return (
triggerLabels.length === 0 ||
triggerLabels.some((l) => subscriber.labels.includes(l))
);
}
/**
* Retrieves the configured NLU penalty factor from settings, or falls back to a default value.
*
* @returns The NLU penalty factor as a number.
*/
private async getPenaltyFactor(): Promise<number> {
  const settings = await this.settingService.getSettings();
  // A nullish check (== null) so that a configured value of 0 is honored.
  const configured = settings.chatbot_settings?.default_nlu_penalty_factor;

  if (configured != null) {
    return configured;
  }

  // No value configured: warn once and use the built-in default.
  this.logger.warn(
    'Using fallback NLU penalty factor value: %s',
    FALLBACK_DEFAULT_NLU_PENALTY_FACTOR,
  );

  return FALLBACK_DEFAULT_NLU_PENALTY_FACTOR;
}
/**
@@ -134,75 +133,88 @@ export class BlockService extends BaseService<
*
* @param filteredBlocks blocks Starting/Next blocks in the conversation flow
* @param event Received channel's message
* @param canHaveMultipleMatches Whether to allow multiple matches for the same event
* (eg. Yes/No question to which the answer is ambiguous "Sometimes yes, sometimes no")
*
* @returns The block that matches
*/
async match(
blocks: BlockFull[],
event: EventWrapper<any, any>,
canHaveMultipleMatches = true,
): Promise<BlockFull | undefined> {
if (!blocks.length) {
return undefined;
}
// Search for block matching a given event
let block: BlockFull | undefined = undefined;
const payload = event.getPayload();
// Narrow the search space
const channelName = event.getHandler().getName();
const sender = event.getSender();
const candidates = blocks.filter(
(b) =>
this.isChannelSupported(b, channelName) &&
this.matchesSubscriberLabels(b, sender),
);
// Perform a filter to get the candidates blocks
const filteredBlocks = this.filterBlocksBySubscriberLabels(
this.filterBlocksByChannel(blocks, event.getHandler().getName()),
event.getSender(),
if (!candidates.length) {
return undefined;
}
// Priority goes to block who target users with labels
const prioritizedCandidates = candidates.sort(
(a, b) => b.trigger_labels.length - a.trigger_labels.length,
);
// Perform a payload match & pick last createdAt
const payload = event.getPayload();
if (payload) {
block = filteredBlocks
.filter((b) => {
return this.matchPayload(payload, b);
})
.shift();
}
if (!block) {
// Perform a text match (Text or Quick reply)
const text = event.getText().trim();
// Perform a text pattern match
block = filteredBlocks
.filter((b) => {
return this.matchText(text, b);
})
.shift();
// Perform an NLP Match
const nlp = event.getNLP();
if (!block && nlp) {
const scoredEntities =
await this.nlpService.computePredictionScore(nlp);
const settings = await this.settingService.getSettings();
let penaltyFactor =
settings.chatbot_settings?.default_nlu_penalty_factor;
if (!penaltyFactor) {
this.logger.warn(
'Using fallback NLU penalty factor value: %s',
FALLBACK_DEFAULT_NLU_PENALTY_FACTOR,
);
penaltyFactor = FALLBACK_DEFAULT_NLU_PENALTY_FACTOR;
}
if (scoredEntities.entities.length > 0) {
block = this.matchBestNLP(
filteredBlocks,
scoredEntities,
penaltyFactor,
);
}
const payloadMatches = prioritizedCandidates.filter((b) => {
return this.matchPayload(payload, b);
});
if (payloadMatches.length > 1 && !canHaveMultipleMatches) {
// If the payload matches multiple blocks ,
// we return undefined so that we trigger the local fallback
return undefined;
} else if (payloadMatches.length > 0) {
// If we have a payload match, we return the first one
// (which is the most recent one due to the sort)
// and we don't check for text or NLP matches
return payloadMatches[0];
}
}
return block;
// Perform a text match (Text or Quick reply)
const text = event.getText().trim();
if (text) {
const textMatches = prioritizedCandidates.filter((b) => {
return this.matchText(text, b);
});
if (textMatches.length > 1 && !canHaveMultipleMatches) {
// If the text matches multiple blocks (especially regex),
// we return undefined so that we trigger the local fallback
return undefined;
} else if (textMatches.length > 0) {
return textMatches[0];
}
}
// Perform an NLP Match
const nlp = event.getNLP();
if (nlp) {
const scoredEntities = await this.nlpService.computePredictionScore(nlp);
if (scoredEntities.entities.length) {
const penaltyFactor = await this.getPenaltyFactor();
return this.matchBestNLP(
prioritizedCandidates,
scoredEntities,
penaltyFactor,
);
}
}
return undefined;
}
/**
@@ -501,11 +513,19 @@ export class BlockService extends BaseService<
envelope: StdOutgoingSystemEnvelope,
) {
// Perform a filter to get the candidates blocks
const filteredBlocks = this.filterBlocksBySubscriberLabels(
this.filterBlocksByChannel(blocks, event.getHandler().getName()),
event.getSender(),
const handlerName = event.getHandler().getName();
const sender = event.getSender();
const candidates = blocks.filter(
(b) =>
this.isChannelSupported(b, handlerName) &&
this.matchesSubscriberLabels(b, sender),
);
return filteredBlocks.find((b) => {
if (!candidates.length) {
return undefined;
}
return candidates.find((b) => {
return b.patterns
.filter(
(p) => typeof p === 'object' && 'type' in p && p.type === 'outcome',
@@ -757,6 +777,16 @@ export class BlockService extends BaseService<
throw new Error('Invalid message format.');
}
/**
* Retrieves the fallback options for a block.
*
* @param block - The block to retrieve fallback options from.
* @returns The fallback options for the block, or default options if not specified.
*/
getFallbackOptions<T extends BlockStub>(block: T): FallbackOptions {
  // Prefer the block's own fallback configuration; fall back to the
  // shared defaults when it is absent (null/undefined).
  const configured = block.options?.fallback;

  return configured != null ? configured : getDefaultFallbackOptions();
}
/**
* Updates the `trigger_labels` and `assign_labels` fields of a block when a label is deleted.
*

View File

@@ -51,6 +51,13 @@ import { SettingService } from '@/setting/services/setting.service';
import { installBlockFixtures } from '@/utils/test/fixtures/block';
import { installContentFixtures } from '@/utils/test/fixtures/content';
import { installSubscriberFixtures } from '@/utils/test/fixtures/subscriber';
import {
buttonsBlock,
mockWebChannelData,
quickRepliesBlock,
textBlock,
} from '@/utils/test/mocks/block';
import { conversationGetStarted } from '@/utils/test/mocks/conversation';
import {
closeInMongodConnection,
rootMongooseTestModule,
@@ -88,6 +95,7 @@ import { SubscriberService } from './subscriber.service';
describe('BotService', () => {
let blockService: BlockService;
let subscriberService: SubscriberService;
let conversationService: ConversationService;
let botService: BotService;
let handler: WebChannelHandler;
let eventEmitter: EventEmitter2;
@@ -192,163 +200,543 @@ describe('BotService', () => {
},
],
});
[subscriberService, botService, blockService, eventEmitter, handler] =
await getMocks([
SubscriberService,
BotService,
BlockService,
EventEmitter2,
WebChannelHandler,
]);
[
subscriberService,
conversationService,
botService,
blockService,
eventEmitter,
handler,
] = await getMocks([
SubscriberService,
ConversationService,
BotService,
BlockService,
EventEmitter2,
WebChannelHandler,
]);
});
afterEach(jest.clearAllMocks);
afterAll(closeInMongodConnection);
it('should start a conversation', async () => {
const triggeredEvents: any[] = [];
eventEmitter.on('hook:stats:entry', (...args) => {
triggeredEvents.push(args);
describe('startConversation', () => {
afterAll(() => {
jest.restoreAllMocks();
});
const event = new WebEventWrapper(handler, webEventText, {
isSocket: false,
ipAddress: '1.1.1.1',
agent: 'Chromium',
});
it('should start a conversation', async () => {
const triggeredEvents: any[] = [];
const [block] = await blockService.findAndPopulate({ patterns: ['Hi'] });
const webSubscriber = (await subscriberService.findOne({
foreign_id: 'foreign-id-web-1',
}))!;
eventEmitter.on('hook:stats:entry', (...args) => {
triggeredEvents.push(args);
});
event.setSender(webSubscriber);
let hasBotSpoken = false;
const clearMock = jest
.spyOn(botService, 'triggerBlock')
.mockImplementation(
(
actualEvent: WebEventWrapper<typeof WEB_CHANNEL_NAME>,
actualConversation: Conversation,
actualBlock: BlockFull,
isFallback: boolean,
) => {
expect(actualConversation).toEqualPayload({
sender: webSubscriber.id,
active: true,
next: [],
context: {
user: {
first_name: webSubscriber.first_name,
last_name: webSubscriber.last_name,
language: 'en',
id: webSubscriber.id,
},
user_location: {
lat: 0,
lon: 0,
},
skip: {},
vars: {},
nlp: null,
payload: null,
attempt: 0,
channel: 'web-channel',
text: webEventText.data.text,
},
});
expect(actualEvent).toEqual(event);
expect(actualBlock).toEqual(block);
expect(isFallback).toEqual(false);
hasBotSpoken = true;
},
const event = new WebEventWrapper(
handler,
webEventText,
mockWebChannelData,
);
await botService.startConversation(event, block);
expect(hasBotSpoken).toEqual(true);
expect(triggeredEvents).toEqual([
['popular', 'hasNextBlocks'],
['new_conversations', 'New conversations'],
]);
clearMock.mockClear();
});
const [block] = await blockService.findAndPopulate({ patterns: ['Hi'] });
const webSubscriber = (await subscriberService.findOne({
foreign_id: 'foreign-id-web-1',
}))!;
it('should capture a conversation', async () => {
const triggeredEvents: any[] = [];
event.setSender(webSubscriber);
eventEmitter.on('hook:stats:entry', (...args) => {
triggeredEvents.push(args);
});
const event = new WebEventWrapper(handler, webEventText, {
isSocket: false,
ipAddress: '1.1.1.1',
agent: 'Chromium',
});
const webSubscriber = (await subscriberService.findOne({
foreign_id: 'foreign-id-web-1',
}))!;
event.setSender(webSubscriber);
const clearMock = jest
.spyOn(botService, 'handleIncomingMessage')
.mockImplementation(
async (
actualConversation: ConversationFull,
event: WebEventWrapper<typeof WEB_CHANNEL_NAME>,
) => {
expect(actualConversation).toEqualPayload({
next: [],
sender: webSubscriber,
active: true,
context: {
user: {
first_name: webSubscriber.first_name,
last_name: webSubscriber.last_name,
language: 'en',
id: webSubscriber.id,
let hasBotSpoken = false;
const clearMock = jest
.spyOn(botService, 'triggerBlock')
.mockImplementation(
(
actualEvent: WebEventWrapper<typeof WEB_CHANNEL_NAME>,
actualConversation: Conversation,
actualBlock: BlockFull,
isFallback: boolean,
) => {
expect(actualConversation).toEqualPayload({
sender: webSubscriber.id,
active: true,
next: [],
context: {
user: {
first_name: webSubscriber.first_name,
last_name: webSubscriber.last_name,
language: 'en',
id: webSubscriber.id,
},
user_location: {
lat: 0,
lon: 0,
},
skip: {},
vars: {},
nlp: null,
payload: null,
attempt: 0,
channel: 'web-channel',
text: webEventText.data.text,
},
user_location: { lat: 0, lon: 0 },
vars: {},
skip: {},
nlp: null,
payload: null,
attempt: 0,
channel: 'web-channel',
text: webEventText.data.text,
},
});
expect(event).toEqual(event);
return true;
},
);
const captured = await botService.processConversationMessage(event);
expect(captured).toBe(true);
expect(triggeredEvents).toEqual([
['existing_conversations', 'Existing conversations'],
]);
clearMock.mockClear();
});
expect(actualEvent).toEqual(event);
expect(actualBlock).toEqual(block);
expect(isFallback).toEqual(false);
hasBotSpoken = true;
},
);
await botService.startConversation(event, block);
expect(hasBotSpoken).toEqual(true);
expect(triggeredEvents).toEqual([
['popular', 'hasNextBlocks'],
['new_conversations', 'New conversations'],
]);
clearMock.mockClear();
});
});
it('has no active conversation', async () => {
const triggeredEvents: any[] = [];
eventEmitter.on('hook:stats:entry', (...args) => {
triggeredEvents.push(args);
describe('processConversationMessage', () => {
afterAll(() => {
jest.restoreAllMocks();
});
const event = new WebEventWrapper(handler, webEventText, {
isSocket: false,
ipAddress: '1.1.1.1',
agent: 'Chromium',
});
const webSubscriber = (await subscriberService.findOne({
foreign_id: 'foreign-id-web-2',
}))!;
event.setSender(webSubscriber);
const captured = await botService.processConversationMessage(event);
expect(captured).toBe(false);
expect(triggeredEvents).toEqual([]);
it('has no active conversation', async () => {
const triggeredEvents: any[] = [];
eventEmitter.on('hook:stats:entry', (...args) => {
triggeredEvents.push(args);
});
const event = new WebEventWrapper(
handler,
webEventText,
mockWebChannelData,
);
const webSubscriber = (await subscriberService.findOne({
foreign_id: 'foreign-id-web-2',
}))!;
event.setSender(webSubscriber);
const captured = await botService.processConversationMessage(event);
expect(captured).toBe(false);
expect(triggeredEvents).toEqual([]);
});
it('should capture a conversation', async () => {
const triggeredEvents: any[] = [];
eventEmitter.on('hook:stats:entry', (...args) => {
triggeredEvents.push(args);
});
const event = new WebEventWrapper(
handler,
webEventText,
mockWebChannelData,
);
const webSubscriber = (await subscriberService.findOne({
foreign_id: 'foreign-id-web-1',
}))!;
event.setSender(webSubscriber);
jest
.spyOn(botService, 'handleOngoingConversationMessage')
.mockImplementation(() => Promise.resolve(true));
const captured = await botService.processConversationMessage(event);
expect(captured).toBe(true);
expect(triggeredEvents).toEqual([
['existing_conversations', 'Existing conversations'],
]);
});
});
describe('proceedToNextBlock', () => {
const mockEvent = new WebEventWrapper(
handler,
webEventText,
mockWebChannelData,
);
afterAll(() => {
jest.restoreAllMocks();
});
it('should emit stats and call triggerBlock, returning true on success and reset attempt if not fallback', async () => {
const mockConvo = {
...conversationGetStarted,
id: 'convo1',
context: { attempt: 2 },
next: [],
sender: 'user1',
active: true,
} as unknown as ConversationFull;
const next = { id: 'block1', name: 'Block 1' } as BlockFull;
const fallback = false;
jest
.spyOn(conversationService, 'storeContextData')
.mockImplementation(() => {
return Promise.resolve(mockConvo as unknown as Conversation);
});
jest.spyOn(botService, 'triggerBlock').mockResolvedValue(undefined);
const emitSpy = jest.spyOn(eventEmitter, 'emit');
const result = await botService.proceedToNextBlock(
mockConvo,
next,
mockEvent,
fallback,
);
expect(emitSpy).toHaveBeenCalledWith(
'hook:stats:entry',
'popular',
next.name,
);
expect(botService.triggerBlock).toHaveBeenCalledWith(
mockEvent,
expect.objectContaining({ id: 'convo1' }),
next,
fallback,
);
expect(result).toBe(true);
expect(mockConvo.context.attempt).toBe(0);
});
it('should increment attempt if fallback is true', async () => {
const mockConvo = {
...conversationGetStarted,
id: 'convo2',
context: { attempt: 1 },
next: [],
sender: 'user2',
active: true,
} as unknown as ConversationFull;
const next = { id: 'block2', name: 'Block 2' } as any;
const fallback = true;
const result = await botService.proceedToNextBlock(
mockConvo,
next,
mockEvent,
fallback,
);
expect(mockConvo.context.attempt).toBe(2);
expect(result).toBe(true);
});
it('should handle errors and emit conversation:end, returning false', async () => {
const mockConvo = {
...conversationGetStarted,
id: 'convo3',
context: { attempt: 1 },
next: [],
sender: 'user3',
active: true,
} as unknown as ConversationFull;
const next = { id: 'block3', name: 'Block 3' } as any;
const fallback = false;
jest
.spyOn(conversationService, 'storeContextData')
.mockRejectedValue(new Error('fail'));
const emitSpy = jest.spyOn(eventEmitter, 'emit');
const result = await botService.proceedToNextBlock(
mockConvo,
next,
mockEvent,
fallback,
);
expect(emitSpy).toHaveBeenCalledWith('hook:conversation:end', mockConvo);
expect(result).toBe(false);
});
});
describe('handleOngoingConversationMessage', () => {
const mockConvo = {
...conversationGetStarted,
id: 'convo1',
context: { ...conversationGetStarted.context, attempt: 0 },
next: [{ id: 'block1' }],
current: {
...conversationGetStarted.current,
id: 'block0',
options: {
...conversationGetStarted.current.options,
fallback: {
active: true,
max_attempts: 2,
message: [],
},
},
},
} as unknown as ConversationFull;
const mockEvent = new WebEventWrapper(
handler,
webEventText,
mockWebChannelData,
);
beforeAll(() => {
jest.clearAllMocks();
});
afterAll(() => {
jest.clearAllMocks();
});
it('should proceed to the matched next block', async () => {
const matchedBlock = {
...textBlock,
id: 'block1',
name: 'Block 1',
} as BlockFull;
jest
.spyOn(blockService, 'findAndPopulate')
.mockResolvedValue([matchedBlock]);
jest.spyOn(blockService, 'match').mockResolvedValue(matchedBlock);
jest.spyOn(botService, 'proceedToNextBlock').mockResolvedValue(true);
const result = await botService.handleOngoingConversationMessage(
mockConvo,
mockEvent,
);
expect(blockService.findAndPopulate).toHaveBeenCalled();
expect(blockService.match).toHaveBeenCalled();
expect(botService.proceedToNextBlock).toHaveBeenCalled();
expect(result).toBe(true);
});
it('should proceed to fallback block if no match and fallback is allowed', async () => {
jest.spyOn(blockService, 'findAndPopulate').mockResolvedValue([]);
jest.spyOn(blockService, 'match').mockResolvedValue(undefined);
const proceedSpy = jest
.spyOn(botService, 'proceedToNextBlock')
.mockResolvedValue(true);
const result = await botService.handleOngoingConversationMessage(
mockConvo,
mockEvent,
);
expect(proceedSpy).toHaveBeenCalledWith(
mockConvo,
expect.objectContaining({ id: 'block0', nextBlocks: mockConvo.next }),
mockEvent,
true,
);
expect(result).toBe(true);
});
it('should end conversation and return false if no match and fallback not allowed', async () => {
const mockConvoWithoutFallback = {
...mockConvo,
current: {
...mockConvo.current,
options: {
...mockConvo.current.options,
fallback: {
active: false,
max_attempts: 2,
message: [],
},
},
},
} as unknown as ConversationFull;
jest.spyOn(blockService, 'findAndPopulate').mockResolvedValue([]);
jest.spyOn(blockService, 'match').mockResolvedValue(undefined);
const emitSpy = jest.spyOn(eventEmitter, 'emit');
const result = await botService.handleOngoingConversationMessage(
mockConvoWithoutFallback,
mockEvent,
);
expect(emitSpy).toHaveBeenCalledWith(
'hook:conversation:end',
mockConvoWithoutFallback,
);
expect(result).toBe(false);
});
it('should end conversation and throw if an error occurs', async () => {
jest
.spyOn(blockService, 'findAndPopulate')
.mockRejectedValue(new Error('fail'));
const emitSpy = jest.spyOn(eventEmitter, 'emit');
await expect(
botService.handleOngoingConversationMessage(mockConvo, mockEvent),
).rejects.toThrow('fail');
expect(emitSpy).toHaveBeenCalledWith('hook:conversation:end', mockConvo);
});
});
describe('shouldAttemptLocalFallback', () => {
  // A plain-text web event: local fallback only applies to `message` events.
  const mockEvent = new WebEventWrapper(
    handler,
    webEventText,
    mockWebChannelData,
  );

  beforeAll(() => {
    jest.resetAllMocks();
  });

  afterAll(() => {
    jest.resetAllMocks();
  });

  it('should return true when fallback is active and max attempts not reached', () => {
    const result = botService.shouldAttemptLocalFallback(
      {
        ...conversationGetStarted,
        // 1 attempt consumed out of a budget of 3.
        context: { ...conversationGetStarted.context, attempt: 1 },
        current: {
          ...conversationGetStarted.current,
          options: {
            fallback: {
              active: true,
              max_attempts: 3,
              message: ['Please pick an option.'],
            },
          },
        },
      },
      mockEvent,
    );

    expect(result).toBe(true);
  });

  it('should return false when fallback is not active', () => {
    const result = botService.shouldAttemptLocalFallback(
      {
        ...conversationGetStarted,
        context: { ...conversationGetStarted.context, attempt: 1 },
        current: {
          ...conversationGetStarted.current,
          options: {
            // Fallback disabled: attempts are irrelevant.
            fallback: {
              active: false,
              max_attempts: 0,
              message: [],
            },
          },
        },
      },
      mockEvent,
    );

    expect(result).toBe(false);
  });

  it('should return false when max attempts reached', () => {
    const result = botService.shouldAttemptLocalFallback(
      {
        ...conversationGetStarted,
        // attempt === max_attempts → budget exhausted (strict `<` comparison).
        context: { ...conversationGetStarted.context, attempt: 3 },
        current: {
          ...conversationGetStarted.current,
          options: {
            fallback: {
              active: true,
              max_attempts: 3,
              message: ['Please pick an option.'],
            },
          },
        },
      },
      mockEvent,
    );

    expect(result).toBe(false);
  });

  it('should return false when fallback options are missing', () => {
    const result = botService.shouldAttemptLocalFallback(
      {
        ...conversationGetStarted,
        current: {
          ...conversationGetStarted.current,
          // No `fallback` key at all — must be treated as "inactive".
          options: {},
        },
      },
      mockEvent,
    );

    expect(result).toBe(false);
  });
});
describe('findNextMatchingBlock', () => {
  const mockEvent = new WebEventWrapper(
    handler,
    webEventText,
    mockWebChannelData,
  );

  beforeAll(() => {
    jest.resetAllMocks();
  });

  afterAll(() => {
    jest.resetAllMocks();
  });

  // NOTE(review): `blockService.findAndPopulate` is NOT mocked in this suite
  // (resetAllMocks clears earlier spies), so the real service backed by test
  // fixtures is exercised — confirm that is intentional.
  it('should return a matching block if one is found and fallback is not active', async () => {
    // With the matcher stubbed, this mainly asserts the wiring/pass-through.
    jest.spyOn(blockService, 'match').mockResolvedValue(buttonsBlock);

    const result = await botService.findNextMatchingBlock(
      {
        ...conversationGetStarted,
        current: {
          ...conversationGetStarted.current,
          options: {
            fallback: {
              active: false,
              message: [],
              max_attempts: 0,
            },
          },
        },
        // Reduce populated fixture blocks to their stored (id-based) shape.
        next: [quickRepliesBlock, buttonsBlock].map((b) => ({
          ...b,
          trigger_labels: b.trigger_labels.map(({ id }) => id),
          assign_labels: b.assign_labels.map(({ id }) => id),
          nextBlocks: [],
          attachedBlock: null,
          category: null,
          previousBlocks: undefined,
          attachedToBlock: undefined,
        })),
      },
      mockEvent,
    );

    expect(result).toBe(buttonsBlock);
  });

  it('should return undefined if no matching block is found', async () => {
    jest.spyOn(blockService, 'match').mockResolvedValue(undefined);

    const result = await botService.findNextMatchingBlock(
      {
        ...conversationGetStarted,
        current: {
          ...conversationGetStarted.current,
          options: {
            fallback: {
              active: true,
              message: ['Please pick an option.'],
              max_attempts: 1,
            },
          },
        },
      },
      mockEvent,
    );

    expect(result).toBeUndefined();
  });
});
});

View File

@@ -14,19 +14,17 @@ import EventWrapper from '@/channel/lib/EventWrapper';
import { LoggerService } from '@/logger/logger.service';
import { SettingService } from '@/setting/services/setting.service';
import { getDefaultConversationContext } from '../constants/conversation';
import { MessageCreateDto } from '../dto/message.dto';
import { BlockFull } from '../schemas/block.schema';
import {
Conversation,
ConversationFull,
getDefaultConversationContext,
} from '../schemas/conversation.schema';
import { Conversation, ConversationFull } from '../schemas/conversation.schema';
import { Context } from '../schemas/types/context';
import {
IncomingMessageType,
OutgoingMessageFormat,
StdOutgoingMessageEnvelope,
} from '../schemas/types/message';
import { BlockOptions, FallbackOptions } from '../schemas/types/options';
import { BlockService } from './block.service';
import { ConversationService } from './conversation.service';
@@ -243,6 +241,97 @@ export class BotService {
}
}
/**
 * Handles advancing the conversation to the specified *next* block.
 *
 * 1. Updates "popular blocks" stats.
 * 2. Persists the updated conversation context.
 * 3. Triggers the next block.
 * 4. Ends the conversation if an unrecoverable error occurs.
 *
 * @param convo - The ongoing conversation (context + state).
 * @param next - The block to advance to (matched block or local fallback).
 * @param event - The incoming event that triggered the flow.
 * @param fallback - Whether `next` is a local-fallback re-prompt rather than
 * a genuine match; controls the attempt counter and variable capture.
 * @returns `true` when the block was triggered, `false` when an error forced
 * the conversation to end (the error itself is logged, not rethrown).
 */
async proceedToNextBlock(
  convo: ConversationFull,
  next: BlockFull,
  event: EventWrapper<any, any>,
  fallback: boolean,
): Promise<boolean> {
  // Increment stats about popular blocks (fire-and-forget, done before the
  // context write so stats are counted even if persistence fails).
  this.eventEmitter.emit('hook:stats:entry', BotStatsType.popular, next.name);

  this.logger.debug(
    'Proceeding to next block ',
    next.id,
    ' for conversation ',
    convo.id,
  );

  try {
    // A fallback consumes one attempt; a real match resets the counter.
    convo.context.attempt = fallback ? convo.context.attempt + 1 : 0;

    const updatedConversation =
      await this.conversationService.storeContextData(
        convo,
        next,
        event,
        // If this is a local fallback then we don't capture vars, so that a
        // previously captured value is not clobbered by the re-prompt input.
        !fallback,
      );

    await this.triggerBlock(event, updatedConversation, next, fallback);
    return true;
  } catch (err) {
    // Persisting or triggering failed: end the conversation gracefully.
    this.logger.error('Unable to proceed to the next block!', err);
    this.eventEmitter.emit('hook:conversation:end', convo);
    return false;
  }
}
/**
 * Looks up, among the conversation's `next` blocks, the one that matches the
 * incoming event.
 *
 * Candidate blocks are re-fetched in populated form so their relations
 * (nextBlocks, trigger/assign labels, ...) are available to the matcher.
 *
 * @param convo - The current conversation object containing context and state.
 * @param event - The incoming event that triggered the conversation flow.
 * @returns A promise resolving to the matched block, or `undefined` when no
 * candidate matches.
 */
async findNextMatchingBlock(
  convo: ConversationFull,
  event: EventWrapper<any, any>,
): Promise<BlockFull | undefined> {
  const fallbackOptions: FallbackOptions =
    this.blockService.getFallbackOptions(convo.current);

  // Multiple matches are only tolerated when local fallback is disabled,
  // i.e. we avoid ambiguous matches mid-conversation once fallback is on.
  const allowMultipleMatches = !fallbackOptions?.active;

  // Re-fetch the candidate blocks with their relations populated.
  const candidateIds = convo.next.map((block) => block.id);
  const candidates = await this.blockService.findAndPopulate({
    _id: { $in: candidateIds },
  });

  return await this.blockService.match(
    candidates,
    event,
    allowMultipleMatches,
  );
}
/**
 * Decides whether a local fallback (re-prompt) should be attempted, based on
 * the event type, the current block's fallback options, and how many attempts
 * have already been made.
 *
 * @param convo - The current conversation object containing context and state.
 * @param event - The incoming event that triggered the conversation flow.
 * @returns `true` when a local fallback should be attempted.
 */
shouldAttemptLocalFallback(
  convo: ConversationFull,
  event: EventWrapper<any, any>,
): boolean {
  const fallbackOptions = this.blockService.getFallbackOptions(convo.current);

  // Local fallback only ever applies to plain `message` events.
  if (event.getMessageType() !== IncomingMessageType.message) {
    return false;
  }

  // Fallback must be enabled on the current block.
  if (!fallbackOptions?.active) {
    return false;
  }

  // And the retry budget must not be exhausted yet.
  return convo.context.attempt < (fallbackOptions.max_attempts ?? 0);
}
/**
* Processes and responds to an incoming message within an ongoing conversation flow.
* Determines the next block in the conversation, attempts to match the message with available blocks,
@@ -253,39 +342,40 @@ export class BotService {
*
* @returns A promise that resolves with a boolean indicating whether the conversation is active and a matching block was found.
*/
async handleIncomingMessage(
async handleOngoingConversationMessage(
convo: ConversationFull,
event: EventWrapper<any, any>,
) {
const nextIds = convo.next.map(({ id }) => id);
// Reload blocks in order to populate his nextBlocks
// nextBlocks & trigger/assign _labels
try {
const nextBlocks = await this.blockService.findAndPopulate({
_id: { $in: nextIds },
});
let fallback = false;
const fallbackOptions = convo.current?.options?.fallback
const currentBlock = convo.current;
const fallbackOptions: BlockOptions['fallback'] = convo.current?.options
?.fallback
? convo.current.options.fallback
: {
active: false,
max_attempts: 0,
message: [],
};
// We will avoid having multiple matches when we are not at the start of a conversation
// and only if local fallback is enabled
const canHaveMultipleMatches = !fallbackOptions.active;
// Find the next block that matches
const matchedBlock = await this.blockService.match(nextBlocks, event);
const nextBlocks = await this.blockService.findAndPopulate({
_id: { $in: convo.next.map(({ id }) => id) },
});
const matchedBlock = await this.blockService.match(
nextBlocks,
event,
canHaveMultipleMatches,
);
// If there is no match in next block then loopback (current fallback)
// This applies only to text messages + there's a max attempt to be specified
let fallbackBlock: BlockFull | undefined;
if (
!matchedBlock &&
event.getMessageType() === IncomingMessageType.message &&
fallbackOptions.active &&
convo.context.attempt < fallbackOptions.max_attempts
) {
let fallbackBlock: BlockFull | undefined = undefined;
if (!matchedBlock && this.shouldAttemptLocalFallback(convo, event)) {
// Trigger block fallback
// NOTE : current is not populated, this may cause some anomaly
const currentBlock = convo.current;
fallbackBlock = {
...currentBlock,
nextBlocks: convo.next,
@@ -296,11 +386,7 @@ export class BotService {
category: null,
previousBlocks: [],
};
convo.context.attempt++;
fallback = true;
} else {
convo.context.attempt = 0;
fallbackBlock = undefined;
}
const next = matchedBlock || fallbackBlock;
@@ -308,30 +394,8 @@ export class BotService {
this.logger.debug('Responding ...', convo.id);
if (next) {
// Increment stats about popular blocks
this.eventEmitter.emit(
'hook:stats:entry',
BotStatsType.popular,
next.name,
);
// Go next!
this.logger.debug('Respond to nested conversion! Go next ', next.id);
try {
const updatedConversation =
await this.conversationService.storeContextData(
convo,
next,
event,
// If this is a local fallback then we don't capture vars
// Otherwise, old captured const value may be replaced by another const value
!fallback,
);
await this.triggerBlock(event, updatedConversation, next, fallback);
} catch (err) {
this.logger.error('Unable to store context data!', err);
return this.eventEmitter.emit('hook:conversation:end', convo);
}
return true;
// Proceed to the execution of the next block
return await this.proceedToNextBlock(convo, next, event, fallback);
} else {
// Conversation is still active, but there's no matching block to call next
// We'll end the conversation but this message is probably lost in time and space.
@@ -376,7 +440,7 @@ export class BotService {
'Existing conversations',
);
this.logger.debug('Conversation has been captured! Responding ...');
return await this.handleIncomingMessage(conversation, event);
return await this.handleOngoingConversationMessage(conversation, event);
} catch (err) {
this.logger.error(
'An error occurred when searching for a conversation ',

View File

@@ -0,0 +1,23 @@
/*
* Copyright © 2025 Hexastack. All rights reserved.
*
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
*/
import { registerDecorator, ValidationOptions } from 'class-validator';
import { UniqueFieldNamesConstraint } from '../validators/validate-unique-names.validator';
/**
 * Property decorator asserting that an array of content fields contains no
 * duplicates; the actual check is delegated to
 * {@link UniqueFieldNamesConstraint}.
 *
 * @param validationOptions - Standard class-validator options
 * (message, groups, each, ...).
 */
export function UniqueFieldNames(validationOptions?: ValidationOptions) {
  return (targetObject: Record<string, any>, propertyName: string): void => {
    registerDecorator({
      target: targetObject.constructor,
      propertyName,
      options: validationOptions,
      constraints: [],
      validator: UniqueFieldNamesConstraint,
    });
  };
}

View File

@@ -14,7 +14,6 @@ import {
IsNotEmpty,
IsOptional,
IsString,
Matches,
Validate,
ValidateNested,
} from 'class-validator';
@@ -22,12 +21,12 @@ import {
import { FieldType } from '@/setting/schemas/types';
import { DtoConfig } from '@/utils/types/dto.types';
import { UniqueFieldNames } from '../decorators/unique-field-names.decorator';
import { ValidateRequiredFields } from '../validators/validate-required-fields.validator';
export class ContentField {
@IsString()
@IsNotEmpty()
@Matches(/^[a-z][a-z_0-9]*$/)
name: string;
@IsString()
@@ -58,6 +57,7 @@ export class ContentTypeCreateDto {
@ValidateNested({ each: true })
@Validate(ValidateRequiredFields)
@Type(() => ContentField)
@UniqueFieldNames()
fields?: ContentField[];
}

View File

@@ -7,13 +7,13 @@
*/
import { ModelDefinition, Prop, Schema, SchemaFactory } from '@nestjs/mongoose';
import mongoose from 'mongoose';
import { FieldType } from '@/setting/schemas/types';
import { BaseSchema } from '@/utils/generics/base-schema';
import { LifecycleHookManager } from '@/utils/generics/lifecycle-hook-manager';
import { ContentField } from '../dto/contentType.dto';
import { validateUniqueFields } from '../utilities/field-validation.utils';
@Schema({ timestamps: true })
export class ContentType extends BaseSchema {
@@ -28,7 +28,7 @@ export class ContentType extends BaseSchema {
*/
@Prop({
type: mongoose.Schema.Types.Mixed,
type: [ContentField],
default: [
{
name: 'title',
@@ -41,6 +41,19 @@ export class ContentType extends BaseSchema {
type: FieldType.checkbox,
},
],
required: true,
validate: {
/**
* Ensures every `label` in the fields array is unique.
* Runs on `save`, `create`, `insertMany`, and `findOneAndUpdate`
* when `runValidators: true` is set.
*/
validator(fields: ContentField[]): boolean {
return validateUniqueFields(fields, 'label');
},
message:
'Each element in "fields" must have a unique "label" (duplicate detected)',
},
})
fields: ContentField[];
}

View File

@@ -0,0 +1,13 @@
/*
* Copyright © 2025 Hexastack. All rights reserved.
*
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
*/
/**
 * Checks that every element of `fields` holds a distinct value under the
 * given key.
 *
 * Note: the previous implementation asserted each value `as string`; the cast
 * was a compile-time-only claim (values are not necessarily strings) and had
 * no runtime effect, so it has been dropped — `Set` deduplicates with
 * SameValueZero semantics regardless of the value type.
 *
 * @param fields - Array of objects to inspect (an empty array is valid).
 * @param fieldName - Key whose values must be unique across the array.
 * @returns `true` when no two elements share a value for `fieldName`.
 */
export const validateUniqueFields = <T>(
  fields: T[],
  fieldName: keyof T,
): boolean => new Set(fields.map((f) => f[fieldName])).size === fields.length;

View File

@@ -0,0 +1,29 @@
/*
* Copyright © 2025 Hexastack. All rights reserved.
*
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
*/
import {
ValidationArguments,
ValidatorConstraint,
ValidatorConstraintInterface,
} from 'class-validator';
import { ContentField } from '../dto/contentType.dto';
import { validateUniqueFields } from '../utilities/field-validation.utils';
@ValidatorConstraint({ async: false })
export class UniqueFieldNamesConstraint
  implements ValidatorConstraintInterface
{
  /**
   * Returns `true` when every content field carries a unique `label`.
   *
   * NOTE(review): despite the "FieldNames" naming, uniqueness is checked on
   * `label` (mirroring the mongoose schema validator for ContentType.fields)
   * — confirm `label` rather than `name` is the intended key.
   */
  validate(fields: ContentField[], _args: ValidationArguments) {
    return validateUniqueFields(fields, 'label');
  }

  /**
   * Default error message shown when validation fails.
   *
   * Fixed: the message previously contradicted itself by referring to both
   * duplicate "label" values and "field.name"; it now consistently names the
   * key the validator actually checks.
   */
  defaultMessage(args: ValidationArguments) {
    return `${args.property} contains duplicate "label" values; each field.label must be unique`;
  }
}

View File

@@ -126,10 +126,10 @@ export default abstract class BaseWebChannelHandler<
try {
const menu = await this.menuService.getTree();
return client.emit('settings', { menu, ...settings });
client.emit('settings', { menu, ...settings });
} catch (err) {
this.logger.warn('Unable to retrieve menu ', err);
return client.emit('settings', settings);
client.emit('settings', settings);
}
} catch (err) {
this.logger.error('Unable to initiate websocket connection', err);

View File

@@ -161,8 +161,13 @@ export class HelperService {
}
const settings = await this.settingService.getSettings();
const defaultHelperKey = `default_${type}_helper`;
if (!(defaultHelperKey in settings.chatbot_settings)) {
throw new Error(`Default ${type.toUpperCase()} helper setting not found`);
}
const defaultHelperName = settings.chatbot_settings[
`default_${type}_helper` as any
defaultHelperKey
] as HelperName;
const defaultHelper = this.get<T>(type, defaultHelperName);

View File

@@ -0,0 +1,52 @@
/*
* Copyright © 2025 Hexastack. All rights reserved.
*
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
*/
import EventWrapper from '@/channel/lib/EventWrapper';
import { BlockStub } from '@/chat/schemas/block.schema';
import { LoggerService } from '@/logger/logger.service';
import { SettingService } from '@/setting/services/setting.service';
import { HelperService } from '../helper.service';
import { FlowEscape, HelperName, HelperType } from '../types';
import BaseHelper from './base-helper';
/**
 * Base class for "flow escape" helpers: extensions that decide what to do
 * when an incoming event escapes the expected flow of the current block
 * (see {@link FlowEscape.AdjudicationResult} for the possible outcomes).
 *
 * Concrete helpers must implement {@link canHandleFlowEscape} and
 * {@link adjudicate}.
 */
export default abstract class BaseFlowEscapeHelper<
  N extends HelperName = HelperName,
> extends BaseHelper<N> {
  // Registers this helper under the FLOW_ESCAPE category in the registry.
  protected readonly type: HelperType = HelperType.FLOW_ESCAPE;

  constructor(
    name: N,
    settingService: SettingService,
    helperService: HelperService,
    logger: LoggerService,
  ) {
    super(name, settingService, helperService, logger);
  }

  /**
   * Checks if the helper can handle the flow escape for the given block message.
   *
   * @param _blockMessage - The block message to check.
   * @returns - Whether the helper can handle the flow escape for the given block message.
   */
  abstract canHandleFlowEscape<T extends BlockStub>(_blockMessage: T): boolean;

  /**
   * Adjudicates the flow escape event.
   *
   * @param _event - The event wrapper containing the event data.
   * @param _block - The block associated with the event.
   * @returns - A promise that resolves to a FlowEscape.AdjudicationResult.
   */
  abstract adjudicate<T extends BlockStub>(
    _event: EventWrapper<any, any>,
    _block: T,
  ): Promise<FlowEscape.AdjudicationResult>;
}

View File

@@ -6,9 +6,10 @@
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
*/
import { SettingCreateDto } from '@/setting/dto/setting.dto';
import { ExtensionSetting } from '@/setting/schemas/types';
import { HyphenToUnderscore } from '@/utils/types/extension';
import BaseFlowEscapeHelper from './lib/base-flow-escape-helper';
import BaseHelper from './lib/base-helper';
import BaseLlmHelper from './lib/base-llm-helper';
import BaseNlpHelper from './lib/base-nlp-helper';
@@ -93,9 +94,31 @@ export namespace LLM {
}
}
/**
 * Types describing how a flow-escape helper adjudicates an event that falls
 * outside the current block's expected flow.
 */
export namespace FlowEscape {
  /** Possible outcomes of a flow-escape adjudication. */
  export enum Action {
    // Ask the user again, optionally with a custom re-prompt message.
    REPROMPT = 're_prompt',
    // Treat the free-form input as one of the block's options.
    COERCE = 'coerce_to_option',
    // Abandon the current flow and start over in a new context.
    NEW_CTX = 'new_context',
  }

  /** Discriminated union keyed on `action`. */
  export type AdjudicationResult =
    | {
        action: Action.COERCE;
        // The option value the input was coerced to.
        coercedOption: string;
      }
    | {
        action: Action.REPROMPT;
        // Optional override for the re-prompt message.
        repromptMessage?: string;
      }
    | {
        action: Action.NEW_CTX;
      };
}
/**
 * Categories under which helpers are registered and looked up
 * (see HelperTypeMap for the base class associated with each category).
 */
export enum HelperType {
  NLU = 'nlu',
  LLM = 'llm',
  FLOW_ESCAPE = 'flow_escape',
  STORAGE = 'storage',
  UTIL = 'util',
}
@@ -105,6 +128,7 @@ export type HelperName = `${string}-helper`;
interface HelperTypeMap {
[HelperType.NLU]: BaseNlpHelper<HelperName>;
[HelperType.LLM]: BaseLlmHelper<HelperName>;
[HelperType.FLOW_ESCAPE]: BaseFlowEscapeHelper<HelperName>;
[HelperType.STORAGE]: BaseStorageHelper<HelperName>;
[HelperType.UTIL]: BaseHelper;
}
@@ -116,9 +140,7 @@ export type HelperRegistry<H extends BaseHelper = BaseHelper> = Map<
Map<string, H>
>;
export type HelperSetting<N extends HelperName = HelperName> = Omit<
SettingCreateDto,
'group' | 'weight'
> & {
group: HyphenToUnderscore<N>;
};
export type HelperSetting<N extends HelperName = HelperName> =
ExtensionSetting<{
group: HyphenToUnderscore<N>;
}>;

View File

@@ -10,6 +10,7 @@ import { CACHE_MANAGER } from '@nestjs/cache-manager';
import { BadRequestException, NotFoundException } from '@nestjs/common';
import { MongooseModule } from '@nestjs/mongoose';
import { NlpValueMatchPattern } from '@/chat/schemas/types/pattern';
import { HelperService } from '@/helper/helper.service';
import { LanguageRepository } from '@/i18n/repositories/language.repository';
import { Language, LanguageModel } from '@/i18n/schemas/language.schema';
@@ -181,11 +182,51 @@ describe('NlpSampleController', () => {
})),
);
});
it('should find nlp samples with patterns', async () => {
  const pageQuery = getPageQuery<NlpSample>({ sort: ['text', 'desc'] });
  const patterns: NlpValueMatchPattern[] = [
    { entity: 'intent', match: 'value', value: 'greeting' },
  ];
  const result = await nlpSampleController.findPage(
    pageQuery,
    ['language', 'entities'],
    {},
    patterns,
  );

  // Should only return samples matching the pattern: compare against the
  // service-level query the controller is expected to delegate to.
  const nlpSamples = await nlpSampleService.findByPatternsAndPopulate(
    { filters: {}, patterns },
    pageQuery,
  );

  expect(result).toEqualPayload(nlpSamples);
});

it('should return empty array if no samples match the patterns', async () => {
  const pageQuery = getPageQuery<NlpSample>({ sort: ['text', 'desc'] });
  // No fixture sample carries an intent value of 'nonexistent'.
  const patterns: NlpValueMatchPattern[] = [
    { entity: 'intent', match: 'value', value: 'nonexistent' },
  ];
  jest.spyOn(nlpSampleService, 'findByPatternsAndPopulate');
  const result = await nlpSampleController.findPage(
    pageQuery,
    ['language', 'entities'],
    {},
    patterns,
  );

  // The controller must still go through the pattern-aware service method.
  expect(nlpSampleService.findByPatternsAndPopulate).toHaveBeenCalledTimes(
    1,
  );
  expect(Array.isArray(result)).toBe(true);
  expect(result).toHaveLength(0);
});
});
describe('count', () => {
it('should count the nlp samples', async () => {
jest.spyOn(nlpSampleService, 'count');
const result = await nlpSampleController.count({});
expect(nlpSampleService.count).toHaveBeenCalledTimes(1);
const count = nlpSampleFixtures.length;
expect(result).toEqual({ count });
});
@@ -439,4 +480,34 @@ describe('NlpSampleController', () => {
).rejects.toThrow(NotFoundException);
});
});
describe('filterCount', () => {
  it('should count the nlp samples without patterns', async () => {
    const filters = { text: 'Hello' };
    jest.spyOn(nlpSampleService, 'countByPatterns');
    // An empty pattern list must still route through countByPatterns.
    const result = await nlpSampleController.filterCount(filters, []);

    expect(nlpSampleService.countByPatterns).toHaveBeenCalledTimes(1);
    expect(result).toEqual({ count: 1 });
  });

  it('should count the nlp samples with patterns', async () => {
    const filters = { text: 'Hello' };
    const patterns: NlpValueMatchPattern[] = [
      { entity: 'intent', match: 'value', value: 'greeting' },
    ];
    jest.spyOn(nlpSampleService, 'countByPatterns');
    const result = await nlpSampleController.filterCount(filters, patterns);

    expect(nlpSampleService.countByPatterns).toHaveBeenCalledTimes(1);
    expect(result).toEqual({ count: 1 });
  });

  it('should return zero count when no samples match the filters and patterns', async () => {
    // Neither the text filter nor the pattern matches any fixture.
    const filters = { text: 'Nonexistent' };
    const patterns: NlpValueMatchPattern[] = [
      { entity: 'intent', match: 'value', value: 'nonexistent' },
    ];
    const result = await nlpSampleController.filterCount(filters, patterns);

    expect(result).toEqual({ count: 0 });
  });
});
});

View File

@@ -29,7 +29,12 @@ import {
import { FileInterceptor } from '@nestjs/platform-express';
import { CsrfCheck } from '@tekuconcept/nestjs-csrf';
import { Response } from 'express';
import { z } from 'zod';
import {
NlpValueMatchPattern,
nlpValueMatchPatternSchema,
} from '@/chat/schemas/types/pattern';
import { HelperService } from '@/helper/helper.service';
import { HelperType } from '@/helper/types';
import { LanguageService } from '@/i18n/services/language.service';
@@ -40,6 +45,7 @@ import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
import { PageQueryPipe } from '@/utils/pagination/pagination-query.pipe';
import { PopulatePipe } from '@/utils/pipes/populate.pipe';
import { SearchFilterPipe } from '@/utils/pipes/search-filter.pipe';
import { ZodQueryParamPipe } from '@/utils/pipes/zod.pipe';
import { TFilterQuery } from '@/utils/types/filter.types';
import { NlpSampleDto, TNlpSampleDto } from '../dto/nlp-sample.dto';
@@ -184,9 +190,22 @@ export class NlpSampleController extends BaseController<
allowedFields: ['text', 'type', 'language'],
}),
)
filters?: TFilterQuery<NlpSample>,
filters: TFilterQuery<NlpSample> = {},
@Query(
new ZodQueryParamPipe(
z.array(nlpValueMatchPatternSchema),
(q) => q?.where?.patterns,
),
)
patterns: NlpValueMatchPattern[] = [],
) {
return await this.count(filters);
const count = await this.nlpSampleService.countByPatterns({
filters,
patterns,
});
return {
count,
};
}
/**
@@ -285,10 +304,23 @@ export class NlpSampleController extends BaseController<
}),
)
filters: TFilterQuery<NlpSample>,
@Query(
new ZodQueryParamPipe(
z.array(nlpValueMatchPatternSchema),
(q) => q?.where?.patterns,
),
)
patterns: NlpValueMatchPattern[] = [],
) {
return this.canPopulate(populate)
? await this.nlpSampleService.findAndPopulate(filters, pageQuery)
: await this.nlpSampleService.find(filters, pageQuery);
? await this.nlpSampleService.findByPatternsAndPopulate(
{ filters, patterns },
pageQuery,
)
: await this.nlpSampleService.findByPatterns(
{ filters, patterns },
pageQuery,
);
}
/**

View File

@@ -7,9 +7,11 @@
*/
import { MongooseModule } from '@nestjs/mongoose';
import { Types } from 'mongoose';
import { LanguageRepository } from '@/i18n/repositories/language.repository';
import { Language, LanguageModel } from '@/i18n/schemas/language.schema';
import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
import { nlpSampleFixtures } from '@/utils/test/fixtures/nlpsample';
import { installNlpSampleEntityFixtures } from '@/utils/test/fixtures/nlpsampleentity';
import { getPageQuery } from '@/utils/test/pagination';
@@ -29,13 +31,16 @@ import {
NlpSampleFull,
NlpSampleModel,
} from '../schemas/nlp-sample.schema';
import { NlpValue, NlpValueModel } from '../schemas/nlp-value.schema';
import { NlpSampleEntityRepository } from './nlp-sample-entity.repository';
import { NlpSampleRepository } from './nlp-sample.repository';
import { NlpValueRepository } from './nlp-value.repository';
describe('NlpSampleRepository', () => {
let nlpSampleRepository: NlpSampleRepository;
let nlpSampleEntityRepository: NlpSampleEntityRepository;
let nlpValueRepository: NlpValueRepository;
let languageRepository: LanguageRepository;
let nlpSampleEntity: NlpSampleEntity | null;
let noNlpSample: NlpSample | null;
@@ -48,21 +53,28 @@ describe('NlpSampleRepository', () => {
MongooseModule.forFeature([
NlpSampleModel,
NlpSampleEntityModel,
NlpValueModel,
LanguageModel,
]),
],
providers: [
NlpSampleRepository,
NlpSampleEntityRepository,
NlpValueRepository,
LanguageRepository,
],
});
[nlpSampleRepository, nlpSampleEntityRepository, languageRepository] =
await getMocks([
NlpSampleRepository,
NlpSampleEntityRepository,
LanguageRepository,
]);
[
nlpSampleRepository,
nlpSampleEntityRepository,
nlpValueRepository,
languageRepository,
] = await getMocks([
NlpSampleRepository,
NlpSampleEntityRepository,
NlpValueRepository,
LanguageRepository,
]);
noNlpSample = await nlpSampleRepository.findOne({ text: 'No' });
nlpSampleEntity = await nlpSampleEntityRepository.findOne({
sample: noNlpSample!.id,
@@ -141,4 +153,149 @@ describe('NlpSampleRepository', () => {
expect(sampleEntities.length).toEqual(0);
});
});
describe('findByEntities', () => {
  it('should return mapped NlpSample instances for matching entities', async () => {
    const filters = {};
    // Resolve the fixture NlpValue docs for the 'greeting' value.
    const values = await nlpValueRepository.find({ value: 'greeting' });
    const result = await nlpSampleRepository.findByEntities({
      filters,
      values,
    });
    // Fixture data contains exactly two samples tagged 'greeting'.
    expect(result).toHaveLength(2);
    expect(result[0]).toBeInstanceOf(NlpSample);
    expect(result[0].text).toBe('Hello');
  });

  it('should return an empty array if no samples match', async () => {
    const filters = {};
    // Fabricated ObjectIds that reference no stored entity/value.
    const values = [
      {
        id: new Types.ObjectId().toHexString(),
        entity: new Types.ObjectId().toHexString(),
        value: 'nonexistent',
      },
    ] as NlpValue[];
    const result = await nlpSampleRepository.findByEntities({
      filters,
      values,
    });
    expect(Array.isArray(result)).toBe(true);
    expect(result).toHaveLength(0);
  });
});

describe('findByEntitiesAndPopulate', () => {
  it('should return populated NlpSampleFull instances for matching entities', async () => {
    const filters = {};
    const values = await nlpValueRepository.find({ value: 'greeting' });
    const result = await nlpSampleRepository.findByEntitiesAndPopulate({
      filters,
      values,
    });
    expect(result.length).toBe(2);
    // Every returned sample must carry its populated relations.
    result.forEach((sample) => {
      expect(sample).toBeInstanceOf(NlpSampleFull);
      expect(sample.entities).toBeDefined();
      expect(Array.isArray(sample.entities)).toBe(true);
      expect(sample.language).toBeDefined();
    });
  });

  it('should return an empty array if no samples match', async () => {
    const filters = {};
    const values = [
      {
        id: new Types.ObjectId().toHexString(),
        entity: new Types.ObjectId().toHexString(),
        value: 'nonexistent',
      },
    ] as NlpValue[];
    const result = await nlpSampleRepository.findByEntitiesAndPopulate({
      filters,
      values,
    });
    expect(Array.isArray(result)).toBe(true);
    expect(result).toHaveLength(0);
  });

  it('should support pagination and projection', async () => {
    const filters = {};
    const values = await nlpValueRepository.find({ value: 'greeting' });
    // limit=1 must truncate the 2 matching samples down to a single page.
    const page = {
      limit: 1,
      skip: 0,
      sort: ['text', 'asc'],
    } as PageQueryDto<NlpSample>;
    const projection = { text: 1 };
    const result = await nlpSampleRepository.findByEntitiesAndPopulate(
      { filters, values },
      page,
      projection,
    );
    expect(Array.isArray(result)).toBe(true);
    expect(result.length).toBe(1);
    if (result.length > 0) {
      expect(result[0]).toHaveProperty('text');
    }
  });
});

describe('countByEntities', () => {
  it('should return the correct count for matching entities', async () => {
    const filters = {};
    const values = await nlpValueRepository.find({ value: 'greeting' });
    const count = await nlpSampleRepository.countByEntities({
      filters,
      values,
    });
    expect(typeof count).toBe('number');
    expect(count).toBe(2);
  });

  it('should return 0 if no samples match', async () => {
    const filters = {};
    const values = [
      {
        id: new Types.ObjectId().toHexString(),
        entity: new Types.ObjectId().toHexString(),
        value: 'nonexistent',
      },
    ] as NlpValue[];
    const count = await nlpSampleRepository.countByEntities({
      filters,
      values,
    });
    expect(count).toBe(0);
  });

  it('should respect filters (e.g. language)', async () => {
    const values = await nlpValueRepository.find({ value: 'greeting' });
    const language = languages[0];
    // Restricting by language must never widen the result set.
    const filters = { language: language.id };
    const count = await nlpSampleRepository.countByEntities({
      filters,
      values,
    });
    // Should be <= total greeting samples, and >= 0
    expect(typeof count).toBe('number');
    expect(count).toBeGreaterThanOrEqual(0);
    expect(count).toBeLessThanOrEqual(2);
  });
});
});

View File

@@ -8,18 +8,30 @@
import { Injectable } from '@nestjs/common';
import { InjectModel } from '@nestjs/mongoose';
import { Document, Model, Query } from 'mongoose';
import { plainToClass } from 'class-transformer';
import {
Aggregate,
Document,
Model,
PipelineStage,
ProjectionType,
Query,
Types,
} from 'mongoose';
import { BaseRepository, DeleteResult } from '@/utils/generics/base-repository';
import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
import { TFilterQuery } from '@/utils/types/filter.types';
import { TNlpSampleDto } from '../dto/nlp-sample.dto';
import {
NLP_SAMPLE_POPULATE,
NlpSample,
NlpSampleDocument,
NlpSampleFull,
NlpSamplePopulate,
} from '../schemas/nlp-sample.schema';
import { NlpValue } from '../schemas/nlp-value.schema';
import { NlpSampleEntityRepository } from './nlp-sample-entity.repository';
@@ -37,6 +49,250 @@ export class NlpSampleRepository extends BaseRepository<
super(model, NlpSample, NLP_SAMPLE_POPULATE, NlpSampleFull);
}
/**
 * Normalize the filter query.
 *
 * Only conditions nested inside a top-level `$and` are inspected: any
 * condition carrying a truthy `language` has that value converted from its
 * string representation to a `Types.ObjectId` so it can match the stored
 * reference. Filters without `$and` (including a bare top-level `language`
 * key) are returned unchanged.
 *
 * @param filters - The filters to normalize.
 * @returns The normalized filters (a shallow copy when `$and` is present,
 * otherwise the original object).
 */
private normalizeFilters(
  filters: TFilterQuery<NlpSample>,
): TFilterQuery<NlpSample> {
  if (filters?.$and) {
    return {
      ...filters,
      $and: filters.$and.map((condition) => {
        // @todo: think of a better way to handle language to objectId conversion
        // This is a workaround for the fact that language is stored as an ObjectId
        // in the database, but we want to filter by its string representation.
        if ('language' in condition && condition.language) {
          return {
            ...condition,
            language: new Types.ObjectId(condition.language as string),
          };
        }

        return condition;
      }),
    };
  }

  return filters;
}
/**
 * Build the aggregation stages that restrict the *nlpsamples* collection
 * to documents which:
 * 1. Satisfy the optional `filters`, and
 * 2. Are linked (via *nlpsampleentities*) to **all** of the supplied `values`.
 *
 * @param criterias Object with:
 * @param criterias.filters Extra filters to be applied on *nlpsamples*.
 * @param criterias.values Value documents whose IDs should match the links'
 * `value` field (and whose parent entity should match the links' `entity`).
 * @returns Array of aggregation `PipelineStage`s ready to be concatenated
 * into a larger pipeline.
 */
buildFindByEntitiesStages({
  filters,
  values,
}: {
  filters: TFilterQuery<NlpSample>;
  values: NlpValue[];
}): PipelineStage[] {
  // One { entity, value } ObjectId pair per requested value; a sample must
  // be linked to every one of these pairs to be kept.
  const requiredPairs = values.map(({ id, entity }) => ({
    entity: new Types.ObjectId(entity),
    value: new Types.ObjectId(id),
  }));
  const normalizedFilters = this.normalizeFilters(filters);

  return [
    {
      $match: {
        ...normalizedFilters,
      },
    },
    // Fetch the entity links matching any required pair, for each sample.
    // NOTE(review): `$or` with an empty array is invalid in MongoDB, so this
    // pipeline assumes `values` is non-empty — callers guard for that.
    {
      $lookup: {
        from: 'nlpsampleentities',
        localField: '_id', // nlpsamples._id
        foreignField: 'sample', // nlpsampleentities.sample
        as: 'sampleentities',
        pipeline: [
          {
            $match: {
              $or: requiredPairs,
            },
          },
        ],
      },
    },
    // Cheap pre-filter: drop samples with fewer matching links than required.
    {
      $match: {
        $expr: {
          $gte: [{ $size: '$sampleentities' }, requiredPairs.length],
        },
      },
    },
    // Collapse each link into an { entity, value } object
    {
      $addFields: {
        entities: {
          $ifNull: [
            {
              $map: {
                input: '$sampleentities',
                as: 's',
                in: { entity: '$$s.entity', value: '$$s.value' },
              },
            },
            [],
          ],
        },
      },
    },
    // Keep only the samples whose `entities` array ⊇ `requiredPairs`
    {
      $match: {
        $expr: {
          $eq: [
            requiredPairs.length, // target size
            {
              $size: {
                $setIntersection: ['$entities', requiredPairs],
              },
            },
          ],
        },
      },
    },
    // Drop the helper arrays — they are not needed downstream.
    { $project: { entities: 0, sampleentities: 0 } },
  ];
}
/**
 * Build the aggregation pipeline returning the samples that match the given
 * entity/value criteria, with pagination and projection stages appended.
 *
 * @param criterias `{ filters, values }` forwarded to `buildFindByEntitiesStages`.
 * @param page Optional paging / sorting descriptor.
 * @param projection Optional Mongo projection — either a projection object or
 * a Mongoose space-delimited string (e.g. `"text -language"`).
 * @returns Un-executed aggregation cursor.
 */
findByEntitiesAggregation(
  criterias: {
    filters: TFilterQuery<NlpSample>;
    values: NlpValue[];
  },
  page?: PageQueryDto<NlpSample>,
  projection?: ProjectionType<NlpSample>,
): Aggregate<NlpSampleDocument[]> {
  return this.model.aggregate<NlpSampleDocument>([
    ...this.buildFindByEntitiesStages(criterias),
    // sort / skip / limit
    ...this.buildPaginationPipelineStages(page),
    // projection
    ...(projection
      ? [
          {
            $project:
              typeof projection === 'string'
                ? // Support Mongoose-style string projections: a space
                  // delimited field list where a "-" prefix excludes a field
                  // (a single-token string keeps its previous behavior).
                  Object.fromEntries(
                    projection
                      .split(/\s+/)
                      .filter(Boolean)
                      .map((field) =>
                        field.startsWith('-')
                          ? [field.slice(1), 0]
                          : [field, 1],
                      ),
                  )
                : projection,
          },
        ]
      : []),
  ]);
}
/**
 * Find NLP samples matching both the `filters` and the supplied `values`.
 *
 * Executes the `findByEntitiesAggregation` pipeline and maps each raw
 * document to an `NlpSample` class instance.
 *
 * @param criterias - Criteria containing filters and values to match.
 * @param page - Optional pagination parameters.
 * @param projection - Optional projection to limit fields returned.
 * @returns Promise resolving to an array of NlpSample objects.
 */
async findByEntities(
  criterias: {
    filters: TFilterQuery<NlpSample>;
    values: NlpValue[];
  },
  page?: PageQueryDto<NlpSample>,
  projection?: ProjectionType<NlpSample>,
): Promise<NlpSample[]> {
  const aggregation = this.findByEntitiesAggregation(
    criterias,
    page,
    projection,
  );
  const resultSet = await aggregation.exec();

  return resultSet.map((doc) =>
    plainToClass(NlpSample, doc, this.transformOpts),
  );
}
/**
 * Find NLP samples by entities and populate them with their related data.
 *
 * @param criterias - Criteria containing filters and values to match.
 * @param page - Optional pagination parameters.
 * @param projection - Optional projection to limit fields returned.
 * @returns Promise resolving to an array of populated NlpSampleFull objects.
 */
async findByEntitiesAndPopulate(
  criterias: {
    filters: TFilterQuery<NlpSample>;
    values: NlpValue[];
  },
  page?: PageQueryDto<NlpSample>,
  projection?: ProjectionType<NlpSample>,
): Promise<NlpSampleFull[]> {
  const rawDocs = await this.findByEntitiesAggregation(
    criterias,
    page,
    projection,
  ).exec();
  const populatedDocs = await this.populate(rawDocs);

  return populatedDocs.map((populatedDoc) =>
    plainToClass(NlpSampleFull, populatedDoc, this.transformOpts),
  );
}
/**
 * Build an aggregation pipeline that counts NLP samples satisfying:
 * the extra `filters` (applied in the initial `$match`), and
 * all of the supplied `values`.
 *
 * @param criterias `{ filters, values }`
 * @returns Un-executed aggregation cursor.
 */
countByEntitiesAggregation(criterias: {
  filters: TFilterQuery<NlpSample>;
  values: NlpValue[];
}): Aggregate<{ count: number }[]> {
  return this.model.aggregate<{ count: number }>([
    ...this.buildFindByEntitiesStages(criterias),
    // Final count
    { $count: 'count' },
  ]);
}
/**
 * Returns the count of samples by filters and/or values.
 *
 * @param criterias `{ filters, values }`
 * @returns Promise resolving to the count (0 when nothing matches).
 */
async countByEntities(criterias: {
  filters: TFilterQuery<NlpSample>;
  values: NlpValue[];
}): Promise<number> {
  const aggregation = this.countByEntitiesAggregation(criterias);
  const [result] = await aggregation.exec();

  // `$count` emits no document at all when the pipeline matches nothing,
  // hence the nullish fallback (`??` rather than `||`, which would also
  // clobber a legitimate 0 — `$count` never yields 0, but be precise).
  return result?.count ?? 0;
}
/**
* Deletes NLP sample entities associated with the provided criteria before deleting the sample itself.
*

View File

@@ -10,9 +10,11 @@ import { CACHE_MANAGER } from '@nestjs/cache-manager';
import { BadRequestException, NotFoundException } from '@nestjs/common';
import { MongooseModule } from '@nestjs/mongoose';
import { NlpValueMatchPattern } from '@/chat/schemas/types/pattern';
import { LanguageRepository } from '@/i18n/repositories/language.repository';
import { Language, LanguageModel } from '@/i18n/schemas/language.schema';
import { LanguageService } from '@/i18n/services/language.service';
import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
import { nlpSampleFixtures } from '@/utils/test/fixtures/nlpsample';
import { installNlpSampleEntityFixtures } from '@/utils/test/fixtures/nlpsampleentity';
import { getPageQuery } from '@/utils/test/pagination';
@@ -52,6 +54,7 @@ describe('NlpSampleService', () => {
let nlpEntityService: NlpEntityService;
let nlpSampleService: NlpSampleService;
let nlpSampleEntityService: NlpSampleEntityService;
let nlpValueService: NlpValueService;
let languageService: LanguageService;
let nlpSampleEntityRepository: NlpSampleEntityRepository;
let nlpSampleRepository: NlpSampleRepository;
@@ -98,6 +101,7 @@ describe('NlpSampleService', () => {
nlpEntityService,
nlpSampleService,
nlpSampleEntityService,
nlpValueService,
nlpSampleRepository,
nlpSampleEntityRepository,
nlpSampleEntityRepository,
@@ -107,6 +111,7 @@ describe('NlpSampleService', () => {
NlpEntityService,
NlpSampleService,
NlpSampleEntityService,
NlpValueService,
NlpSampleRepository,
NlpSampleEntityRepository,
NlpSampleEntityRepository,
@@ -360,4 +365,200 @@ describe('NlpSampleService', () => {
expect(extractSpy).not.toHaveBeenCalled();
});
});
describe('findByPatterns', () => {
  it('should return samples without providing patterns', async () => {
    // An empty pattern list should fall back to a plain repository find.
    const result = await nlpSampleService.findByPatterns(
      { filters: {}, patterns: [] },
      undefined,
    );

    expect(Array.isArray(result)).toBe(true);
    expect(result.length).toBeGreaterThan(0);
  });

  it('should return samples matching the given patterns', async () => {
    // Assume pattern: entity 'intent', value 'greeting'
    const patterns: NlpValueMatchPattern[] = [
      { entity: 'intent', match: 'value', value: 'greeting' },
    ];
    jest.spyOn(nlpSampleRepository, 'findByEntities');
    jest.spyOn(nlpValueService, 'findByPatterns');

    const result = await nlpSampleService.findByPatterns(
      { filters: {}, patterns },
      undefined,
    );

    // Both collaborators must be hit on the "patterns provided" path.
    expect(nlpSampleRepository.findByEntities).toHaveBeenCalled();
    expect(nlpValueService.findByPatterns).toHaveBeenCalled();
    expect(Array.isArray(result)).toBe(true);
    expect(result[0].text).toBe('Hello');
  });

  it('should return an empty array if no samples match the patterns', async () => {
    const patterns: NlpValueMatchPattern[] = [
      { entity: 'intent', match: 'value', value: 'nonexistent' },
    ];
    jest.spyOn(nlpSampleRepository, 'findByEntities');
    jest.spyOn(nlpValueService, 'findByPatterns');

    const result = await nlpSampleService.findByPatterns(
      { filters: {}, patterns },
      undefined,
    );

    // When no value resolves, the repository lookup must be short-circuited.
    expect(nlpSampleRepository.findByEntities).not.toHaveBeenCalled();
    expect(nlpValueService.findByPatterns).toHaveBeenCalled();
    expect(Array.isArray(result)).toBe(true);
    expect(result).toHaveLength(0);
  });

  it('should support pagination', async () => {
    const patterns: NlpValueMatchPattern[] = [
      { entity: 'intent', match: 'value', value: 'greeting' },
    ];
    const page: PageQueryDto<NlpSample> = {
      limit: 1,
      skip: 0,
      sort: ['text', 'asc'],
    };

    const result = await nlpSampleService.findByPatterns(
      { filters: {}, patterns },
      page,
    );

    // `limit: 1` must cap the result set.
    expect(Array.isArray(result)).toBe(true);
    expect(result.length).toBe(1);
  });
});
describe('findByPatternsAndPopulate', () => {
  it('should return populated NlpSampleFull instances for matching patterns', async () => {
    const patterns: NlpValueMatchPattern[] = [
      { entity: 'intent', match: 'value', value: 'greeting' },
    ];

    const result = await nlpSampleService.findByPatternsAndPopulate(
      { filters: {}, patterns },
      undefined,
    );

    expect(Array.isArray(result)).toBe(true);
    expect(result.length).toBeGreaterThan(0);
    result.forEach((sample) => {
      // Every result must be a populated class instance, not a lean document.
      expect(sample).toBeInstanceOf(NlpSampleFull);
      expect(sample.entities).toBeDefined();
      expect(Array.isArray(sample.entities)).toBe(true);
      expect(sample.language).toBeDefined();
    });
  });

  it('should return populated NlpSampleFull without providing patterns', async () => {
    // Empty patterns fall back to a plain findAndPopulate on the filters.
    const result = await nlpSampleService.findByPatternsAndPopulate(
      { filters: { text: /Hello/gi }, patterns: [] },
      undefined,
    );

    expect(Array.isArray(result)).toBe(true);
    expect(result.length).toBe(1);
    expect(result[0]).toBeInstanceOf(NlpSampleFull);
    expect(result[0].entities).toBeDefined();
    expect(Array.isArray(result[0].entities)).toBe(true);
  });

  it('should return an empty array if no samples match the patterns', async () => {
    const patterns: NlpValueMatchPattern[] = [
      { entity: 'intent', match: 'value', value: 'nonexistent' },
    ];

    const result = await nlpSampleService.findByPatternsAndPopulate(
      { filters: {}, patterns },
      undefined,
    );

    expect(Array.isArray(result)).toBe(true);
    expect(result).toHaveLength(0);
  });

  it('should support pagination and projection', async () => {
    const patterns: NlpValueMatchPattern[] = [
      { entity: 'intent', match: 'value', value: 'greeting' },
    ];
    const page: PageQueryDto<NlpSample> = {
      limit: 1,
      skip: 0,
      sort: ['text', 'asc'],
    };

    const result = await nlpSampleService.findByPatternsAndPopulate(
      { filters: {}, patterns },
      page,
    );

    // `limit: 1` must cap the result set.
    expect(Array.isArray(result)).toBe(true);
    expect(result.length).toBe(1);
  });
});
describe('countByPatterns', () => {
  it('should return the correct count for matching patterns', async () => {
    const patterns: NlpValueMatchPattern[] = [
      { entity: 'intent', match: 'value', value: 'greeting' },
    ];
    jest.spyOn(nlpSampleRepository, 'countByEntities');
    jest.spyOn(nlpValueService, 'findByPatterns');

    const count = await nlpSampleService.countByPatterns({
      filters: {},
      patterns,
    });

    expect(nlpSampleRepository.countByEntities).toHaveBeenCalled();
    expect(nlpValueService.findByPatterns).toHaveBeenCalled();
    expect(typeof count).toBe('number');
    // Two fixture samples are expected to carry the 'greeting' intent.
    expect(count).toBe(2);
  });

  it('should return the correct count without providing patterns', async () => {
    jest.spyOn(nlpSampleRepository, 'findByEntities');
    jest.spyOn(nlpValueService, 'findByPatterns');

    const count = await nlpSampleService.countByPatterns({
      filters: {},
      patterns: [],
    });

    // The empty-patterns path counts directly, bypassing both collaborators.
    expect(nlpSampleRepository.findByEntities).not.toHaveBeenCalled();
    expect(nlpValueService.findByPatterns).not.toHaveBeenCalled();
    expect(typeof count).toBe('number');
    expect(count).toBeGreaterThan(2);
  });

  it('should return 0 if no samples match the patterns', async () => {
    const patterns: NlpValueMatchPattern[] = [
      { entity: 'intent', match: 'value', value: 'nonexistent' },
    ];

    const count = await nlpSampleService.countByPatterns({
      filters: {},
      patterns,
    });

    expect(count).toBe(0);
  });

  it('should respect filters (e.g. language)', async () => {
    const patterns: NlpValueMatchPattern[] = [
      { entity: 'intent', match: 'value', value: 'greeting' },
    ];
    // NOTE(review): despite the test title, the filter exercised here is on
    // `text`, not `language` — consider renaming or adding a language case.
    const filters = { text: 'Hello' };

    const count = await nlpSampleService.countByPatterns({
      filters,
      patterns,
    });

    expect(typeof count).toBe('number');
    expect(count).toBe(1);
  });
});
});

View File

@@ -12,14 +12,16 @@ import {
NotFoundException,
} from '@nestjs/common';
import { OnEvent } from '@nestjs/event-emitter';
import { Document, Query } from 'mongoose';
import { Document, ProjectionType, Query } from 'mongoose';
import Papa from 'papaparse';
import { Message } from '@/chat/schemas/message.schema';
import { NlpValueMatchPattern } from '@/chat/schemas/types/pattern';
import { Language } from '@/i18n/schemas/language.schema';
import { LanguageService } from '@/i18n/services/language.service';
import { DeleteResult } from '@/utils/generics/base-repository';
import { BaseService } from '@/utils/generics/base-service';
import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
import { TFilterQuery, THydratedDocument } from '@/utils/types/filter.types';
import { NlpSampleEntityCreateDto } from '../dto/nlp-sample-entity.dto';
@@ -35,6 +37,7 @@ import { NlpSampleEntityValue, NlpSampleState } from '../schemas/types';
import { NlpEntityService } from './nlp-entity.service';
import { NlpSampleEntityService } from './nlp-sample-entity.service';
import { NlpValueService } from './nlp-value.service';
@Injectable()
export class NlpSampleService extends BaseService<
@@ -47,11 +50,126 @@ export class NlpSampleService extends BaseService<
readonly repository: NlpSampleRepository,
private readonly nlpSampleEntityService: NlpSampleEntityService,
private readonly nlpEntityService: NlpEntityService,
private readonly nlpValueService: NlpValueService,
private readonly languageService: LanguageService,
) {
super(repository);
}
/**
 * Retrieve samples that satisfy `filters` **and** reference every entity /
 * value pair contained in `patterns`.
 *
 * The pattern list is first resolved into value documents via
 * `NlpValueService.findByPatterns`, then delegated to
 * `repository.findByEntities`.
 *
 * @param criterias `{ filters, patterns }`
 * @param page Optional paging / sorting descriptor.
 * @param projection Optional Mongo projection.
 * @returns Promise resolving to the matching samples.
 */
async findByPatterns(
  {
    filters,
    patterns,
  }: {
    filters: TFilterQuery<NlpSample>;
    patterns: NlpValueMatchPattern[];
  },
  page?: PageQueryDto<NlpSample>,
  projection?: ProjectionType<NlpSample>,
): Promise<NlpSample[]> {
  // Without patterns, this is a plain find on the samples collection.
  if (patterns.length === 0) {
    return await this.repository.find(filters, page, projection);
  }

  const matchedValues = await this.nlpValueService.findByPatterns(patterns);

  // No value resolved from the patterns: no sample can qualify.
  return matchedValues.length === 0
    ? []
    : await this.repository.findByEntities(
        { filters, values: matchedValues },
        page,
        projection,
      );
}
/**
 * Same as `findByPatterns`, but also populates all relations declared
 * in the repository (`populatePaths`).
 *
 * @param criteria `{ filters, patterns }`
 * @param page Optional paging / sorting descriptor.
 * @param projection Optional Mongo projection.
 * @returns Promise resolving to the populated samples.
 */
async findByPatternsAndPopulate(
  {
    filters,
    patterns,
  }: {
    filters: TFilterQuery<NlpSample>;
    patterns: NlpValueMatchPattern[];
  },
  page?: PageQueryDto<NlpSample>,
  projection?: ProjectionType<NlpSample>,
): Promise<NlpSampleFull[]> {
  // Without patterns, this is a plain populated find.
  if (patterns.length === 0) {
    return await this.repository.findAndPopulate(filters, page, projection);
  }

  const matchedValues = await this.nlpValueService.findByPatterns(patterns);

  // No value resolved from the patterns: no sample can qualify.
  return matchedValues.length === 0
    ? []
    : await this.repository.findByEntitiesAndPopulate(
        { filters, values: matchedValues },
        page,
        projection,
      );
}
/**
 * Count how many samples satisfy `filters` and reference every entity /
 * value pair present in `patterns`.
 *
 * @param param0 `{ filters, patterns }`
 * @returns Promise resolving to the count.
 */
async countByPatterns({
  filters,
  patterns,
}: {
  filters: TFilterQuery<NlpSample>;
  patterns: NlpValueMatchPattern[];
}): Promise<number> {
  // Without patterns, this is a plain count on the filters.
  if (patterns.length === 0) {
    return await this.repository.count(filters);
  }

  const matchedValues = await this.nlpValueService.findByPatterns(patterns);

  // No value resolved from the patterns: nothing can match.
  return matchedValues.length === 0
    ? 0
    : await this.repository.countByEntities({
        filters,
        values: matchedValues,
      });
}
/**
* Fetches the samples and entities for a given sample type.
*

View File

@@ -8,6 +8,7 @@
import { forwardRef, Inject, Injectable } from '@nestjs/common';
import { NlpValueMatchPattern } from '@/chat/schemas/types/pattern';
import { DeleteResult } from '@/utils/generics/base-repository';
import { BaseService } from '@/utils/generics/base-service';
import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
@@ -42,6 +43,20 @@ export class NlpValueService extends BaseService<
super(repository);
}
/**
 * Fetch values whose `value` field matches the patterns provided.
 *
 * NOTE(review): only the patterns' `value` strings are matched (`$in`); the
 * patterns' `entity` field is ignored, so values sharing the same string
 * across different entities would all be returned — confirm this is intended.
 *
 * @param patterns Pattern list
 * @returns Promise resolving to the matching values.
 */
async findByPatterns(patterns: NlpValueMatchPattern[]) {
  return await this.find({
    value: {
      $in: patterns.map((p) => p.value),
    },
  });
}
/**
* Deletes an NLP value by its ID, cascading any dependent data.
*

View File

@@ -65,7 +65,6 @@ export class NlpService {
* Handles the event triggered when a new NLP entity is created. Synchronizes the entity with the external NLP provider.
*
* @param entity - The NLP entity to be created.
* @returns The updated entity after synchronization.
*/
@OnEvent('hook:nlpEntity:create')
async handleEntityCreate(entity: NlpEntityDocument) {
@@ -74,7 +73,7 @@ export class NlpService {
const helper = await this.helperService.getDefaultHelper(HelperType.NLU);
const foreignId = await helper.addEntity(entity);
this.logger.debug('New entity successfully synced!', foreignId);
return await this.nlpEntityService.updateOne(
await this.nlpEntityService.updateOne(
{ _id: entity._id },
{
foreign_id: foreignId,
@@ -82,7 +81,6 @@ export class NlpService {
);
} catch (err) {
this.logger.error('Unable to sync a new entity', err);
return entity;
}
}
@@ -129,8 +127,6 @@ export class NlpService {
* Handles the event triggered when a new NLP value is created. Synchronizes the value with the external NLP provider.
*
* @param value - The NLP value to be created.
*
* @returns The updated value after synchronization.
*/
@OnEvent('hook:nlpValue:create')
async handleValueCreate(value: NlpValueDocument) {
@@ -139,7 +135,7 @@ export class NlpService {
const helper = await this.helperService.getDefaultNluHelper();
const foreignId = await helper.addValue(value);
this.logger.debug('New value successfully synced!', foreignId);
return await this.nlpValueService.updateOne(
await this.nlpValueService.updateOne(
{ _id: value._id },
{
foreign_id: foreignId,
@@ -147,7 +143,6 @@ export class NlpService {
);
} catch (err) {
this.logger.error('Unable to sync a new value', err);
return value;
}
}

View File

@@ -10,7 +10,7 @@ import { ChannelEvent } from '@/channel/lib/EventWrapper';
import { BlockCreateDto } from '@/chat/dto/block.dto';
import { Block } from '@/chat/schemas/block.schema';
import { Conversation } from '@/chat/schemas/conversation.schema';
import { SettingCreateDto } from '@/setting/dto/setting.dto';
import { ExtensionSetting } from '@/setting/schemas/types';
export type PluginName = `${string}-plugin`;
@@ -23,7 +23,7 @@ export interface CustomBlocks {}
type BlockAttrs = Partial<BlockCreateDto> & { name: string };
export type PluginSetting = Omit<SettingCreateDto, 'weight'>;
export type PluginSetting = ExtensionSetting;
export type PluginBlockTemplate = Omit<
BlockAttrs,

View File

@@ -6,6 +6,8 @@
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
*/
import { BaseSchema } from '@/utils/generics/base-schema';
import { Setting } from './setting.schema';
export enum SettingType {
@@ -128,3 +130,17 @@ export type AnySetting =
| MultipleAttachmentSetting;
export type SettingDict = { [group: string]: Setting[] };
/**
 * Setting shape exposed to extensions (e.g. `PluginSetting`).
 *
 * Distributes over the `AnySetting` union (via the `U extends any` trick) and,
 * for each union member, remaps its keys to drop the persistence-related
 * `BaseSchema` keys (`K`) as well as properties typed `never`, then
 * intersects the remaining shape with `E`.
 */
export type ExtensionSetting<
  E extends object = object,
  U extends AnySetting = AnySetting,
  K extends keyof BaseSchema = keyof BaseSchema,
> = U extends any
  ? {
      [P in keyof U as P extends K
        ? never
        : U[P] extends never
          ? never
          : P]: U[P];
    } & E
  : never;

View File

@@ -50,6 +50,20 @@ export const DEFAULT_SETTINGS = [
},
weight: 3,
},
{
group: 'chatbot_settings',
label: 'default_flow_escape_helper',
value: '',
type: SettingType.select,
config: {
multiple: false,
allowCreate: false,
entity: 'Helper',
idKey: 'name',
labelKey: 'name',
},
weight: 3,
},
{
group: 'chatbot_settings',
label: 'default_storage_helper',

View File

@@ -19,6 +19,7 @@ import {
FlattenMaps,
HydratedDocument,
Model,
PipelineStage,
ProjectionType,
Query,
SortOrder,
@@ -31,10 +32,12 @@ import { LoggerService } from '@/logger/logger.service';
import {
TFilterQuery,
TFlattenOption,
THydratedDocument,
TQueryOptions,
} from '@/utils/types/filter.types';
import { flatten } from '../helpers/flatten';
import { camelCase } from '../helpers/misc';
import { PageQueryDto, QuerySortDto } from '../pagination/pagination-query.dto';
import { DtoAction, DtoConfig, DtoInfer } from '../types/dto.types';
@@ -81,9 +84,13 @@ export abstract class BaseRepository<
U extends Omit<T, keyof BaseSchema> = Omit<T, keyof BaseSchema>,
D = Document<T>,
> {
private readonly transformOpts = { excludePrefixes: ['_', 'password'] };
protected readonly transformOpts = { excludePrefixes: ['_', 'password'] };
private readonly leanOpts = { virtuals: true, defaults: true, getters: true };
protected readonly leanOpts = {
virtuals: true,
defaults: true,
getters: true,
};
@Inject(EventEmitter2)
readonly eventEmitter: EventEmitter2;
@@ -94,21 +101,56 @@ export abstract class BaseRepository<
constructor(
readonly model: Model<T>,
private readonly cls: new () => T,
protected readonly populate: P[] = [],
protected readonly populatePaths: P[] = [],
protected readonly clsPopulate?: new () => TFull,
) {
this.registerLifeCycleHooks();
}
/**
* Determine whether at least one of the requested populate paths
* is supported by the repository.
*
* @param populate Array of path strings supplied by the caller.
* @returns `true` if any item appears in `this.populatePaths`, else `false`.
*/
canPopulate(populate: string[]): boolean {
return populate.some((p) => this.populate.includes(p as P));
return populate.some((p) => this.populatePaths.includes(p as P));
}
/**
* Build the canonical event name used by the repositorys event-bus hooks.
*
* Format: `hook:<entity>:<suffix>`
* where `<entity>` is the lower-cased class name and `<suffix>` is an
* `EHook` value such as `"preCreate"` or `"postUpdate"`.
*
* @param suffix Lifecycle-hook suffix.
* @returns A type-safe event name string.
*/
getEventName(suffix: EHook) {
const entity = this.cls.name.toLocaleLowerCase();
const entity = camelCase(this.cls.name);
return `hook:${entity}:${suffix}` as `hook:${IHookEntities}:${TNormalizedEvents}`;
}
/**
* Wire all Mongoose lifecycle hooks to the repositorys overridable
* `pre-/post-*` methods **and** to the domain event bus.
*
* For the current repository (`this.cls.name`) the method:
* 1. Retrieves the hook definitions from `LifecycleHookManager`.
* 2. Registers handlers for:
* • `validate.pre / validate.post` → `preCreateValidate` / `postCreateValidate`
* • `save.pre / save.post` → `preCreate` / `postCreate`
* • `deleteOne.* deleteMany.*` → `preDelete` / `postDelete`
* • `findOneAndUpdate.*` → `preUpdate` / `postUpdate`
* • `updateMany.*` → `preUpdateMany` / `postUpdateMany`
* 3. Emits the corresponding domain events (`EHook.*`) via `eventEmitter`
* after each repository callback.
*
* If no hooks are registered for the current class, a console warning is
* issued and the method exits gracefully.
*/
private registerLifeCycleHooks(): void {
const repository = this;
const hooks = LifecycleHookManager.getHooks(this.cls.name);
@@ -252,6 +294,19 @@ export abstract class BaseRepository<
});
}
/**
* Execute a `find`-style query, convert each lean result to `cls`, and return
* the transformed list.
*
* - The query is run with `lean(this.leanOpts)` for performance.
* - Each plain object is passed through `plainToClass` using
* `this.transformOpts`.
*
* @template R Result type typically the populated or base DTO class.
* @param query Mongoose query returning an array of documents.
* @param cls Constructor used by `plainToClass` for transformation.
* @returns Promise resolving to an array of class instances.
*/
protected async execute<R extends Omit<T, P>>(
query: Query<T[], T>,
cls: new () => R,
@@ -260,6 +315,19 @@ export abstract class BaseRepository<
return resultSet.map((doc) => plainToClass(cls, doc, this.transformOpts));
}
/**
* Execute a single-document query, convert the result to `cls`,
* and return it (or `null`).
*
* - Uses `lean(this.leanOpts)` for performance.
* - Falls back to `this.transformOpts` when `options` is not provided.
*
* @template R Result type typically the populated or base DTO class.
* @param query Mongoose query expected to return one document.
* @param cls Constructor used by `plainToClass`.
* @param options Optional `ClassTransformOptions` overriding defaults.
* @returns Promise resolving to a class instance or `null`.
*/
protected async executeOne<R extends Omit<T, P>>(
query: Query<T | null, T>,
cls: new () => R,
@@ -269,6 +337,18 @@ export abstract class BaseRepository<
return plainToClass(cls, doc, options ?? this.transformOpts);
}
/**
* Build a `findOne`/`findById` query.
*
* - `criteria` may be an `_id` string or any Mongo filter;
* an empty / falsy value is **not allowed** (throws).
* - Optional `projection` is forwarded unchanged.
*
* @param criteria Document `_id` **or** Mongo filter.
* @param projection Optional Mongo projection.
* @throws Error when `criteria` is empty.
* @returns Un-executed Mongoose query.
*/
protected findOneQuery(
criteria: string | TFilterQuery<T>,
projection?: ProjectionType<T>,
@@ -283,11 +363,23 @@ export abstract class BaseRepository<
: this.model.findOne<HydratedDocument<T>>(criteria, projection);
}
/**
* Retrieve a single document and convert it to `this.cls`.
*
* - Returns `null` immediately when `criteria` is falsy.
* - Optional `options` are passed to `plainToClass`.
* - Optional `projection` limits returned fields.
*
* @param criteria Document `_id` **or** Mongo filter.
* @param options Class-transform options.
* @param projection Optional Mongo projection.
* @returns Promise resolving to the found entity or `null`.
*/
async findOne(
criteria: string | TFilterQuery<T>,
options?: ClassTransformOptions,
projection?: ProjectionType<T>,
) {
): Promise<T | null> {
if (!criteria) {
// @TODO : Issue a warning ?
return null;
@@ -297,13 +389,23 @@ export abstract class BaseRepository<
return await this.executeOne(query, this.cls, options);
}
/**
* Retrieve a single document with all `populatePaths` relations resolved.
*
* - Throws if population is not configured.
* - Returns `null` when nothing matches `criteria`.
*
* @param criteria Document `_id` **or** Mongo filter.
* @param projection Optional Mongo projection.
* @returns Promise resolving to the populated entity or `null`.
*/
async findOneAndPopulate(
criteria: string | TFilterQuery<T>,
projection?: ProjectionType<T>,
): Promise<TFull | null> {
this.ensureCanPopulate();
const query = this.findOneQuery(criteria, projection).populate(
this.populate,
this.populatePaths,
);
return await this.executeOne(query, this.clsPopulate!);
}
@@ -323,6 +425,17 @@ export abstract class BaseRepository<
projection?: ProjectionType<T>,
): Query<T[], T, object, T, 'find', object>;
/**
* Build an un-executed `find` query with optional pagination, sorting,
* and projection.
*
* The returned query can be further chained or passed to `execute`.
*
* @param filter Mongo selector for the documents.
* @param pageQuery Sort tuple **or** paging object (optional).
* @param projection Mongo projection (optional).
* @returns A Mongoose `find` query with `skip`, `limit`, and `sort` applied.
*/
protected findQuery(
filter: TFilterQuery<T>,
pageQuery?: QuerySortDto<T> | PageQueryDto<T>,
@@ -360,6 +473,20 @@ export abstract class BaseRepository<
projection?: ProjectionType<T>,
): Promise<T[]>;
/**
* Find documents matching `filter`.
*
* - `pageQuery` may be:
* * a **sort descriptor** (`QuerySortDto`) an array of `[field, dir]`
* * a **paging object** (`PageQueryDto`) `{ limit, skip, sort }`
* - Optional `projection` is forwarded to `findQuery`.
* - Delegates execution to `this.execute`, mapping raw docs to `this.cls`.
*
* @param filter Mongo filter selecting documents.
* @param pageQuery Sort descriptor **or** paging object.
* @param projection Optional Mongo projection.
* @returns Promise resolving to the found documents.
*/
async find(
filter: TFilterQuery<T>,
pageQuery?: QuerySortDto<T> | PageQueryDto<T>,
@@ -374,8 +501,16 @@ export abstract class BaseRepository<
return await this.execute(query, this.cls);
}
/**
* Ensure that population is possible for the current repository.
*
* Throws when either `populatePaths` or `clsPopulate` is not configured,
* preventing accidental calls to population-aware methods.
*
* @throws Error if population cannot be performed.
*/
private ensureCanPopulate(): void {
if (!this.populate || !this.clsPopulate) {
if (!this.populatePaths || !this.clsPopulate) {
throw new Error('Cannot populate query');
}
}
@@ -395,6 +530,20 @@ export abstract class BaseRepository<
projection?: ProjectionType<T>,
): Promise<TFull[]>;
/**
* Find documents that match `filters` and return them with the relations
* in `populatePaths` resolved.
*
* - `pageQuery` can be either a sort descriptor (`QuerySortDto`) or a full
* paging object (`PageQueryDto`).
* - Optional `projection` is forwarded to `findQuery`.
* - Throws if the repository is not configured for population.
*
* @param filters Mongo filter.
* @param pageQuery Sort or paging information.
* @param projection Optional Mongo projection.
* @returns Promise resolving to the populated documents.
*/
async findAndPopulate(
filters: TFilterQuery<T>,
pageQuery?: QuerySortDto<T> | PageQueryDto<T>,
@@ -403,30 +552,51 @@ export abstract class BaseRepository<
this.ensureCanPopulate();
if (Array.isArray(pageQuery)) {
const query = this.findQuery(filters, pageQuery, projection).populate(
this.populate,
this.populatePaths,
);
return await this.execute(query, this.clsPopulate!);
}
const query = this.findQuery(filters, pageQuery, projection).populate(
this.populate,
this.populatePaths,
);
return await this.execute(query, this.clsPopulate!);
}
/**
 * Build an un-executed query that selects **all** documents,
 * applies `sort`, and disables pagination (`limit` / `skip` = 0).
 *
 * @param sort Optional sort descriptor.
 * @returns Mongoose `find` query.
 */
protected findAllQuery(
  sort?: QuerySortDto<T>,
): Query<T[], T, object, T, 'find', object> {
  const unpaginated = { limit: 0, skip: 0, sort };
  return this.findQuery({}, unpaginated);
}
/**
 * Retrieve every document in the collection, optionally sorted.
 *
 * @param sort Optional sort descriptor.
 * @returns Promise resolving to the documents.
 */
async findAll(sort?: QuerySortDto<T>): Promise<T[]> {
  const unpaginated = { limit: 0, skip: 0, sort };
  return await this.find({}, unpaginated);
}
/**
* Retrieve every document with all `populatePaths` relations resolved.
*
* - Throws if population is not configured.
*
* @param sort Optional sort descriptor.
* @returns Promise resolving to the populated documents.
*/
async findAllAndPopulate(sort?: QuerySortDto<T>): Promise<TFull[]> {
this.ensureCanPopulate();
const query = this.findAllQuery(sort).populate(this.populate);
const query = this.findAllQuery(sort).populate(this.populatePaths);
return await this.execute(query, this.clsPopulate!);
}
@@ -463,19 +633,43 @@ export abstract class BaseRepository<
): Promise<TFull[]> {
this.ensureCanPopulate();
const query = this.findPageQuery(filters, pageQuery).populate(
this.populate,
this.populatePaths,
);
return await this.execute(query, this.clsPopulate!);
}
/**
* Return the total number of documents in the collection
* (uses MongoDBs `estimatedDocumentCount` for speed).
*
* @returns Promise resolving to the estimated document count.
*/
async countAll(): Promise<number> {
  // estimatedDocumentCount() reads collection metadata instead of scanning,
  // trading exactness for speed.
  const estimate = this.model.estimatedDocumentCount();
  return await estimate.exec();
}
/**
* Count documents that match the given criteria
* (falls back to all documents when `criteria` is omitted).
*
* @param criteria Optional Mongo filter.
* @returns Promise resolving to the exact document count.
*/
async count(criteria?: TFilterQuery<T>): Promise<number> {
  // countDocuments() performs an exact count; an omitted filter counts all.
  const exactCount = await this.model.countDocuments(criteria).exec();
  return exactCount;
}
/**
* Persist a single document and return it as an instance of `this.cls`.
*
* Internally:
* 1. `model.create()` inserts the raw DTO.
* 2. The Mongoose document is converted to a plain object with `leanOpts`.
* 3. `plainToClass()` transforms that object into the domain class.
*
* @param dto Data-transfer object describing the new record.
* @returns A hydrated instance of the domain class.
*/
async create(dto: DtoInfer<DtoAction.Create, Dto, U>): Promise<T> {
const doc = await this.model.create(dto);
@@ -486,6 +680,12 @@ export abstract class BaseRepository<
);
}
/**
* Persist an array of documents at once and map each result to `this.cls`.
*
* @param dtoArray Array of DTOs to insert.
* @returns Array of domain-class instances in the same order as `dtoArray`.
*/
async createMany(
dtoArray: DtoInfer<DtoAction.Create, Dto, U>[],
): Promise<T[]> {
@@ -496,6 +696,21 @@ export abstract class BaseRepository<
);
}
/**
* Update a **single** document and return the modified version.
*
* Behaviour :
* - `criteria` may be an `_id` string or any Mongo filter object.
* - `dto` is applied via `$set`; when `options.shouldFlatten` is true the
* payload is flattened (e.g. `"a.b": value`) before the update.
* - Fires the `pre|postUpdateValidate` hooks + events.
* - Throws if nothing matches the criteria or if `dto` is empty.
*
* @param criteria `_id` or filter selecting the target document.
* @param dto Partial update payload.
* @param options `new`, `upsert`, `shouldFlatten`, … (forwarded to Mongoose).
* @returns The updated document (with `new: true` by default).
*/
async updateOne<D extends Partial<U>>(
criteria: string | TFilterQuery<T>,
dto: UpdateQuery<DtoInfer<DtoAction.Update, Dto, D>>,
@@ -544,6 +759,18 @@ export abstract class BaseRepository<
return result;
}
/**
* Update **many** documents at once.
*
* - Applies `$set` with the supplied `dto`.
* - When `options.shouldFlatten` is true, flattens the payload first.
* - Does **not** run the validation / event hooks (use `updateOne` for that).
*
* @param filter Mongo filter selecting the documents to update.
* @param dto Update payload.
* @param options `{ shouldFlatten?: boolean }`.
* @returns Promise that resolves a MongoDB `UpdateWriteOpResult` describing the operation outcome.
*/
async updateMany<D extends Partial<U>>(
filter: TFilterQuery<T>,
dto: UpdateQuery<D>,
@@ -554,6 +781,17 @@ export abstract class BaseRepository<
});
}
/**
* Remove **one** document, unless it is marked as `builtin: true`.
*
* If `criteria` is a string, it is treated as the document's `_id`;
* otherwise it is used as a full Mongo filter.
* The filter is automatically augmented with `{ builtin: { $ne: true } }`
* to protect built-in records from deletion.
*
* @param criteria Document `_id` or Mongo filter.
* @returns Promise that resolves to Mongo's `DeleteResult`.
*/
async deleteOne(criteria: string | TFilterQuery<T>): Promise<DeleteResult> {
const filter = typeof criteria === 'string' ? { _id: criteria } : criteria;
@@ -562,10 +800,25 @@ export abstract class BaseRepository<
.exec();
}
/**
* Remove **many** documents that match `criteria`, excluding those flagged
* with `builtin: true`.
*
* @param criteria Mongo filter describing the set to delete.
* @returns Promise that resolves to Mongo's `DeleteResult`.
*/
async deleteMany(criteria: TFilterQuery<T>): Promise<DeleteResult> {
  // Augment the caller's filter so records flagged `builtin: true`
  // are never bulk-deleted.
  const guardedFilter = { ...criteria, builtin: { $ne: true } };
  return await this.model.deleteMany(guardedFilter);
}
/**
* Runs *before* create-validation logic.
* Override to perform domain-specific checks; throw to abort.
*
* @param _doc The document that will be created.
* @param _filterCriteria Optional additional criteria (e.g. conditional create).
* @param _updates Optional update pipeline when upserting.
*/
async preCreateValidate(
_doc: HydratedDocument<T>,
_filterCriteria?: FilterQuery<T>,
@@ -574,10 +827,23 @@ export abstract class BaseRepository<
// Nothing ...
}
/**
* Called *after* create-validation passes,
* but before persistence. Override for side-effects (audit logs, events, …).
*
* @param _validated The validated (not yet saved) document.
*/
async postCreateValidate(_validated: HydratedDocument<T>): Promise<void> {
// Intentionally a no-op: subclasses override to add side-effects
// (audit logs, events, ...) once create-validation has passed.
}
/**
* Runs *before* validating a single-document update.
* Override to enforce custom rules; throw to abort.
*
* @param _filterCriteria Query criteria used to locate the document.
* @param _updates Update payload or aggregation pipeline.
*/
async preUpdateValidate(
_filterCriteria: FilterQuery<T>,
_updates: UpdateWithAggregationPipeline | UpdateQuery<T>,
@@ -585,6 +851,13 @@ export abstract class BaseRepository<
// Nothing ...
}
/**
* Called *after* an update payload is validated,
* just before it is applied.
*
* @param _filterCriteria Same criteria passed to the update.
* @param _updates The validated update payload.
*/
async postUpdateValidate(
_filterCriteria: FilterQuery<T>,
_updates: UpdateWithAggregationPipeline | UpdateQuery<T>,
@@ -592,14 +865,33 @@ export abstract class BaseRepository<
// Nothing ...
}
/**
* Executes immediately before persisting a new document.
* Use to inject defaults, timestamps, or derive fields.
*
* @param _doc The document about to be saved.
*/
async preCreate(_doc: HydratedDocument<T>): Promise<void> {
// Intentionally a no-op: subclasses override to inject defaults,
// timestamps, or derived fields right before the document is persisted.
}
/**
* Fires right after a document is saved.
* Useful for emitting events or refreshing caches.
*
* @param _created The newly created document.
*/
async postCreate(_created: HydratedDocument<T>): Promise<void> {
// Intentionally a no-op: subclasses override to emit events or
// refresh caches after a document has been saved.
}
/**
* Runs before a `findOneAndUpdate` operation.
*
* @param _query The Mongoose query object.
* @param _criteria Original filter criteria.
* @param _updates Update payload or pipeline.
*/
async preUpdate(
_query: Query<D, D, unknown, T, 'findOneAndUpdate'>,
_criteria: TFilterQuery<T>,
@@ -608,6 +900,13 @@ export abstract class BaseRepository<
// Nothing ...
}
/**
* Runs before an `updateMany` operation.
*
* @param _query The Mongoose query object.
* @param _criteria Filter criteria.
* @param _updates Update payload or pipeline.
*/
async preUpdateMany(
_query: Query<D, D, unknown, T, 'updateMany'>,
_criteria: TFilterQuery<T>,
@@ -616,6 +915,12 @@ export abstract class BaseRepository<
// Nothing ...
}
/**
* Fires after an `updateMany` completes.
*
* @param _query The originating query.
* @param _updated Mongoose result object.
*/
async postUpdateMany(
_query: Query<D, D, unknown, T, 'updateMany'>,
_updated: any,
@@ -623,6 +928,12 @@ export abstract class BaseRepository<
// Nothing ...
}
/**
* Fires after a `findOneAndUpdate` completes.
*
* @param _query The originating query.
* @param _updated The updated document.
*/
async postUpdate(
_query: Query<D, D, unknown, T, 'findOneAndUpdate'>,
_updated: T,
@@ -630,6 +941,12 @@ export abstract class BaseRepository<
// Nothing ...
}
/**
* Runs before a `deleteOne` or `deleteMany`.
*
* @param _query The Mongoose query object.
* @param _criteria Filter criteria.
*/
async preDelete(
_query: Query<DeleteResult, D, unknown, T, 'deleteOne' | 'deleteMany'>,
_criteria: TFilterQuery<T>,
@@ -637,10 +954,73 @@ export abstract class BaseRepository<
// Nothing ...
}
/**
* Fires after a `deleteOne` or `deleteMany` completes.
*
* @param _query The originating query.
* @param _result MongoDB `DeleteResult`.
*/
async postDelete(
_query: Query<DeleteResult, D, unknown, T, 'deleteOne' | 'deleteMany'>,
_result: DeleteResult,
): Promise<void> {
// Intentionally a no-op: subclasses override to react to completed
// deletions (cache invalidation, event emission, ...).
}
/**
* Translate a `PageQueryDto` into MongoDB aggregation stages.
*
* Creates, in order:
* 1. **$sort** when `page.sort` is provided. Accepts `1 | -1 | 'asc' | 'desc'`
* (plus `'ascending' | 'descending'`) and normalises them to `1` or `-1`.
* 2. **$skip** when `page.skip` > 0.
* 3. **$limit** when `page.limit` > 0.
*
* If `page` is omitted, an empty array is returned so callers can safely
* spread the result into a pipeline without extra checks.
*
* @param page Optional pagination/sort descriptor.
* @returns Array of `$sort`, `$skip`, and `$limit` stages in the correct order.
*/
buildPaginationPipelineStages<T>(page?: PageQueryDto<T>): PipelineStage[] {
  // No descriptor at all -> no stages, so callers can spread the result
  // into a pipeline unconditionally.
  if (!page) {
    return [];
  }

  const pipeline: PipelineStage[] = [];

  // 1. $sort — normalise 'asc'/'ascending' (or numeric 1/-1) to 1 | -1.
  if (page.sort) {
    const [sortField, sortDir] = page.sort;
    const direction =
      typeof sortDir === 'number'
        ? sortDir
        : sortDir === 'asc' || sortDir === 'ascending'
          ? 1
          : -1;
    pipeline.push({
      $sort: { [sortField]: direction } as Record<string, 1 | -1>,
    });
  }

  // 2. $skip and 3. $limit — only emitted for non-zero values.
  if (page.skip) {
    pipeline.push({ $skip: page.skip });
  }
  if (page.limit) {
    pipeline.push({ $limit: page.limit });
  }

  return pipeline;
}
/**
* Populates the provided Mongoose documents with the relations listed in
* `this.populatePaths`, returning lean (plain) objects.
*
* @param docs Hydrated documents to enrich.
* @returns Promise resolving to the populated docs.
*/
async populate(docs: THydratedDocument<T>[]) {
  // Build one populate descriptor per configured relation path; `lean`
  // ensures the populated sub-documents come back as plain objects.
  const populateOptions = this.populatePaths.map((path) => ({
    path,
    options: { lean: true },
  }));
  return await this.model.populate(docs, populateOptions);
}
}

View File

@@ -0,0 +1,59 @@
/*
* Copyright © 2025 Hexastack. All rights reserved.
*
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
*/
import {
ArgumentMetadata,
BadRequestException,
Injectable,
PipeTransform,
} from '@nestjs/common';
import { ZodError, ZodTypeAny } from 'zod';
/**
* Validates a single query-parameter with a given Zod schema.
*
* @example
* // Controller usage
* @Get()
* listUsers(
* @Query(new ZodQueryParamPipe(z.coerce.number().int().min(1))) query: any,
* ) {
* // query.page is guaranteed to be a positive integer number
* }
*/
@Injectable()
export class ZodQueryParamPipe implements PipeTransform {
/**
 * @param schema   Zod schema the (extracted) query payload is validated against.
 * @param accessor Optional selector picking the value to validate out of the
 *                 raw query object (e.g. `(q) => q.page`). When omitted, the
 *                 whole query object is validated.
 */
constructor(
private readonly schema: ZodTypeAny,
private readonly accessor?: (query: any) => any,
) {}
/**
 * Validates the incoming query payload against the schema.
 *
 * @param query    Raw value supplied by Nest for this parameter.
 * @param metadata Nest argument metadata; only `type === 'query'` is processed.
 * @returns The schema-parsed (possibly coerced/transformed) payload.
 * @throws BadRequestException when validation fails; the Zod error is
 *         flattened into the response `details`.
 */
async transform(query: any, metadata: ArgumentMetadata) {
const payload = this.accessor ? this.accessor(query) : query;
// Pass through untouched unless this is a defined query-param value.
if (typeof payload === 'undefined' || metadata.type !== 'query') {
return payload;
}
const parsed = this.schema.safeParse(payload);
if (!parsed.success) {
// Flatten the Zod error so clients get a readable field -> issues map.
const error = parsed.error as ZodError;
throw new BadRequestException({
statusCode: 400,
error: 'Bad Request',
message: `Validation failed for query param`,
details: error.flatten(),
});
}
// NOTE(review): when `accessor` is set this returns only the parsed
// sub-value, not the original query with that value re-injected —
// confirm callers expect the extracted value rather than the full query.
return parsed.data;
}
}

View File

@@ -64,7 +64,7 @@ const contentTypes: TContentTypeFixtures['values'][] = [
},
{
name: 'subtitle',
label: 'Image',
label: 'Subtitle',
type: FieldType.file,
},
],

View File

@@ -1,5 +1,5 @@
/*
* Copyright © 2024 Hexastack. All rights reserved.
* Copyright © 2025 Hexastack. All rights reserved.
*
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
@@ -28,7 +28,7 @@ export const nlpSampleEntityFixtures: NlpSampleEntityCreateDto[] = [
{
sample: '2',
entity: '0',
value: '2',
value: '3',
},
{
sample: '3',

View File

@@ -18,6 +18,7 @@ import { OutgoingMessageFormat } from '@/chat/schemas/types/message';
import { BlockOptions, ContentOptions } from '@/chat/schemas/types/options';
import { NlpPattern, Pattern } from '@/chat/schemas/types/pattern';
import { QuickReplyType } from '@/chat/schemas/types/quick-reply';
import { WEB_CHANNEL_NAME } from '@/extensions/channels/web/settings';
import { modelInstance } from './misc';
@@ -391,3 +392,10 @@ export const blockCarouselMock = {
} as unknown as BlockFull;
export const blocks: BlockFull[] = [blockGetStarted, blockEmpty];
export const mockWebChannelData: SubscriberChannelDict[typeof WEB_CHANNEL_NAME] =
{
isSocket: true,
ipAddress: '1.1.1.1',
agent: 'Chromium',
};

View File

@@ -1,7 +1,7 @@
{
"name": "hexabot-ui",
"private": true,
"version": "2.2.8",
"version": "2.2.9",
"description": "Hexabot is a solution for creating and managing chatbots across multiple channels, leveraging AI for advanced conversational capabilities. It provides a user-friendly interface for building, training, and deploying chatbots with integrated support for various messaging platforms.",
"author": "Hexastack",
"license": "AGPL-3.0-only",

View File

@@ -9,7 +9,8 @@
"default_nlu_helper": "Default NLU Helper",
"default_llm_helper": "Default LLM Helper",
"default_storage_helper": "Default Storage Helper",
"default_nlu_penalty_factor": "NLU Penalty Factor"
"default_nlu_penalty_factor": "NLU Penalty Factor",
"default_flow_escape_helper": "Default Flow Escape Helper"
},
"help": {
"global_fallback": "Global fallback allows you to send custom messages when user entry does not match any of the block messages.",
@@ -17,6 +18,7 @@
"default_nlu_helper": "The NLU helper is responsible for processing and understanding user inputs, including tasks like intent prediction, language detection, and entity recognition.",
"default_llm_helper": "The LLM helper leverages advanced generative AI to perform tasks such as text generation, chat completion, and complex query responses.",
"default_storage_helper": "The storage helper defines where to store attachment files. By default, the default local storage helper stores them locally, but you can choose to use Minio or any other storage solution.",
"default_nlu_penalty_factor": "The NLU penalty factor is a coefficient (between 0 and 1) applied exclusively to NLU-based entity matching. It reduces the score contribution of patterns that match broadly (e.g. using wildcard values like Any) rather than specific entity values. This helps the engine prioritize blocks triggered by more precise NLU matches, without affecting other matching strategies such as text, regex, or interaction triggers."
"default_nlu_penalty_factor": "The NLU penalty factor is a coefficient (between 0 and 1) applied exclusively to NLU-based entity matching. It reduces the score contribution of patterns that match broadly (e.g. using wildcard values like Any) rather than specific entity values. This helps the engine prioritize blocks triggered by more precise NLU matches, without affecting other matching strategies such as text, regex, or interaction triggers.",
"default_flow_escape_helper": "The Flow Escape helper is used when the users message does not match any option in a flow. It assists the chatbot in deciding whether to re-prompt, provide an explanation, or end the conversation."
}
}

View File

@@ -491,7 +491,7 @@
"original_text": "Original Text",
"inputs": "Inputs",
"outputs": "Outputs",
"any": "- Any -",
"any": "Any",
"full_name": "First and last name",
"password": "Password"
},

View File

@@ -9,7 +9,8 @@
"default_nlu_helper": "Utilitaire NLU par défaut",
"default_llm_helper": "Utilitaire LLM par défaut",
"default_storage_helper": "Utilitaire de stockage par défaut",
"default_nlu_penalty_factor": "Facteur de pénalité NLU"
"default_nlu_penalty_factor": "Facteur de pénalité NLU",
"default_flow_escape_helper": "Utilitaire de secours de flux par défaut"
},
"help": {
"global_fallback": "La réponse de secours globale vous permet d'envoyer des messages personnalisés lorsque l'entrée de l'utilisateur ne correspond à aucun des messages des blocs.",
@@ -17,6 +18,7 @@
"default_nlu_helper": "Utilitaire du traitement et de la compréhension des entrées des utilisateurs, incluant des tâches telles que la prédiction d'intention, la détection de langue et la reconnaissance d'entités.",
"default_llm_helper": "Utilitaire responsable de l'intelligence artificielle générative avancée pour effectuer des tâches telles que la génération de texte, la complétion de chat et les réponses à des requêtes complexes.",
"default_storage_helper": "Utilitaire de stockage définit l'emplacement où stocker les fichiers joints. Par défaut, le stockage local les conserve localement, mais vous pouvez choisir d'utiliser Minio ou toute autre solution de stockage.",
"default_nlu_penalty_factor": "Le facteur de pénalité NLU est un coefficient (entre 0 et 1) appliqué exclusivement aux correspondances d'entités basées sur NLU. Il réduit la contribution au score des motifs qui correspondent de manière générale (par exemple, en utilisant des valeurs génériques comme Any) plutôt que des valeurs d'entité spécifiques. Cela permet au chatbot de donner la priorité aux blocs déclenchés par des correspondances NLU plus précises, sans affecter d'autres stratégies de correspondance telles que le texte, les expressions regex ou les déclencheurs d'interaction."
"default_nlu_penalty_factor": "Le facteur de pénalité NLU est un coefficient (entre 0 et 1) appliqué exclusivement aux correspondances d'entités basées sur NLU. Il réduit la contribution au score des motifs qui correspondent de manière générale (par exemple, en utilisant des valeurs génériques comme Any) plutôt que des valeurs d'entité spécifiques. Cela permet au chatbot de donner la priorité aux blocs déclenchés par des correspondances NLU plus précises, sans affecter d'autres stratégies de correspondance telles que le texte, les expressions regex ou les déclencheurs d'interaction.",
"default_flow_escape_helper": "Lutilitaire de secours de flux est utilisé lorsque le message de lutilisateur ne correspond à aucune option dans un scénario. Il aide le chatbot à décider sil faut reformuler la question, fournir une explication ou mettre fin à la conversation."
}
}

View File

@@ -492,7 +492,7 @@
"original_text": "Texte par défaut",
"inputs": "Ports d'entrée",
"outputs": "Ports de sortie",
"any": "- Toutes -",
"any": "Toutes",
"full_name": "Nom et Prénom",
"password": "Mot de passe"
},

View File

@@ -52,15 +52,9 @@ const AutoCompleteEntitySelect = <
Multiple extends boolean | undefined = true,
>(
{
label,
value,
entity,
format,
searchFields,
multiple,
onChange,
error,
helperText,
preprocess,
idKey = "id",
labelKey,
@@ -106,17 +100,11 @@ const AutoCompleteEntitySelect = <
return (
<AutoCompleteSelect<Value, Label, Multiple>
value={value}
onChange={onChange}
label={label}
multiple={multiple}
ref={ref}
idKey={idKey}
labelKey={labelKey}
options={options || []}
onSearch={onSearch}
error={error}
helperText={helperText}
loading={isFetching}
{...rest}
/>

View File

@@ -1,5 +1,5 @@
/*
* Copyright © 2024 Hexastack. All rights reserved.
* Copyright © 2025 Hexastack. All rights reserved.
*
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
@@ -17,7 +17,7 @@ import {
Typography,
useTheme,
} from "@mui/material";
import Autocomplete from "@mui/material/Autocomplete";
import Autocomplete, { AutocompleteProps } from "@mui/material/Autocomplete";
import { forwardRef, SyntheticEvent, useRef } from "react";
import { Input } from "@/app-components/inputs/Input";
@@ -30,13 +30,25 @@ import { NlpPattern } from "@/types/block.types";
import { INlpEntity } from "@/types/nlp-entity.types";
import { INlpValue } from "@/types/nlp-value.types";
type NlpPatternSelectProps = {
interface NlpPatternSelectProps
extends Omit<
AutocompleteProps<INlpEntity, true, true, false>,
| "onChange"
| "value"
| "options"
| "multiple"
| "disabled"
| "renderTags"
| "renderOptions"
| "renderInput"
> {
patterns: NlpPattern[];
onChange: (patterns: NlpPattern[]) => void;
};
noneLabel?: string;
}
const NlpPatternSelect = (
{ patterns, onChange }: NlpPatternSelectProps,
{ patterns, onChange, noneLabel = "", ...props }: NlpPatternSelectProps,
ref,
) => {
const inputRef = useRef(null);
@@ -80,23 +92,29 @@ const NlpPatternSelect = (
valueId: string,
): void => {
const newSelection = patterns.slice(0);
const update = newSelection.find(({ entity: e }) => e === name);
const idx = newSelection.findIndex(({ entity: e }) => e === name);
if (!update) {
if (idx === -1) {
throw new Error("Unable to find nlp entity");
}
if (valueId === id) {
update.match = "entity";
update.value = name;
newSelection[idx] = {
entity: newSelection[idx].entity,
match: "entity",
};
} else {
const value = getNlpValueFromCache(valueId);
if (!value) {
throw new Error("Unable to find nlp value in cache");
}
update.match = "value";
update.value = value.value;
newSelection[idx] = {
entity: newSelection[idx].entity,
match: "value",
value: value.value,
};
}
onChange(newSelection);
@@ -108,16 +126,17 @@ const NlpPatternSelect = (
);
}
const defaultValue =
options.filter(({ name }) =>
patterns.find(({ entity: entityName }) => entityName === name),
) || {};
const defaultValue = patterns
.map(({ entity: entityName }) =>
options.find(({ name }) => entityName === name),
)
.filter(Boolean) as INlpEntity[];
return (
<Autocomplete
ref={ref}
{...props}
size="medium"
fullWidth={true}
disabled={options.length === 0}
value={defaultValue}
multiple={true}
@@ -172,9 +191,9 @@ const NlpPatternSelect = (
const nlpValues = values.map((vId) =>
getNlpValueFromCache(vId),
) as INlpValue[];
const selectedValue = patterns.find(
(e) => e.entity === name,
)?.value;
const currentPattern = patterns.find((e) => e.entity === name);
const selectedValue =
currentPattern?.match === "value" ? currentPattern.value : null;
const { id: selectedId = id } =
nlpValues.find(({ value }) => value === selectedValue) || {};
@@ -193,7 +212,7 @@ const NlpPatternSelect = (
}
if (option === id) {
return t("label.any");
return `- ${noneLabel || t("label.any")} -`;
}
return option;

View File

@@ -14,7 +14,7 @@ import { forwardRef, useState } from "react";
import { Input } from "./Input";
export const PasswordInput = forwardRef<any, TextFieldProps>(
({ onChange, InputProps, ...rest }, ref) => {
({ InputProps, ...rest }, ref) => {
const [showPassword, setShowPassword] = useState(false);
const handleTogglePasswordVisibility = () => {
setShowPassword(!showPassword);
@@ -25,7 +25,6 @@ export const PasswordInput = forwardRef<any, TextFieldProps>(
ref={ref}
type={showPassword ? "text" : "password"}
{...rest}
onChange={onChange}
InputProps={{
...InputProps,
endAdornment: (

View File

@@ -75,7 +75,7 @@ export const DataGrid = <T extends GridValidRowModel = any>({
slots={slots}
slotProps={{
loadingOverlay: {
variant: "linear-progress",
variant: "skeleton",
noRowsVariant: "skeleton",
},
}}

View File

@@ -8,7 +8,7 @@
import AddIcon from "@mui/icons-material/Add";
import { Button } from "@mui/material";
import { FC, Fragment, useEffect } from "react";
import { FC, Fragment } from "react";
import { useFieldArray, useForm } from "react-hook-form";
import { ContentContainer, ContentItem } from "@/app-components/dialogs";
@@ -19,10 +19,14 @@ import { useToast } from "@/hooks/useToast";
import { useTranslate } from "@/hooks/useTranslate";
import { EntityType } from "@/services/types";
import { ComponentFormProps } from "@/types/common/dialogs.types";
import { ContentFieldType, IContentType } from "@/types/content-type.types";
import {
ContentFieldType,
IContentType,
IContentTypeAttributes,
} from "@/types/content-type.types";
import { FieldInput } from "./components/FieldInput";
import { FIELDS_FORM_DEFAULT_VALUES, READ_ONLY_FIELDS } from "./constants";
import { FIELDS_FORM_DEFAULT_VALUES } from "./constants";
export const ContentTypeForm: FC<ComponentFormProps<IContentType>> = ({
data: { defaultValues: contentType },
@@ -33,20 +37,36 @@ export const ContentTypeForm: FC<ComponentFormProps<IContentType>> = ({
const { toast } = useToast();
const { t } = useTranslate();
const {
reset,
control,
register,
setValue,
formState: { errors },
handleSubmit,
} = useForm<Partial<IContentType>>({
defaultValues: {
name: contentType?.name || "",
fields: contentType?.fields || FIELDS_FORM_DEFAULT_VALUES,
},
} = useForm<IContentType>({
defaultValues: contentType
? { name: contentType.name, fields: contentType.fields }
: {
name: "",
fields: FIELDS_FORM_DEFAULT_VALUES,
},
});
const { append, fields, remove } = useFieldArray({
name: "fields",
rules: {
validate: (fields) => {
const hasUniqueLabels =
new Set(fields.map((f) => f["label"] as string)).size ===
fields.length;
if (!hasUniqueLabels) {
toast.error(t("message.duplicate_labels_not_allowed"));
return false;
}
return true;
},
},
control,
});
const options = {
@@ -67,44 +87,14 @@ export const ContentTypeForm: FC<ComponentFormProps<IContentType>> = ({
EntityType.CONTENT_TYPE,
options,
);
const onSubmitForm = (params) => {
const labelCounts: Record<string, number> = params.fields.reduce(
(acc, field) => {
if (!field.label.trim()) return acc;
acc[field.label] = (acc[field.label] || 0) + 1;
return acc;
},
{} as Record<string, number>,
);
const hasDuplicates = Object.values(labelCounts).some(
(count: number) => count > 1,
);
if (hasDuplicates) {
toast.error(t("message.duplicate_labels_not_allowed"));
return;
}
if (contentType) {
const onSubmitForm = (params: IContentTypeAttributes) => {
if (contentType?.id) {
updateContentType({ id: contentType.id, params });
} else {
createContentType(params);
}
};
useEffect(() => {
if (contentType) {
reset({
name: contentType.name,
fields: contentType.fields || FIELDS_FORM_DEFAULT_VALUES,
});
} else {
reset({ name: "", fields: FIELDS_FORM_DEFAULT_VALUES });
}
}, [contentType, reset]);
return (
<Wrapper onSubmit={handleSubmit(onSubmitForm)} {...WrapperProps}>
<form onSubmit={handleSubmit(onSubmitForm)}>
@@ -121,20 +111,19 @@ export const ContentTypeForm: FC<ComponentFormProps<IContentType>> = ({
autoFocus
/>
</ContentItem>
{fields.map((f, index) => (
{fields.map((field, idx) => (
<ContentItem
key={f.id}
key={field.id}
display="flex"
justifyContent="space-between"
gap={2}
>
<FieldInput
setValue={setValue}
control={control}
idx={idx}
name={field.name}
remove={remove}
index={index}
disabled={READ_ONLY_FIELDS.includes(f.label as any)}
control={control}
setValue={setValue}
/>
</ContentItem>
))}
@@ -143,7 +132,11 @@ export const ContentTypeForm: FC<ComponentFormProps<IContentType>> = ({
startIcon={<AddIcon />}
variant="contained"
onClick={() =>
append({ label: "", name: "", type: ContentFieldType.TEXT })
append({
label: "",
name: "",
type: ContentFieldType.TEXT,
})
}
>
{t("button.add")}

View File

@@ -8,14 +8,8 @@
import DeleteOutlineIcon from "@mui/icons-material/DeleteOutline";
import { MenuItem } from "@mui/material";
import { useEffect } from "react";
import {
Control,
Controller,
UseFieldArrayRemove,
UseFormSetValue,
useWatch,
} from "react-hook-form";
import { useMemo } from "react";
import { Control, Controller, UseFormSetValue } from "react-hook-form";
import { IconButton } from "@/app-components/buttons/IconButton";
import { Input } from "@/app-components/inputs/Input";
@@ -23,26 +17,23 @@ import { useTranslate } from "@/hooks/useTranslate";
import { ContentFieldType, IContentType } from "@/types/content-type.types";
import { slugify } from "@/utils/string";
import { READ_ONLY_FIELDS } from "../constants";
export const FieldInput = ({
idx,
name,
remove,
control,
setValue,
index,
...props
}: {
index: number;
disabled?: boolean;
remove: UseFieldArrayRemove;
control: Control<Partial<IContentType>>;
setValue: UseFormSetValue<Partial<IContentType>>;
idx: number;
name: string;
remove: (index?: number | number[]) => void;
control: Control<IContentType>;
setValue: UseFormSetValue<IContentType>;
}) => {
const { t } = useTranslate();
const label = useWatch({
control: props.control,
name: `fields.${index}.label`,
});
useEffect(() => {
setValue(`fields.${index}.name`, label ? slugify(label) : "");
}, [label, setValue, index]);
const isDisabled = useMemo(() => idx < READ_ONLY_FIELDS.length, [idx]);
return (
<>
@@ -50,40 +41,46 @@ export const FieldInput = ({
variant="text"
color="error"
size="medium"
onClick={() => props.remove(index)}
disabled={props.disabled}
onClick={() => remove(idx)}
disabled={isDisabled}
>
<DeleteOutlineIcon strokeWidth={1} fontSize="medium" />
</IconButton>
<Controller
control={props.control}
name={`fields.${index}.label`}
control={control}
name={`fields.${idx}.label`}
rules={{ required: t("message.label_is_required") }}
render={({ field, fieldState }) => (
<Input
disabled={props.disabled}
disabled={isDisabled}
{...field}
label={t("label.label")}
error={!!fieldState.error}
helperText={fieldState.error?.message}
onChange={(e) => {
const value = e.target.value;
if (!name) {
setValue(`fields.${idx}.name`, value ? slugify(value) : "");
}
field.onChange(e);
}}
/>
)}
/>
<Controller
name={`fields.${index}.name`}
name={`fields.${idx}.name`}
render={({ field }) => (
<Input disabled {...field} label={t("label.name")} />
)}
control={props.control}
control={control}
/>
<Controller
name={`fields.${index}.type`}
control={props.control}
name={`fields.${idx}.type`}
control={control}
render={({ field }) => (
<Input
disabled={props.disabled}
disabled={isDisabled}
label={t("label.type")}
{...field}
select

View File

@@ -87,7 +87,7 @@ const componentMap: { [key in FileType]: FC<AttachmentInterface> } = {
<Box>
<Typography
component="span"
className="cs-message__text-content"
className="cs-message__custom-content"
mr={2}
>
{props.name}

View File

@@ -73,8 +73,9 @@ export const useInfiniteLiveSubscribers = (props: {
if (event.op === "newSubscriber") {
const { result } = normalizeAndCache(event.profile);
// Only update the unfiltered (all-subscribers) cache
queryClient.setQueryData(
[QueryType.infinite, EntityType.SUBSCRIBER, params],
[QueryType.infinite, EntityType.SUBSCRIBER, { where: {} }],
(oldData) => {
if (oldData) {
const data = oldData as InfiniteData<string[]>;

View File

@@ -20,7 +20,7 @@ div .cs-message--outgoing .cs-message__content {
background-color: var(--cs-message-outgoing-color) !important;
}
div .cs-message--outgoing .cs-message__text-content {
div .cs-message--outgoing .cs-message__custom-content {
color: var(--cs-message-outgoing-text-color) !important;
}

View File

@@ -32,6 +32,7 @@ import AutoCompleteEntitySelect from "@/app-components/inputs/AutoCompleteEntity
import FileUploadButton from "@/app-components/inputs/FileInput";
import { FilterTextfield } from "@/app-components/inputs/FilterTextfield";
import { Input } from "@/app-components/inputs/Input";
import NlpPatternSelect from "@/app-components/inputs/NlpPatternSelect";
import {
ActionColumnLabel,
getActionsColumn,
@@ -51,6 +52,7 @@ import { useSearch } from "@/hooks/useSearch";
import { useToast } from "@/hooks/useToast";
import { useTranslate } from "@/hooks/useTranslate";
import { EntityType, Format } from "@/services/types";
import { NlpPattern } from "@/types/block.types";
import { ILanguage } from "@/types/language.types";
import {
INlpDatasetSample,
@@ -79,6 +81,7 @@ export default function NlpSample() {
const queryClient = useQueryClient();
const [type, setType] = useState<NlpSampleType | "all">("all");
const [language, setLanguage] = useState<string | undefined>(undefined);
const [patterns, setPatterns] = useState<NlpPattern[]>([]);
const hasPermission = useHasPermission();
const getNlpEntityFromCache = useGetFromCache(EntityType.NLP_ENTITY);
const getNlpValueFromCache = useGetFromCache(EntityType.NLP_VALUE);
@@ -86,11 +89,17 @@ export default function NlpSample() {
EntityType.NLP_SAMPLE_ENTITY,
);
const getLanguageFromCache = useGetFromCache(EntityType.LANGUAGE);
const { onSearch, searchPayload, searchText } = useSearch<INlpSample>(
const { onSearch, searchPayload, searchText } = useSearch<
INlpSample & { patterns: NlpPattern[] }
>(
{
$eq: [
...(type !== "all" ? [{ type }] : []),
...(language ? [{ language }] : []),
// We send only value match patterns
...(patterns
? [{ patterns: patterns.filter(({ match }) => match === "value") }]
: []),
],
$iLike: ["text"],
},
@@ -212,6 +221,7 @@ export default function NlpSample() {
{row.entities
.map((e) => getSampleEntityFromCache(e) as INlpSampleEntity)
.filter((e) => !!e)
.sort((a, b) => String(a.entity).localeCompare(String(b.entity)))
.map((entity) => (
<ChipEntity
key={entity.id}
@@ -425,6 +435,21 @@ export default function NlpSample() {
</Button>
</ButtonGroup>
</Grid>
<Grid
container
display="flex"
flexDirection="row"
gap={2}
direction="row"
mt={2}
>
<NlpPatternSelect
patterns={patterns}
onChange={setPatterns}
fullWidth={true}
noneLabel={t("label.select")}
/>
</Grid>
</Grid>
<Grid mt={3}>

View File

@@ -20,8 +20,8 @@ import { PasswordInput } from "@/app-components/inputs/PasswordInput";
import { useTranslate } from "@/hooks/useTranslate";
import { EntityType, Format } from "@/services/types";
import { AttachmentResourceRef } from "@/types/attachment.types";
import { IEntityMapTypes } from "@/types/base.types";
import { IBlock } from "@/types/block.types";
import { IHelper } from "@/types/helper.types";
import { ISetting, SettingType } from "@/types/setting.types";
import { MIME_TYPES } from "@/utils/attachment";
@@ -32,6 +32,12 @@ interface RenderSettingInputProps {
isDisabled?: (setting: ISetting) => boolean;
}
const DEFAULT_HELPER_ENTITIES: Record<string, keyof IEntityMapTypes> = {
["default_nlu_helper"]: EntityType.NLU_HELPER,
["default_llm_helper"]: EntityType.LLM_HELPER,
["default_flow_escape_helper"]: EntityType.FLOW_ESCAPE_HELPER,
["default_storage_helper"]: EntityType.STORAGE_HELPER,
};
const SettingInput: React.FC<RenderSettingInputProps> = ({
setting,
field,
@@ -125,54 +131,25 @@ const SettingInput: React.FC<RenderSettingInputProps> = ({
{...rest}
/>
);
} else if (setting.label === "default_nlu_helper") {
} else if (
setting.label.startsWith("default_") &&
setting.label.endsWith("_helper")
) {
const { onChange, ...rest } = field;
return (
<AutoCompleteEntitySelect<IHelper, "name", false>
<AutoCompleteEntitySelect<any, string, boolean>
searchFields={["name"]}
entity={EntityType.NLU_HELPER}
entity={DEFAULT_HELPER_ENTITIES[setting.label]}
format={Format.BASIC}
labelKey="name"
idKey="name"
label={t("label.default_nlu_helper")}
helperText={t("help.default_nlu_helper")}
multiple={false}
onChange={(_e, selected, ..._) => onChange(selected?.name)}
{...rest}
/>
);
} else if (setting.label === "default_llm_helper") {
const { onChange, ...rest } = field;
return (
<AutoCompleteEntitySelect<IHelper, "name", false>
searchFields={["name"]}
entity={EntityType.LLM_HELPER}
format={Format.BASIC}
labelKey="name"
idKey="name"
label={t("label.default_llm_helper")}
helperText={t("help.default_llm_helper")}
multiple={false}
onChange={(_e, selected, ..._) => onChange(selected?.name)}
{...rest}
/>
);
} else if (setting.label === "default_storage_helper") {
const { onChange, ...rest } = field;
return (
<AutoCompleteEntitySelect<IHelper, "name", false>
searchFields={["name"]}
entity={EntityType.STORAGE_HELPER}
format={Format.BASIC}
labelKey="name"
idKey="name"
label={t("label.default_storage_helper")}
helperText={t("help.default_storage_helper")}
multiple={false}
onChange={(_e, selected, ..._) => onChange(selected?.name)}
labelKey={setting.config?.labelKey || "name"}
idKey={setting.config?.idKey || "name"}
label={label}
helperText={helperText}
multiple={!!setting.config?.multiple}
onChange={(_e, selected, ..._) =>
onChange(selected?.[setting.config?.idKey || "name"])
}
{...rest}
/>
);

View File

@@ -1,5 +1,5 @@
/*
* Copyright © 2024 Hexastack. All rights reserved.
* Copyright © 2025 Hexastack. All rights reserved.
*
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
@@ -63,17 +63,9 @@ const ButtonsMessageForm = () => {
name="message.buttons"
control={control}
defaultValue={block?.message.buttons || []}
render={({ field }) => {
const { value, onChange } = field;
return (
<ButtonsInput
fieldPath="message.buttons"
value={value}
onChange={onChange}
/>
);
}}
render={({ field }) => (
<ButtonsInput {...field} fieldPath="message.buttons" />
)}
/>
</ContentItem>
</>

View File

@@ -295,21 +295,14 @@ const ListMessageForm = () => {
name="options.content.buttons"
control={control}
defaultValue={content?.buttons || []}
render={({ field }) => {
const { value, onChange } = field;
return (
<ButtonsInput
fieldPath="options.content.buttons"
value={value}
onChange={(buttons) => {
onChange(buttons);
}}
disablePayload={true}
maxInput={displayMode === "list" ? 1 : 2}
/>
);
}}
render={({ field }) => (
<ButtonsInput
{...field}
fieldPath="options.content.buttons"
disablePayload={true}
maxInput={displayMode === "list" ? 1 : 2}
/>
)}
/>
</ContentItem>
</Grid>

View File

@@ -103,6 +103,7 @@ const PatternInput: FC<PatternInputProps> = ({
<NlpPatternSelect
patterns={pattern as NlpPattern[]}
onChange={setPattern}
fullWidth={true}
/>
)}
{["payload", "content", "menu"].includes(patternType) ? (

View File

@@ -264,6 +264,19 @@ const Diagrams = () => {
return;
}
const sourceId = entity.getSourcePort().getParent().getOptions()
.id as string;
const targetId = entity.getTargetPort().getParent().getOptions()
.id as string;
const previousData = getBlockFromCache(sourceId!);
// Only add the link if targetId doesn't already exist in nextBlocks
if (previousData?.nextBlocks?.includes(targetId)) {
model.removeLink(link);
return;
}
link.setLocked(true);
link.registerListener({
selectionChanged(event: any) {
@@ -280,17 +293,15 @@ const Diagrams = () => {
}
});
const sourceId = entity.getSourcePort().getParent().getOptions()
.id as string;
const targetId = entity.getTargetPort().getParent().getOptions()
.id as string;
const previousData = getBlockFromCache(sourceId!);
if (
// @ts-expect-error undefined attr
entity.getSourcePort().getOptions()?.label ===
BlockPorts.nextBlocksOutPort
) {
// Only add the link if targetId exists, skip if targetId is null
if (!targetId) {
return;
}
const nextBlocks = [
...(previousData?.nextBlocks || []),
...(targetId ? [targetId] : []),

View File

@@ -56,24 +56,27 @@ export const useFind = <
entity,
);
const getFromCache = useGetFromCache(entity);
const { data: total } = useCount(entity, params["where"], {
const countQuery = useCount(entity, params["where"], {
enabled: hasCount,
});
const { dataGridPaginationProps, pageQueryPayload } = usePagination(
total?.count,
countQuery.data?.count,
initialPaginationState,
initialSortState,
hasCount,
);
const normalizedParams = { ...pageQueryPayload, ...(params || {}) };
const enabled = !!total || !hasCount;
const enabled = !!countQuery.data || !hasCount;
const { data: ids, ...normalizedQuery } = useQuery({
enabled,
queryFn: async () => {
const data = await api.find(
normalizedParams,
format === Format.FULL && (POPULATE_BY_TYPE[entity] as P),
);
const data =
!hasCount || (hasCount && !!countQuery.data?.count)
? await api.find(
normalizedParams,
format === Format.FULL && (POPULATE_BY_TYPE[entity] as P),
)
: [];
const { result } = normalizeAndCache(data);
return result;
@@ -100,7 +103,11 @@ export const useFind = <
dataGridProps: {
...dataGridPaginationProps,
rows: data || [],
loading: normalizedQuery.isLoading || normalizedQuery.isFetching,
loading:
normalizedQuery.isLoading ||
normalizedQuery.isFetching ||
countQuery.isLoading ||
countQuery.isFetching,
},
};
};

View File

@@ -74,6 +74,7 @@ export const ROUTES = {
[EntityType.HELPER]: "/helper",
[EntityType.NLU_HELPER]: "/helper/nlu",
[EntityType.LLM_HELPER]: "/helper/llm",
  [EntityType.FLOW_ESCAPE_HELPER]: "/helper/flow_escape",
[EntityType.STORAGE_HELPER]: "/helper/storage",
} as const;

View File

@@ -304,6 +304,14 @@ export const LlmHelperEntity = new schema.Entity(
},
);
export const FlowEscapeHelperEntity = new schema.Entity(
EntityType.FLOW_ESCAPE_HELPER,
undefined,
{
idAttribute: ({ name }) => name,
},
);
export const StorageHelperEntity = new schema.Entity(
EntityType.STORAGE_HELPER,
undefined,
@@ -341,5 +349,6 @@ export const ENTITY_MAP = {
[EntityType.HELPER]: HelperEntity,
[EntityType.NLU_HELPER]: NluHelperEntity,
[EntityType.LLM_HELPER]: LlmHelperEntity,
[EntityType.FLOW_ESCAPE_HELPER]: FlowEscapeHelperEntity,
[EntityType.STORAGE_HELPER]: StorageHelperEntity,
} as const;

View File

@@ -38,6 +38,7 @@ export enum EntityType {
HELPER = "Helper",
NLU_HELPER = "NluHelper",
LLM_HELPER = "LlmHelper",
FLOW_ESCAPE_HELPER = "FlowEscapeHelper",
STORAGE_HELPER = "StorageHelper",
}

View File

@@ -116,6 +116,7 @@ export const POPULATE_BY_TYPE = {
[EntityType.HELPER]: [],
[EntityType.NLU_HELPER]: [],
[EntityType.LLM_HELPER]: [],
[EntityType.FLOW_ESCAPE_HELPER]: [],
[EntityType.STORAGE_HELPER]: [],
} as const;
@@ -208,6 +209,7 @@ export interface IEntityMapTypes {
[EntityType.HELPER]: IEntityTypes<IHelperAttributes, IHelper>;
[EntityType.NLU_HELPER]: IEntityTypes<IHelperAttributes, IHelper>;
[EntityType.LLM_HELPER]: IEntityTypes<IHelperAttributes, IHelper>;
[EntityType.FLOW_ESCAPE_HELPER]: IEntityTypes<IHelperAttributes, IHelper>;
[EntityType.STORAGE_HELPER]: IEntityTypes<IHelperAttributes, IHelper>;
}

View File

@@ -68,12 +68,19 @@ export interface PayloadPattern {
type?: PayloadType;
}
export type NlpPattern = {
export type NlpEntityMatchPattern = {
entity: string;
match: "value" | "entity";
match: "entity";
};
export type NlpValueMatchPattern = {
entity: string;
match: "value";
value: string;
};
export type NlpPattern = NlpEntityMatchPattern | NlpValueMatchPattern;
export type Pattern = null | string | PayloadPattern | NlpPattern[];
export type PatternType =

4
package-lock.json generated
View File

@@ -20,7 +20,7 @@
},
"frontend": {
"name": "hexabot-ui",
"version": "2.2.8",
"version": "2.2.9",
"license": "AGPL-3.0-only",
"dependencies": {
"@chatscope/chat-ui-kit-react": "^2.0.3",
@@ -11451,7 +11451,7 @@
},
"widget": {
"name": "hexabot-chat-widget",
"version": "2.2.8",
"version": "2.2.9",
"license": "AGPL-3.0-only",
"dependencies": {
"autolinker": "^4.1.5",

View File

@@ -5,7 +5,7 @@
"frontend",
"widget"
],
"version": "2.2.8",
"version": "2.2.9",
"description": "Hexabot is a solution for creating and managing chatbots across multiple channels, leveraging AI for advanced conversational capabilities. It provides a user-friendly interface for building, training, and deploying chatbots with integrated support for various messaging platforms.",
"author": "Hexastack",
"license": "AGPL-3.0-only",

View File

@@ -1,6 +1,6 @@
{
"name": "hexabot-chat-widget",
"version": "2.2.8",
"version": "2.2.9",
"description": "Hexabot is a solution for creating and managing chatbots across multiple channels, leveraging AI for advanced conversational capabilities. It provides a user-friendly interface for building, training, and deploying chatbots with integrated support for various messaging platforms.",
"author": "Hexastack",
"license": "AGPL-3.0-only",

View File

@@ -24,11 +24,10 @@ const TextMessage: React.FC<TextMessageProps> = ({ message }) => {
useEffect(() => {
autoLink();
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [message]);
const autoLink = () => {
if (message.direction === Direction.received && messageTextRef.current) {
if (messageTextRef.current) {
const text = messageTextRef.current.innerText;
messageTextRef.current.innerHTML = Autolinker.link(text, {