Mirror of https://github.com/hexastack/hexabot (synced 2025-05-08 14:54:45 +00:00)

Merge branch 'main' into fix/define-content-type-fields-type
Commit: a782c6d3ba

api/package-lock.json (generated, 2 changes)
@ -32,7 +32,6 @@
|
||||
"cache-manager-redis-yet": "^4.1.2",
|
||||
"connect-mongo": "^5.1.0",
|
||||
"cookie-parser": "^1.4.6",
|
||||
"dotenv": "^16.3.1",
|
||||
"express-session": "^1.17.3",
|
||||
"handlebars": "^4.7.8",
|
||||
"module-alias": "^2.2.3",
|
||||
@ -86,6 +85,7 @@
|
||||
"@types/uuid": "^9.0.7",
|
||||
"@typescript-eslint/eslint-plugin": "^6.0.0",
|
||||
"@typescript-eslint/parser": "^6.0.0",
|
||||
"dotenv": "^16.3.1",
|
||||
"eslint": "^8.42.0",
|
||||
"eslint-config-prettier": "^9.0.0",
|
||||
"eslint-import-resolver-typescript": "~3.6.1",
|
||||
|
@ -67,7 +67,6 @@
|
||||
"cache-manager-redis-yet": "^4.1.2",
|
||||
"connect-mongo": "^5.1.0",
|
||||
"cookie-parser": "^1.4.6",
|
||||
"dotenv": "^16.3.1",
|
||||
"express-session": "^1.17.3",
|
||||
"handlebars": "^4.7.8",
|
||||
"module-alias": "^2.2.3",
|
||||
@ -121,6 +120,7 @@
|
||||
"@types/uuid": "^9.0.7",
|
||||
"@typescript-eslint/eslint-plugin": "^6.0.0",
|
||||
"@typescript-eslint/parser": "^6.0.0",
|
||||
"dotenv": "^16.3.1",
|
||||
"eslint": "^8.42.0",
|
||||
"eslint-config-prettier": "^9.0.0",
|
||||
"eslint-import-resolver-typescript": "~3.6.1",
|
||||
|
@ -93,8 +93,9 @@ export class BotStatsService extends BaseService<BotStats> {
|
||||
) {
|
||||
this.eventEmitter.emit(
|
||||
'hook:stats:entry',
|
||||
'retention',
|
||||
BotStatsType.retention,
|
||||
'Retentioned users',
|
||||
subscriber,
|
||||
);
|
||||
}
|
||||
}
|
||||
@ -106,7 +107,11 @@ export class BotStatsService extends BaseService<BotStats> {
|
||||
* @param name - The name or identifier of the statistics entry (e.g., a specific feature or component being tracked).
|
||||
*/
|
||||
@OnEvent('hook:stats:entry')
|
||||
async handleStatEntry(type: BotStatsType, name: string): Promise<void> {
|
||||
async handleStatEntry(
|
||||
type: BotStatsType,
|
||||
name: string,
|
||||
_subscriber: Subscriber,
|
||||
): Promise<void> {
|
||||
const day = new Date();
|
||||
day.setMilliseconds(0);
|
||||
day.setSeconds(0);
|
||||
|
@ -35,6 +35,7 @@ import { ChannelModule } from './channel/channel.module';
|
||||
import { ChatModule } from './chat/chat.module';
|
||||
import { CmsModule } from './cms/cms.module';
|
||||
import { config } from './config';
|
||||
import { ExtensionModule } from './extension/extension.module';
|
||||
import extraModules from './extra';
|
||||
import { HelperModule } from './helper/helper.module';
|
||||
import { I18nModule } from './i18n/i18n.module';
|
||||
@ -152,6 +153,7 @@ const i18nOptions: I18nOptions = {
|
||||
max: config.cache.max,
|
||||
}),
|
||||
MigrationModule,
|
||||
ExtensionModule,
|
||||
...extraModules,
|
||||
],
|
||||
controllers: [AppController],
|
||||
|
@ -361,4 +361,30 @@ describe('BlockController', () => {
|
||||
).toBeDefined();
|
||||
expect(result.patterns).toEqual(updateBlock.patterns);
|
||||
});
|
||||
|
||||
it('should update the block trigger with a content payloadType payload', async () => {
|
||||
jest.spyOn(blockService, 'updateOne');
|
||||
const updateBlock: BlockUpdateDto = {
|
||||
patterns: [
|
||||
{
|
||||
label: 'Content label',
|
||||
value: 'Content value',
|
||||
type: PayloadType.content,
|
||||
},
|
||||
],
|
||||
};
|
||||
const result = await blockController.updateOne(block.id, updateBlock);
|
||||
expect(blockService.updateOne).toHaveBeenCalledWith(block.id, updateBlock);
|
||||
|
||||
expect(
|
||||
result.patterns.find(
|
||||
(pattern) =>
|
||||
typeof pattern === 'object' &&
|
||||
'type' in pattern &&
|
||||
pattern.type === PayloadType.content &&
|
||||
pattern,
|
||||
),
|
||||
).toBeDefined();
|
||||
expect(result.patterns).toEqual(updateBlock.patterns);
|
||||
});
|
||||
});
|
||||
|
@ -16,6 +16,7 @@ import {
|
||||
UpdateWithAggregationPipeline,
|
||||
} from 'mongoose';
|
||||
|
||||
import { BotStatsType } from '@/analytics/schemas/bot-stats.schema';
|
||||
import { BaseRepository } from '@/utils/generics/base-repository';
|
||||
import { TFilterQuery } from '@/utils/types/filter.types';
|
||||
|
||||
@ -47,7 +48,7 @@ export class SubscriberRepository extends BaseRepository<
|
||||
async postCreate(created: SubscriberDocument): Promise<void> {
|
||||
this.eventEmitter.emit(
|
||||
'hook:stats:entry',
|
||||
'new_users',
|
||||
BotStatsType.new_users,
|
||||
'New users',
|
||||
created,
|
||||
);
|
||||
|
@ -42,4 +42,5 @@ export enum PayloadType {
|
||||
button = 'button',
|
||||
outcome = 'outcome',
|
||||
menu = 'menu',
|
||||
content = 'content',
|
||||
}
|
||||
|
@ -243,8 +243,8 @@ describe('BlockService', () => {
|
||||
await botService.startConversation(event, block);
|
||||
expect(hasBotSpoken).toEqual(true);
|
||||
expect(triggeredEvents).toEqual([
|
||||
['popular', 'hasNextBlocks'],
|
||||
['new_conversations', 'New conversations'],
|
||||
['popular', 'hasNextBlocks', webSubscriber],
|
||||
['new_conversations', 'New conversations', webSubscriber],
|
||||
]);
|
||||
clearMock.mockClear();
|
||||
});
|
||||
@ -301,7 +301,7 @@ describe('BlockService', () => {
|
||||
const captured = await botService.processConversationMessage(event);
|
||||
expect(captured).toBe(true);
|
||||
expect(triggeredEvents).toEqual([
|
||||
['existing_conversations', 'Existing conversations'],
|
||||
['existing_conversations', 'Existing conversations', webSubscriber],
|
||||
]);
|
||||
clearMock.mockClear();
|
||||
});
|
||||
|
@ -9,6 +9,7 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { EventEmitter2 } from '@nestjs/event-emitter';
|
||||
|
||||
import { BotStatsType } from '@/analytics/schemas/bot-stats.schema';
|
||||
import EventWrapper from '@/channel/lib/EventWrapper';
|
||||
import { LoggerService } from '@/logger/logger.service';
|
||||
import { SettingService } from '@/setting/services/setting.service';
|
||||
@ -65,8 +66,18 @@ export class BotService {
|
||||
.getHandler()
|
||||
.sendMessage(event, envelope, options, context);
|
||||
|
||||
this.eventEmitter.emit('hook:stats:entry', 'outgoing', 'Outgoing');
|
||||
this.eventEmitter.emit('hook:stats:entry', 'all_messages', 'All Messages');
|
||||
this.eventEmitter.emit(
|
||||
'hook:stats:entry',
|
||||
BotStatsType.outgoing,
|
||||
'Outgoing',
|
||||
recipient,
|
||||
);
|
||||
this.eventEmitter.emit(
|
||||
'hook:stats:entry',
|
||||
BotStatsType.all_messages,
|
||||
'All Messages',
|
||||
recipient,
|
||||
);
|
||||
|
||||
// Trigger sent message event
|
||||
const sentMessage: MessageCreateDto = {
|
||||
@ -165,7 +176,7 @@ export class BotService {
|
||||
return await this.triggerBlock(event, convo, attachedBlock, fallback);
|
||||
} catch (err) {
|
||||
this.logger.error('Unable to retrieve attached block', err);
|
||||
this.eventEmitter.emit('hook:conversation:end', convo, true);
|
||||
this.eventEmitter.emit('hook:conversation:end', convo);
|
||||
}
|
||||
} else if (
|
||||
Array.isArray(block.nextBlocks) &&
|
||||
@ -200,7 +211,7 @@ export class BotService {
|
||||
'Block outcome did not match any of the next blocks',
|
||||
convo,
|
||||
);
|
||||
this.eventEmitter.emit('hook:conversation:end', convo, false);
|
||||
this.eventEmitter.emit('hook:conversation:end', convo);
|
||||
}
|
||||
} else {
|
||||
// Conversation continues : Go forward to next blocks
|
||||
@ -218,11 +229,11 @@ export class BotService {
|
||||
} else {
|
||||
// We need to end the conversation in this case
|
||||
this.logger.debug('No attached/next blocks to execute ...');
|
||||
this.eventEmitter.emit('hook:conversation:end', convo, false);
|
||||
this.eventEmitter.emit('hook:conversation:end', convo);
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.error('Unable to process/send message.', err);
|
||||
this.eventEmitter.emit('hook:conversation:end', convo, true);
|
||||
this.eventEmitter.emit('hook:conversation:end', convo);
|
||||
}
|
||||
}
|
||||
|
||||
@ -293,7 +304,12 @@ export class BotService {
|
||||
|
||||
if (next) {
|
||||
// Increment stats about popular blocks
|
||||
this.eventEmitter.emit('hook:stats:entry', 'popular', next.name);
|
||||
this.eventEmitter.emit(
|
||||
'hook:stats:entry',
|
||||
BotStatsType.popular,
|
||||
next.name,
|
||||
convo.sender,
|
||||
);
|
||||
// Go next!
|
||||
this.logger.debug('Respond to nested conversion! Go next ', next.id);
|
||||
try {
|
||||
@ -309,19 +325,19 @@ export class BotService {
|
||||
await this.triggerBlock(event, updatedConversation, next, fallback);
|
||||
} catch (err) {
|
||||
this.logger.error('Unable to store context data!', err);
|
||||
return this.eventEmitter.emit('hook:conversation:end', convo, true);
|
||||
return this.eventEmitter.emit('hook:conversation:end', convo);
|
||||
}
|
||||
return true;
|
||||
} else {
|
||||
// Conversation is still active, but there's no matching block to call next
|
||||
// We'll end the conversation but this message is probably lost in time and space.
|
||||
this.logger.debug('No matching block found to call next ', convo.id);
|
||||
this.eventEmitter.emit('hook:conversation:end', convo, false);
|
||||
this.eventEmitter.emit('hook:conversation:end', convo);
|
||||
return false;
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.error('Unable to populate the next blocks!', err);
|
||||
this.eventEmitter.emit('hook:conversation:end', convo, true);
|
||||
this.eventEmitter.emit('hook:conversation:end', convo);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
@ -352,8 +368,9 @@ export class BotService {
|
||||
|
||||
this.eventEmitter.emit(
|
||||
'hook:stats:entry',
|
||||
'existing_conversations',
|
||||
BotStatsType.existing_conversations,
|
||||
'Existing conversations',
|
||||
subscriber,
|
||||
);
|
||||
this.logger.debug('Conversation has been captured! Responding ...');
|
||||
return await this.handleIncomingMessage(conversation, event);
|
||||
@ -373,10 +390,15 @@ export class BotService {
|
||||
* @param block - Starting block
|
||||
*/
|
||||
async startConversation(event: EventWrapper<any, any>, block: BlockFull) {
|
||||
// Increment popular stats
|
||||
this.eventEmitter.emit('hook:stats:entry', 'popular', block.name);
|
||||
// Launching a new conversation
|
||||
const subscriber = event.getSender();
|
||||
// Increment popular stats
|
||||
this.eventEmitter.emit(
|
||||
'hook:stats:entry',
|
||||
BotStatsType.popular,
|
||||
block.name,
|
||||
subscriber,
|
||||
);
|
||||
|
||||
try {
|
||||
const convo = await this.conversationService.create({
|
||||
@ -384,8 +406,9 @@ export class BotService {
|
||||
});
|
||||
this.eventEmitter.emit(
|
||||
'hook:stats:entry',
|
||||
'new_conversations',
|
||||
BotStatsType.new_conversations,
|
||||
'New conversations',
|
||||
subscriber,
|
||||
);
|
||||
|
||||
try {
|
||||
@ -405,7 +428,7 @@ export class BotService {
|
||||
return this.triggerBlock(event, updatedConversation, block, false);
|
||||
} catch (err) {
|
||||
this.logger.error('Unable to store context data!', err);
|
||||
this.eventEmitter.emit('hook:conversation:end', convo, true);
|
||||
this.eventEmitter.emit('hook:conversation:end', convo);
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.error('Unable to start a new conversation with ', err);
|
||||
|
@ -11,6 +11,7 @@ import { EventEmitter2, OnEvent } from '@nestjs/event-emitter';
|
||||
import mime from 'mime';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
|
||||
import { BotStatsType } from '@/analytics/schemas/bot-stats.schema';
|
||||
import { AttachmentService } from '@/attachment/services/attachment.service';
|
||||
import {
|
||||
AttachmentAccess,
|
||||
@ -149,11 +150,17 @@ export class ChatService {
|
||||
}
|
||||
|
||||
this.websocketGateway.broadcastMessageReceived(populatedMsg, subscriber);
|
||||
this.eventEmitter.emit('hook:stats:entry', 'incoming', 'Incoming');
|
||||
this.eventEmitter.emit(
|
||||
'hook:stats:entry',
|
||||
'all_messages',
|
||||
BotStatsType.incoming,
|
||||
'Incoming',
|
||||
subscriber,
|
||||
);
|
||||
this.eventEmitter.emit(
|
||||
'hook:stats:entry',
|
||||
BotStatsType.all_messages,
|
||||
'All Messages',
|
||||
subscriber,
|
||||
);
|
||||
} catch (err) {
|
||||
this.logger.error('Unable to log received message.', err, event);
|
||||
@ -248,7 +255,7 @@ export class ChatService {
|
||||
};
|
||||
|
||||
this.eventEmitter.emit('hook:chatbot:sent', sentMessage, event);
|
||||
this.eventEmitter.emit('hook:stats:entry', 'echo', 'Echo');
|
||||
this.eventEmitter.emit('hook:stats:entry', 'echo', 'Echo', recipient);
|
||||
} catch (err) {
|
||||
this.logger.error('Unable to log echo message', err, event);
|
||||
}
|
||||
|
@ -145,7 +145,7 @@ export class ContentService extends BaseService<
|
||||
...acc,
|
||||
{
|
||||
title: String(title),
|
||||
status: Boolean(status),
|
||||
status: status.trim().toLowerCase() === 'true',
|
||||
entity: targetContentType,
|
||||
dynamicFields: Object.keys(rest)
|
||||
.filter((key) =>
|
||||
|
api/src/extension/cleanup.service.spec.ts (new file, 109 lines)
@ -0,0 +1,109 @@
|
||||
/*
|
||||
* Copyright © 2025 Hexastack. All rights reserved.
|
||||
*
|
||||
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
|
||||
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
|
||||
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
|
||||
*/
|
||||
|
||||
import { CACHE_MANAGER } from '@nestjs/cache-manager';
|
||||
import { MongooseModule } from '@nestjs/mongoose';
|
||||
|
||||
import { AttachmentRepository } from '@/attachment/repositories/attachment.repository';
|
||||
import { AttachmentModel } from '@/attachment/schemas/attachment.schema';
|
||||
import { AttachmentService } from '@/attachment/services/attachment.service';
|
||||
import { ChannelService } from '@/channel/channel.service';
|
||||
import { SubscriberRepository } from '@/chat/repositories/subscriber.repository';
|
||||
import { SubscriberModel } from '@/chat/schemas/subscriber.schema';
|
||||
import { SubscriberService } from '@/chat/services/subscriber.service';
|
||||
import LocalStorageHelper from '@/extensions/helpers/local-storage/index.helper';
|
||||
import { HelperService } from '@/helper/helper.service';
|
||||
import { LoggerService } from '@/logger/logger.service';
|
||||
import { SettingRepository } from '@/setting/repositories/setting.repository';
|
||||
import { Setting, SettingModel } from '@/setting/schemas/setting.schema';
|
||||
import { SettingSeeder } from '@/setting/seeds/setting.seed';
|
||||
import { SettingService } from '@/setting/services/setting.service';
|
||||
import { installSettingFixtures } from '@/utils/test/fixtures/setting';
|
||||
import {
|
||||
closeInMongodConnection,
|
||||
rootMongooseTestModule,
|
||||
} from '@/utils/test/test';
|
||||
import { buildTestingMocks } from '@/utils/test/utils';
|
||||
|
||||
import { CleanupService } from './cleanup.service';
|
||||
import { TNamespace } from './types';
|
||||
|
||||
describe('CleanupService', () => {
|
||||
let initialSettings: Setting[];
|
||||
let helperService: HelperService;
|
||||
let cleanupService: CleanupService;
|
||||
let settingService: SettingService;
|
||||
|
||||
beforeAll(async () => {
|
||||
const { getMocks, resolveMocks } = await buildTestingMocks({
|
||||
imports: [
|
||||
rootMongooseTestModule(installSettingFixtures),
|
||||
MongooseModule.forFeature([
|
||||
SettingModel,
|
||||
SubscriberModel,
|
||||
AttachmentModel,
|
||||
]),
|
||||
],
|
||||
providers: [
|
||||
CleanupService,
|
||||
HelperService,
|
||||
SettingService,
|
||||
SettingRepository,
|
||||
{
|
||||
provide: CACHE_MANAGER,
|
||||
useValue: {
|
||||
del: jest.fn(),
|
||||
get: jest.fn(),
|
||||
set: jest.fn(),
|
||||
},
|
||||
},
|
||||
SettingSeeder,
|
||||
SubscriberService,
|
||||
SubscriberRepository,
|
||||
AttachmentService,
|
||||
AttachmentRepository,
|
||||
ChannelService,
|
||||
],
|
||||
});
|
||||
[cleanupService, settingService, helperService] = await getMocks([
|
||||
CleanupService,
|
||||
SettingService,
|
||||
HelperService,
|
||||
]);
|
||||
|
||||
const [loggerService] = await resolveMocks([LoggerService]);
|
||||
initialSettings = await settingService.findAll();
|
||||
|
||||
helperService.register(
|
||||
new LocalStorageHelper(settingService, helperService, loggerService),
|
||||
);
|
||||
});
|
||||
|
||||
afterAll(closeInMongodConnection);
|
||||
|
||||
afterEach(jest.clearAllMocks);
|
||||
|
||||
describe('delete', () => {
|
||||
it('should delete all the unregistered settings with a group suffix `_channel` or/and `_helper`', async () => {
|
||||
const registeredNamespaces = [
|
||||
...cleanupService.getChannelNamespaces(),
|
||||
...cleanupService.getHelperNamespaces(),
|
||||
];
|
||||
|
||||
await cleanupService.pruneExtensionSettings();
|
||||
const cleanSettings = await settingService.findAll();
|
||||
const filteredSettings = initialSettings.filter(
|
||||
({ group }) =>
|
||||
!/_(channel|helper)$/.test(group) !==
|
||||
registeredNamespaces.includes(group as TNamespace),
|
||||
);
|
||||
|
||||
expect(cleanSettings).toEqualPayload(filteredSettings);
|
||||
});
|
||||
});
|
||||
});
|
api/src/extension/cleanup.service.ts (new file, 86 lines)
@ -0,0 +1,86 @@
|
||||
/*
|
||||
* Copyright © 2025 Hexastack. All rights reserved.
|
||||
*
|
||||
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
|
||||
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
|
||||
* 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
|
||||
*/
|
||||
|
||||
import { Injectable } from '@nestjs/common';
|
||||
|
||||
import { ChannelService } from '@/channel/channel.service';
|
||||
import { HelperService } from '@/helper/helper.service';
|
||||
import { LoggerService } from '@/logger/logger.service';
|
||||
import { SettingService } from '@/setting/services/setting.service';
|
||||
import { DeleteResult } from '@/utils/generics/base-repository';
|
||||
|
||||
import { TCriteria, TExtractExtension, TExtractNamespace } from './types';
|
||||
|
||||
@Injectable()
|
||||
export class CleanupService {
|
||||
constructor(
|
||||
private readonly helperService: HelperService,
|
||||
private readonly loggerService: LoggerService,
|
||||
private readonly settingService: SettingService,
|
||||
private readonly channelService: ChannelService,
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Deletes unused settings with the specified criteria.
|
||||
*
|
||||
* @param criteria - An array of criteria objects containing:
|
||||
* - suffix: Regex pattern to match setting groups
|
||||
* - namespaces: Array of namespaces to exclude from deletion
|
||||
* @returns A promise that resolves to the result of the deletion operation.
|
||||
*/
|
||||
private async deleteManyBySuffixAndNamespaces(
|
||||
criteria: TCriteria[],
|
||||
): Promise<DeleteResult> {
|
||||
return await this.settingService.deleteMany({
|
||||
$or: criteria.map(({ suffix, namespaces }) => ({
|
||||
group: { $regex: new RegExp(`${suffix}$`), $nin: namespaces },
|
||||
})),
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves a list of channel Namespaces.
|
||||
*
|
||||
* @returns An array of channel Namespaces.
|
||||
*/
|
||||
public getChannelNamespaces(): TExtractNamespace<'channel'>[] {
|
||||
return this.channelService
|
||||
.getAll()
|
||||
.map((channel) => channel.getNamespace<TExtractExtension<'channel'>>());
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves a list of helper Namespaces.
|
||||
*
|
||||
* @returns An array of helper Namespaces.
|
||||
*/
|
||||
public getHelperNamespaces(): TExtractNamespace<'helper'>[] {
|
||||
return this.helperService
|
||||
.getAll()
|
||||
.map((helper) => helper.getNamespace<TExtractExtension<'helper'>>());
|
||||
}
|
||||
|
||||
/**
|
||||
* Prune extensions unused settings.
|
||||
*
|
||||
*/
|
||||
public async pruneExtensionSettings(): Promise<void> {
|
||||
const channels = this.getChannelNamespaces();
|
||||
const helpers = this.getHelperNamespaces();
|
||||
const { deletedCount } = await this.deleteManyBySuffixAndNamespaces([
|
||||
{ suffix: '_channel', namespaces: channels },
|
||||
{ suffix: '_helper', namespaces: helpers },
|
||||
]);
|
||||
|
||||
if (deletedCount > 0) {
|
||||
this.loggerService.log(
|
||||
`${deletedCount} unused setting${deletedCount === 1 ? '' : 's'} ${deletedCount === 1 ? 'is' : 'are'} successfully deleted!`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
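The criteria-to-query mapping in CleanupService.deleteManyBySuffixAndNamespaces above is easier to follow with concrete values. Below is a minimal sketch (not part of the diff) of the filter it builds; the namespace values are illustrative assumptions.

const criteria = [
  { suffix: '_channel', namespaces: ['web_channel'] },
  { suffix: '_helper', namespaces: ['local_storage_helper'] },
];

const filter = {
  $or: criteria.map(({ suffix, namespaces }) => ({
    // any setting group ending with the suffix, except the registered namespaces
    group: { $regex: new RegExp(`${suffix}$`), $nin: namespaces },
  })),
};
// A leftover 'messenger_channel' settings group (illustrative) would match and be pruned,
// while the registered 'web_channel' group is protected by $nin.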
api/src/extension/extension.module.ts (new file, 33 lines)
@ -0,0 +1,33 @@
/*
 * Copyright © 2025 Hexastack. All rights reserved.
 *
 * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
 * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
 * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
 */

import { Global, Module, OnApplicationBootstrap } from '@nestjs/common';

import { LoggerService } from '@/logger/logger.service';

import { CleanupService } from './cleanup.service';

@Global()
@Module({
  providers: [CleanupService],
  exports: [CleanupService],
})
export class ExtensionModule implements OnApplicationBootstrap {
  constructor(
    private readonly loggerService: LoggerService,
    private readonly cleanupService: CleanupService,
  ) {}

  async onApplicationBootstrap() {
    try {
      await this.cleanupService.pruneExtensionSettings();
    } catch (error) {
      this.loggerService.error('Unable to delete unused settings', error);
    }
  }
}
api/src/extension/types.ts (new file, 41 lines)
@ -0,0 +1,41 @@
/*
 * Copyright © 2025 Hexastack. All rights reserved.
 *
 * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
 * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
 * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
 */

import { ExtensionName } from '@/utils/types/extension';

type TExcludedExtension = 'plugin';

type TExcludeSuffix<
  T,
  S extends string = '_',
  Suffix extends string = `${S}${TExcludedExtension}`,
> = T extends `${infer _Base}${Suffix}` ? never : T;

export type TExtensionName = TExcludeSuffix<ExtensionName, '-'>;

export type TExtension =
  Extract<TExtensionName, `${string}-${string}`> extends `${string}-${infer S}`
    ? `${S}`
    : never;

export type TNamespace = HyphenToUnderscore<TExtensionName>;

export type TExtractNamespace<
  T extends TExtension = TExtension,
  M extends TExtensionName = TExtensionName,
> = M extends `${string}${T}` ? HyphenToUnderscore<M> : never;

export type TExtractExtension<
  T extends TExtension = TExtension,
  M extends TExtensionName = TExtensionName,
> = M extends `${string}${T}` ? M : never;

export type TCriteria = {
  suffix: `_${TExtension}`;
  namespaces: TNamespace[];
};
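To make the conditional types in api/src/extension/types.ts concrete, here is a self-contained sketch. HyphenToUnderscore is re-declared with its assumed behaviour (the real project provides it globally), and the extension names are illustrative, not taken from the diff.

// Assumed behaviour of the project's global helper (illustrative re-declaration):
type HyphenToUnderscore<S extends string> = S extends `${infer A}-${infer B}`
  ? `${A}_${HyphenToUnderscore<B>}`
  : S;

// Illustrative registry:
type ExtensionName = 'web-channel' | 'dummy-helper' | 'ollama-plugin';

// With the definitions above:
// TExtensionName               = 'web-channel' | 'dummy-helper'   (plugin names excluded)
// TExtension                   = 'channel' | 'helper'
// TNamespace                   = 'web_channel' | 'dummy_helper'
// TExtractNamespace<'channel'> = 'web_channel'
// TExtractExtension<'helper'>  = 'dummy-helper'
// TCriteria                    = { suffix: '_channel' | '_helper'; namespaces: TNamespace[] }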
@ -12,6 +12,7 @@ import { OnEvent } from '@nestjs/event-emitter';
|
||||
import { I18nService } from '@/i18n/services/i18n.service';
|
||||
import { PluginService } from '@/plugins/plugins.service';
|
||||
import { PluginType } from '@/plugins/types';
|
||||
import { SettingType } from '@/setting/schemas/types';
|
||||
import { SettingService } from '@/setting/services/setting.service';
|
||||
import { BaseService } from '@/utils/generics/base-service';
|
||||
|
||||
@ -57,21 +58,35 @@ export class TranslationService extends BaseService<Translation> {
|
||||
PluginType.block,
|
||||
block.message.plugin,
|
||||
);
|
||||
const defaultSettings = await plugin?.getDefaultSettings();
|
||||
const defaultSettings = (await plugin?.getDefaultSettings()) || [];
|
||||
const filteredSettings = defaultSettings.filter(
|
||||
({ translatable, type }) =>
|
||||
[
|
||||
SettingType.text,
|
||||
SettingType.textarea,
|
||||
SettingType.multiple_text,
|
||||
].includes(type) &&
|
||||
(translatable === undefined || translatable === true),
|
||||
);
|
||||
const settingTypeMap = new Map(
|
||||
filteredSettings.map((setting) => [setting.label, setting.type]),
|
||||
);
|
||||
|
||||
// plugin
|
||||
Object.entries(block.message.args).forEach(([l, arg]) => {
|
||||
const setting = defaultSettings?.find(({ label }) => label === l);
|
||||
if (setting?.translatable) {
|
||||
if (Array.isArray(arg)) {
|
||||
// array of text
|
||||
strings = strings.concat(arg);
|
||||
} else if (typeof arg === 'string') {
|
||||
// text
|
||||
strings.push(arg);
|
||||
for (const [key, value] of Object.entries(block.message.args)) {
|
||||
const settingType = settingTypeMap.get(key);
|
||||
|
||||
switch (settingType) {
|
||||
case SettingType.multiple_text:
|
||||
strings = strings.concat(value);
|
||||
break;
|
||||
case SettingType.text:
|
||||
case SettingType.textarea:
|
||||
strings.push(value);
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
} else if ('text' in block.message && Array.isArray(block.message.text)) {
|
||||
// array of text
|
||||
strings = strings.concat(block.message.text);
|
||||
|
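The refactored TranslationService loop above resolves each plugin arg against a label-to-type map instead of re-scanning defaultSettings. A short worked example (the setting labels and arg values are hypothetical):

import { SettingType } from '@/setting/schemas/types';

// Hypothetical translatable settings left after filtering (a setting marked
// translatable: false would already have been dropped):
const filteredSettings = [
  { label: 'title', type: SettingType.text },
  { label: 'responses', type: SettingType.multiple_text },
];
const settingTypeMap = new Map(filteredSettings.map((s) => [s.label, s.type]));

const args = { title: 'Hello', responses: ['Yes', 'No'], api_url: 'https://example.com' };
let strings: string[] = [];

for (const [key, value] of Object.entries(args)) {
  switch (settingTypeMap.get(key)) {
    case SettingType.multiple_text:
      strings = strings.concat(value as string[]); // array of text
      break;
    case SettingType.text:
    case SettingType.textarea:
      strings.push(value as string); // single text
      break;
    default:
      break; // 'api_url' has no translatable type in the map, so it is skipped
  }
}
// strings === ['Hello', 'Yes', 'No']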
@ -10,17 +10,14 @@ import { BadRequestException, NotFoundException } from '@nestjs/common';
|
||||
import { MongooseModule } from '@nestjs/mongoose';
|
||||
|
||||
import { getUpdateOneError } from '@/utils/test/errors/messages';
|
||||
import { nlpEntityFixtures } from '@/utils/test/fixtures/nlpentity';
|
||||
import {
|
||||
installNlpValueFixtures,
|
||||
nlpValueFixtures,
|
||||
} from '@/utils/test/fixtures/nlpvalue';
|
||||
import { getPageQuery } from '@/utils/test/pagination';
|
||||
import {
|
||||
closeInMongodConnection,
|
||||
rootMongooseTestModule,
|
||||
} from '@/utils/test/test';
|
||||
import { TFixtures } from '@/utils/test/types';
|
||||
import { buildTestingMocks } from '@/utils/test/utils';
|
||||
|
||||
import { NlpValueCreateDto } from '../dto/nlp-value.dto';
|
||||
@ -29,11 +26,7 @@ import { NlpSampleEntityRepository } from '../repositories/nlp-sample-entity.rep
|
||||
import { NlpValueRepository } from '../repositories/nlp-value.repository';
|
||||
import { NlpEntityModel } from '../schemas/nlp-entity.schema';
|
||||
import { NlpSampleEntityModel } from '../schemas/nlp-sample-entity.schema';
|
||||
import {
|
||||
NlpValue,
|
||||
NlpValueFull,
|
||||
NlpValueModel,
|
||||
} from '../schemas/nlp-value.schema';
|
||||
import { NlpValue, NlpValueModel } from '../schemas/nlp-value.schema';
|
||||
import { NlpEntityService } from '../services/nlp-entity.service';
|
||||
import { NlpValueService } from '../services/nlp-value.service';
|
||||
|
||||
@ -80,63 +73,6 @@ describe('NlpValueController', () => {
|
||||
|
||||
afterEach(jest.clearAllMocks);
|
||||
|
||||
describe('findPage', () => {
|
||||
it('should find nlp Values, and foreach nlp value populate the corresponding entity', async () => {
|
||||
const pageQuery = getPageQuery<NlpValue>({
|
||||
sort: ['value', 'desc'],
|
||||
});
|
||||
const result = await nlpValueController.findPage(
|
||||
pageQuery,
|
||||
['entity'],
|
||||
{},
|
||||
);
|
||||
|
||||
const nlpValueFixturesWithEntities = nlpValueFixtures.reduce(
|
||||
(acc, curr) => {
|
||||
acc.push({
|
||||
...curr,
|
||||
entity: nlpEntityFixtures[
|
||||
parseInt(curr.entity!)
|
||||
] as NlpValueFull['entity'],
|
||||
builtin: curr.builtin!,
|
||||
expressions: curr.expressions!,
|
||||
metadata: curr.metadata!,
|
||||
});
|
||||
return acc;
|
||||
},
|
||||
[] as TFixtures<NlpValueFull>[],
|
||||
);
|
||||
expect(result).toEqualPayload(nlpValueFixturesWithEntities);
|
||||
});
|
||||
|
||||
it('should find nlp Values', async () => {
|
||||
const pageQuery = getPageQuery<NlpValue>({
|
||||
sort: ['value', 'desc'],
|
||||
});
|
||||
const result = await nlpValueController.findPage(
|
||||
pageQuery,
|
||||
['invalidCriteria'],
|
||||
{},
|
||||
);
|
||||
const nlpEntities = await nlpEntityService.findAll();
|
||||
const nlpValueFixturesWithEntities = nlpValueFixtures.reduce(
|
||||
(acc, curr) => {
|
||||
const ValueWithEntities = {
|
||||
...curr,
|
||||
entity: curr.entity ? nlpEntities[parseInt(curr.entity!)].id : null,
|
||||
expressions: curr.expressions!,
|
||||
metadata: curr.metadata!,
|
||||
builtin: curr.builtin!,
|
||||
};
|
||||
acc.push(ValueWithEntities);
|
||||
return acc;
|
||||
},
|
||||
[] as TFixtures<NlpValueCreateDto>[],
|
||||
);
|
||||
expect(result).toEqualPayload(nlpValueFixturesWithEntities);
|
||||
});
|
||||
});
|
||||
|
||||
describe('count', () => {
|
||||
it('should count the nlp Values', async () => {
|
||||
const result = await nlpValueController.filterCount();
|
||||
|
@ -30,6 +30,7 @@ import { PageQueryPipe } from '@/utils/pagination/pagination-query.pipe';
|
||||
import { PopulatePipe } from '@/utils/pipes/populate.pipe';
|
||||
import { SearchFilterPipe } from '@/utils/pipes/search-filter.pipe';
|
||||
import { TFilterQuery } from '@/utils/types/filter.types';
|
||||
import { Format } from '@/utils/types/format.types';
|
||||
|
||||
import { NlpValueCreateDto, NlpValueUpdateDto } from '../dto/nlp-value.dto';
|
||||
import {
|
||||
@ -126,7 +127,7 @@ export class NlpValueController extends BaseController<
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves a paginated list of NLP values.
|
||||
* Retrieves a paginated list of NLP values with NLP Samples count.
|
||||
*
|
||||
* Supports filtering, pagination, and optional population of related entities.
|
||||
*
|
||||
@ -134,10 +135,10 @@ export class NlpValueController extends BaseController<
|
||||
* @param populate - An array of related entities to populate.
|
||||
* @param filters - Filters to apply when retrieving the NLP values.
|
||||
*
|
||||
* @returns A promise resolving to a paginated list of NLP values.
|
||||
* @returns A promise resolving to a paginated list of NLP values with NLP Samples count.
|
||||
*/
|
||||
@Get()
|
||||
async findPage(
|
||||
async findWithCount(
|
||||
@Query(PageQueryPipe) pageQuery: PageQueryDto<NlpValue>,
|
||||
@Query(PopulatePipe) populate: string[],
|
||||
@Query(
|
||||
@ -147,9 +148,11 @@ export class NlpValueController extends BaseController<
|
||||
)
|
||||
filters: TFilterQuery<NlpValue>,
|
||||
) {
|
||||
return this.canPopulate(populate)
|
||||
? await this.nlpValueService.findAndPopulate(filters, pageQuery)
|
||||
: await this.nlpValueService.find(filters, pageQuery);
|
||||
return await this.nlpValueService.findWithCount(
|
||||
this.canPopulate(populate) ? Format.FULL : Format.STUB,
|
||||
pageQuery,
|
||||
filters,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -8,10 +8,20 @@
|
||||
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { InjectModel } from '@nestjs/mongoose';
|
||||
import { Document, Model, Query } from 'mongoose';
|
||||
import { plainToInstance } from 'class-transformer';
|
||||
import {
|
||||
Document,
|
||||
Model,
|
||||
PipelineStage,
|
||||
Query,
|
||||
SortOrder,
|
||||
Types,
|
||||
} from 'mongoose';
|
||||
|
||||
import { BaseRepository, DeleteResult } from '@/utils/generics/base-repository';
|
||||
import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
|
||||
import { TFilterQuery } from '@/utils/types/filter.types';
|
||||
import { Format } from '@/utils/types/format.types';
|
||||
|
||||
import { NlpValueDto } from '../dto/nlp-value.dto';
|
||||
import {
|
||||
@ -19,7 +29,10 @@ import {
|
||||
NlpValue,
|
||||
NlpValueDocument,
|
||||
NlpValueFull,
|
||||
NlpValueFullWithCount,
|
||||
NlpValuePopulate,
|
||||
NlpValueWithCount,
|
||||
TNlpValueCount,
|
||||
} from '../schemas/nlp-value.schema';
|
||||
|
||||
import { NlpSampleEntityRepository } from './nlp-sample-entity.repository';
|
||||
@ -106,4 +119,139 @@ export class NlpValueRepository extends BaseRepository<
|
||||
throw new Error('Attempted to delete a NLP value using unknown criteria');
|
||||
}
|
||||
}
|
||||
|
||||
private getSortDirection(sortOrder: SortOrder) {
|
||||
return typeof sortOrder === 'number'
|
||||
? sortOrder
|
||||
: sortOrder.toString().toLowerCase() === 'desc'
|
||||
? -1
|
||||
: 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs an aggregation to retrieve NLP values with their sample counts.
|
||||
*
|
||||
* @param format - The format can be full or stub
|
||||
* @param pageQuery - The pagination parameters
|
||||
* @param filterQuery - The filter criteria
|
||||
* @returns Aggregated Nlp Value results with sample counts
|
||||
*/
|
||||
private async aggregateWithCount<F extends Format>(
|
||||
format: F,
|
||||
{
|
||||
limit = 10,
|
||||
skip = 0,
|
||||
sort = ['createdAt', 'desc'],
|
||||
}: PageQueryDto<NlpValue>,
|
||||
{ $and = [], ...rest }: TFilterQuery<NlpValue>,
|
||||
): Promise<TNlpValueCount<F>[]> {
|
||||
const pipeline: PipelineStage[] = [
|
||||
{
|
||||
$match: {
|
||||
...rest,
|
||||
...($and.length
|
||||
? {
|
||||
$and: $and.map(({ entity, ...rest }) => ({
|
||||
...rest,
|
||||
...(entity
|
||||
? { entity: new Types.ObjectId(String(entity)) }
|
||||
: {}),
|
||||
})),
|
||||
}
|
||||
: {}),
|
||||
},
|
||||
},
|
||||
{
|
||||
$skip: skip,
|
||||
},
|
||||
{
|
||||
$limit: limit,
|
||||
},
|
||||
{
|
||||
$lookup: {
|
||||
from: 'nlpsampleentities',
|
||||
localField: '_id',
|
||||
foreignField: 'value',
|
||||
as: '_sampleEntities',
|
||||
},
|
||||
},
|
||||
{
|
||||
$unwind: {
|
||||
path: '$_sampleEntities',
|
||||
preserveNullAndEmptyArrays: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
$group: {
|
||||
_id: '$_id',
|
||||
_originalDoc: {
|
||||
$first: {
|
||||
$unsetField: { input: '$$ROOT', field: 'nlpSamplesCount' },
|
||||
},
|
||||
},
|
||||
nlpSamplesCount: {
|
||||
$sum: { $cond: [{ $ifNull: ['$_sampleEntities', false] }, 1, 0] },
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
$replaceWith: {
|
||||
$mergeObjects: [
|
||||
'$_originalDoc',
|
||||
{ nlpSamplesCount: '$nlpSamplesCount' },
|
||||
],
|
||||
},
|
||||
},
|
||||
...(format === Format.FULL
|
||||
? [
|
||||
{
|
||||
$lookup: {
|
||||
from: 'nlpentities',
|
||||
localField: 'entity',
|
||||
foreignField: '_id',
|
||||
as: 'entity',
|
||||
},
|
||||
},
|
||||
{
|
||||
$unwind: '$entity',
|
||||
},
|
||||
]
|
||||
: []),
|
||||
{
|
||||
$sort: {
|
||||
[sort[0]]: this.getSortDirection(sort[1]),
|
||||
_id: this.getSortDirection(sort[1]),
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
return await this.model.aggregate<TNlpValueCount<F>>(pipeline).exec();
|
||||
}
|
||||
|
||||
async findWithCount<F extends Format>(
|
||||
format: F,
|
||||
pageQuery: PageQueryDto<NlpValue>,
|
||||
filterQuery: TFilterQuery<NlpValue>,
|
||||
): Promise<TNlpValueCount<F>[]> {
|
||||
try {
|
||||
const aggregatedResults = await this.aggregateWithCount(
|
||||
format,
|
||||
pageQuery,
|
||||
filterQuery,
|
||||
);
|
||||
|
||||
if (format === Format.FULL) {
|
||||
return plainToInstance(NlpValueFullWithCount, aggregatedResults, {
|
||||
excludePrefixes: ['_'],
|
||||
}) as TNlpValueCount<F>[];
|
||||
}
|
||||
|
||||
return plainToInstance(NlpValueWithCount, aggregatedResults, {
|
||||
excludePrefixes: ['_'],
|
||||
}) as TNlpValueCount<F>[];
|
||||
} catch (error) {
|
||||
this.logger.error(`Error in findWithCount: ${error.message}`, error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
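The counting part of aggregateWithCount above is the subtle bit: $unwind with preserveNullAndEmptyArrays keeps values that have no sample entities, and the $cond/$ifNull pair then counts those as 0 instead of 1. Reduced to just those stages (a sketch, not the full pipeline):

const countStages = [
  {
    $lookup: {
      from: 'nlpsampleentities',
      localField: '_id',
      foreignField: 'value',
      as: '_sampleEntities',
    },
  },
  // a value with zero samples survives as a single document whose _sampleEntities is null
  { $unwind: { path: '$_sampleEntities', preserveNullAndEmptyArrays: true } },
  {
    $group: {
      _id: '$_id',
      // null (no joined sample) contributes 0; each real joined document contributes 1
      nlpSamplesCount: { $sum: { $cond: [{ $ifNull: ['$_sampleEntities', false] }, 1, 0] } },
    },
  },
];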
@ -16,6 +16,7 @@ import {
|
||||
TFilterPopulateFields,
|
||||
THydratedDocument,
|
||||
} from '@/utils/types/filter.types';
|
||||
import { TStubOrFull } from '@/utils/types/format.types';
|
||||
|
||||
import { NlpEntity, NlpEntityFull } from './nlp-entity.schema';
|
||||
import { NlpValueMap } from './types';
|
||||
@ -106,6 +107,14 @@ export class NlpValueFull extends NlpValueStub {
|
||||
entity: NlpEntity;
|
||||
}
|
||||
|
||||
export class NlpValueWithCount extends NlpValue {
|
||||
nlpSamplesCount: number;
|
||||
}
|
||||
|
||||
export class NlpValueFullWithCount extends NlpValueFull {
|
||||
nlpSamplesCount: number;
|
||||
}
|
||||
|
||||
export type NlpValueDocument = THydratedDocument<NlpValue>;
|
||||
|
||||
export const NlpValueModel: ModelDefinition = LifecycleHookManager.attach({
|
||||
@ -121,3 +130,9 @@ export type NlpValuePopulate = keyof TFilterPopulateFields<
|
||||
>;
|
||||
|
||||
export const NLP_VALUE_POPULATE: NlpValuePopulate[] = ['entity'];
|
||||
|
||||
export type TNlpValueCount<T> = TStubOrFull<
|
||||
T,
|
||||
NlpValueWithCount,
|
||||
NlpValueFullWithCount
|
||||
>;
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright © 2024 Hexastack. All rights reserved.
|
||||
* Copyright © 2025 Hexastack. All rights reserved.
|
||||
*
|
||||
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
|
||||
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
|
||||
@ -10,6 +10,9 @@ import { forwardRef, Inject, Injectable } from '@nestjs/common';
|
||||
|
||||
import { DeleteResult } from '@/utils/generics/base-repository';
|
||||
import { BaseService } from '@/utils/generics/base-service';
|
||||
import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
|
||||
import { TFilterQuery } from '@/utils/types/filter.types';
|
||||
import { Format } from '@/utils/types/format.types';
|
||||
|
||||
import { NlpValueCreateDto, NlpValueDto } from '../dto/nlp-value.dto';
|
||||
import { NlpValueRepository } from '../repositories/nlp-value.repository';
|
||||
@ -18,6 +21,7 @@ import {
|
||||
NlpValue,
|
||||
NlpValueFull,
|
||||
NlpValuePopulate,
|
||||
TNlpValueCount,
|
||||
} from '../schemas/nlp-value.schema';
|
||||
import { NlpSampleEntityValue } from '../schemas/types';
|
||||
|
||||
@ -218,4 +222,12 @@ export class NlpValueService extends BaseService<
|
||||
});
|
||||
return Promise.all(promises);
|
||||
}
|
||||
|
||||
async findWithCount<F extends Format>(
|
||||
format: F,
|
||||
pageQuery: PageQueryDto<NlpValue>,
|
||||
filters: TFilterQuery<NlpValue>,
|
||||
): Promise<TNlpValueCount<F>[]> {
|
||||
return await this.repository.findWithCount(format, pageQuery, filters);
|
||||
}
|
||||
}
|
||||
|
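A hypothetical call site for NlpValueService.findWithCount (variable names assumed), mirroring how the controller delegates to it:

const rows = await nlpValueService.findWithCount(
  Format.FULL,
  { skip: 0, limit: 10, sort: ['value', 'desc'] },
  {}, // optional filters, e.g. { entity: someEntityId }
);
// Each row is an NlpValueFullWithCount: the populated entity plus nlpSamplesCount.
// Passing Format.STUB instead returns NlpValueWithCount rows (entity left as an id).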
api/src/utils/test/fixtures/setting.ts (vendored, 36 changes)
@ -11,6 +11,7 @@ import mongoose from 'mongoose';
|
||||
import { SettingCreateDto } from '@/setting/dto/setting.dto';
|
||||
import { SettingModel } from '@/setting/schemas/setting.schema';
|
||||
import { SettingType } from '@/setting/schemas/types';
|
||||
import { getRandom } from '@/utils/helpers/safeRandom';
|
||||
|
||||
export const settingFixtures: SettingCreateDto[] = [
|
||||
{
|
||||
@ -90,6 +91,41 @@ export const settingFixtures: SettingCreateDto[] = [
|
||||
type: SettingType.text,
|
||||
weight: 10,
|
||||
},
|
||||
{
|
||||
group: `${getRandom()}_channel`,
|
||||
label: `${getRandom()}`,
|
||||
value: '',
|
||||
type: SettingType.text,
|
||||
weight: 11,
|
||||
},
|
||||
{
|
||||
group: `${getRandom()}_helper`,
|
||||
label: `${getRandom()}`,
|
||||
value: '',
|
||||
type: SettingType.text,
|
||||
weight: 12,
|
||||
},
|
||||
{
|
||||
group: `${getRandom()}_channel`,
|
||||
label: `${getRandom()}`,
|
||||
value: '',
|
||||
type: SettingType.text,
|
||||
weight: 13,
|
||||
},
|
||||
{
|
||||
group: `${getRandom()}_helper`,
|
||||
label: `${getRandom()}`,
|
||||
value: '',
|
||||
type: SettingType.text,
|
||||
weight: 14,
|
||||
},
|
||||
{
|
||||
group: 'local_storage_helper',
|
||||
label: 'default storage helper label',
|
||||
value: 'local-storage-helper',
|
||||
type: SettingType.text,
|
||||
weight: 15,
|
||||
},
|
||||
];
|
||||
|
||||
export const installSettingFixtures = async () => {
|
||||
|
api/src/utils/types/format.types.ts (new file, 18 lines)
@ -0,0 +1,18 @@
/*
 * Copyright © 2025 Hexastack. All rights reserved.
 *
 * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
 * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
 * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
 */

export enum Format {
  NONE = 0,
  STUB = 1,
  BASIC = 2,
  FULL = 3,
}

export type TStubOrFull<TF, TStub, TFull> = TF extends Format.STUB
  ? TStub
  : TFull;
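For instance, the TNlpValueCount helper added earlier in this diff (api/src/chat/.../nlp-value.schema.ts) is just an application of TStubOrFull:

type TNlpValueCount<T> = TStubOrFull<T, NlpValueWithCount, NlpValueFullWithCount>;

type StubRows = TNlpValueCount<Format.STUB>; // resolves to NlpValueWithCount
type FullRows = TNlpValueCount<Format.FULL>; // resolves to NlpValueFullWithCount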
@ -28,7 +28,6 @@
|
||||
"@mui/x-data-grid": "^7.3.2",
|
||||
"@projectstorm/react-canvas-core": "^7.0.3",
|
||||
"@projectstorm/react-diagrams": "^7.0.4",
|
||||
"@types/qs": "^6.9.15",
|
||||
"axios": "^1.7.7",
|
||||
"eazychart-css": "^0.2.1-alpha.0",
|
||||
"eazychart-react": "^0.8.0-alpha.0",
|
||||
@ -44,9 +43,11 @@
|
||||
"react-hook-form": "^7.51.5",
|
||||
"react-i18next": "^14.1.1",
|
||||
"react-query": "^3.39.3",
|
||||
"socket.io-client": "^4.7.5"
|
||||
"socket.io-client": "^4.7.5",
|
||||
"random-seed": "^0.3.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/qs": "^6.9.15",
|
||||
"@types/node": "20.12.12",
|
||||
"@types/random-seed": "^0.3.5",
|
||||
"@types/react": "18.3.2",
|
||||
@ -57,7 +58,6 @@
|
||||
"eslint-import-resolver-typescript": "~3.6.1",
|
||||
"eslint-plugin-header": "^3.1.1",
|
||||
"lint-staged": "^15.3.0",
|
||||
"random-seed": "^0.3.0",
|
||||
"typescript": "^5.5.3"
|
||||
},
|
||||
"engines": {
|
||||
|
@ -334,6 +334,7 @@
|
||||
"nlp": "NLU",
|
||||
"nlp_entity": "Entity",
|
||||
"nlp_entity_value": "Value",
|
||||
"nlp_samples_count": "Samples count",
|
||||
"value": "Value",
|
||||
"synonyms": "Synonyms",
|
||||
"lookups": "Lookups",
|
||||
|
@ -334,6 +334,7 @@
|
||||
"nlp": "NLU",
|
||||
"nlp_entity": "Entité NLU",
|
||||
"nlp_entity_value": "Valeur NLU",
|
||||
"nlp_samples_count": "Nombre des échantillons",
|
||||
"value": "Valeur",
|
||||
"lookups": "Stratégies",
|
||||
"lookup_strategies": "Stratégie de recherche",
|
||||
|
@ -24,11 +24,13 @@ export const GenericFormDialog = <T,>({
|
||||
Form,
|
||||
rowKey,
|
||||
payload: data,
|
||||
editText,
|
||||
addText,
|
||||
...rest
|
||||
}: GenericFormDialogProps<T>) => {
|
||||
const { t } = useTranslate();
|
||||
const hasRow = rowKey ? data?.[rowKey] : data;
|
||||
const translationKey = hasRow ? rest.editText : rest.addText;
|
||||
const translationKey = hasRow ? editText : addText;
|
||||
|
||||
return (
|
||||
<Form
|
||||
|
@ -20,6 +20,7 @@ import { useConfig } from "@/hooks/useConfig";
|
||||
import { useTranslate } from "@/hooks/useTranslate";
|
||||
import { Title } from "@/layout/content/Title";
|
||||
import { EntityType, RouterType } from "@/services/types";
|
||||
import { extractQueryParamsUrl } from "@/utils/URL";
|
||||
|
||||
import { getAvatarSrc } from "../helpers/mapMessages";
|
||||
import { useChat } from "../hooks/ChatContext";
|
||||
@ -53,7 +54,7 @@ export const SubscribersList = (props: {
|
||||
<Grid padding={2}>
|
||||
<Title title={t(props.assignedTo)} icon={InboxIcon} />
|
||||
</Grid>
|
||||
{subscribers?.length > 0 && (
|
||||
{subscribers?.length > 0 ? (
|
||||
<ConversationList
|
||||
scrollable
|
||||
loading={isFetching}
|
||||
@ -64,7 +65,10 @@ export const SubscribersList = (props: {
|
||||
<Conversation
|
||||
onClick={() => {
|
||||
chat.setSubscriberId(subscriber.id);
|
||||
push(`/${RouterType.INBOX}/subscribers/${subscriber.id}`);
|
||||
push({
|
||||
pathname: `/${RouterType.INBOX}/subscribers/${subscriber.id}`,
|
||||
query: extractQueryParamsUrl(window.location.href),
|
||||
});
|
||||
}}
|
||||
className="changeColor"
|
||||
key={subscriber.id}
|
||||
@ -87,6 +91,10 @@ export const SubscribersList = (props: {
|
||||
</Conversation>
|
||||
))}
|
||||
</ConversationList>
|
||||
) : (
|
||||
<Grid p={1} color="gray" textAlign="center">
|
||||
{t("message.no_result_found")}
|
||||
</Grid>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright © 2024 Hexastack. All rights reserved.
|
||||
* Copyright © 2025 Hexastack. All rights reserved.
|
||||
*
|
||||
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
|
||||
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
|
||||
@ -26,7 +26,7 @@ import { AssignedTo } from "./types";
|
||||
|
||||
export const Inbox = () => {
|
||||
const { t } = useTranslate();
|
||||
const { onSearch, searchPayload } = useSearch<ISubscriber>({
|
||||
const { onSearch, searchPayload, searchText } = useSearch<ISubscriber>({
|
||||
$or: ["first_name", "last_name"],
|
||||
});
|
||||
const [channels, setChannels] = useState<string[]>([]);
|
||||
@ -48,6 +48,7 @@ export const Inbox = () => {
|
||||
<Sidebar position="left">
|
||||
<Grid paddingX={1} paddingTop={1}>
|
||||
<Search
|
||||
value={searchText}
|
||||
onClearClick={() => onSearch("")}
|
||||
className="changeColor"
|
||||
onChange={(v) => onSearch(v)}
|
||||
|
@ -392,6 +392,7 @@ export default function NlpSample() {
|
||||
`nlpsample/export${type ? `?type=${type}` : ""}`,
|
||||
)}
|
||||
startIcon={<DownloadIcon />}
|
||||
disabled={dataGridProps?.rows?.length === 0}
|
||||
>
|
||||
{t("button.export")}
|
||||
</Button>
|
||||
|
@ -55,10 +55,10 @@ export const NlpValues = ({ entityId }: { entityId: string }) => {
|
||||
const canHaveSynonyms = nlpEntity?.lookups?.[0] === NlpLookups.keywords;
|
||||
const { onSearch, searchPayload } = useSearch<INlpValue>({
|
||||
$eq: [{ entity: entityId }],
|
||||
$or: ["doc", "value"]
|
||||
$or: ["doc", "value"],
|
||||
});
|
||||
const { dataGridProps } = useFind(
|
||||
{ entity: EntityType.NLP_VALUE },
|
||||
{ entity: EntityType.NLP_VALUE, format: Format.FULL },
|
||||
{
|
||||
params: searchPayload,
|
||||
},
|
||||
@ -125,6 +125,24 @@ export const NlpValues = ({ entityId }: { entityId: string }) => {
|
||||
disableColumnMenu: true,
|
||||
renderHeader,
|
||||
},
|
||||
{
|
||||
flex: 2,
|
||||
field: "nlpSamplesCount",
|
||||
align: "center",
|
||||
headerName: t("label.nlp_samples_count"),
|
||||
sortable: true,
|
||||
disableColumnMenu: true,
|
||||
headerAlign: "center",
|
||||
renderHeader,
|
||||
renderCell: ({ row }) => (
|
||||
<Chip
|
||||
sx={{ alignContent: "center" }}
|
||||
id={row.id}
|
||||
label={row.nlpSamplesCount}
|
||||
variant="inbox"
|
||||
/>
|
||||
),
|
||||
},
|
||||
{
|
||||
flex: 3,
|
||||
field: "doc",
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright © 2024 Hexastack. All rights reserved.
|
||||
* Copyright © 2025 Hexastack. All rights reserved.
|
||||
*
|
||||
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
|
||||
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
|
||||
@ -7,6 +7,7 @@
|
||||
*/
|
||||
|
||||
import { Grid } from "@mui/material";
|
||||
import { useMemo } from "react";
|
||||
|
||||
import PluginIcon from "@/app-components/svg/toolbar/PluginIcon";
|
||||
import { useFind } from "@/hooks/crud/useFind";
|
||||
@ -17,18 +18,22 @@ import { Block, StyledTitle } from "./Aside";
|
||||
|
||||
export const CustomBlocks = () => {
|
||||
const { t } = useTranslate();
|
||||
const { data: customBlocks } = useFind(
|
||||
const { data: customBlocks = [] } = useFind(
|
||||
{ entity: EntityType.CUSTOM_BLOCK },
|
||||
{ hasCount: false },
|
||||
);
|
||||
const memoizedCustomBlocks = useMemo(
|
||||
() => customBlocks.sort((a, b) => a.id.localeCompare(b.id)),
|
||||
[customBlocks],
|
||||
);
|
||||
|
||||
return customBlocks?.length ? (
|
||||
return memoizedCustomBlocks.length ? (
|
||||
<>
|
||||
<Grid mb="2">
|
||||
<StyledTitle>{t("title.custom_blocks")}</StyledTitle>
|
||||
</Grid>
|
||||
<Grid container>
|
||||
{customBlocks?.map((customBlock) => (
|
||||
{memoizedCustomBlocks.map((customBlock) => (
|
||||
<Block
|
||||
key={customBlock.id}
|
||||
title={t(`title.${customBlock.namespace}`, {
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright © 2024 Hexastack. All rights reserved.
|
||||
* Copyright © 2025 Hexastack. All rights reserved.
|
||||
*
|
||||
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
|
||||
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
|
||||
@ -9,13 +9,21 @@
|
||||
import { createContext, ReactNode, useContext } from "react";
|
||||
import { FormProvider, UseFormReturn } from "react-hook-form";
|
||||
|
||||
import { IBlockAttributes, IBlock } from "@/types/block.types";
|
||||
import { IBlock, IBlockAttributes } from "@/types/block.types";
|
||||
|
||||
// Create a custom context for the block value
|
||||
const BlockContext = createContext<IBlock | undefined>(undefined);
|
||||
|
||||
// Custom hook to use block context
|
||||
export const useBlock = () => useContext(BlockContext);
|
||||
export const useBlock = () => {
|
||||
const context = useContext(BlockContext);
|
||||
|
||||
if (!context) {
|
||||
throw new Error("useBlock must be used within an BlockContext");
|
||||
}
|
||||
|
||||
return context;
|
||||
};
|
||||
|
||||
// This component wraps FormProvider and adds block to its context
|
||||
function BlockFormProvider({
|
||||
@ -23,7 +31,7 @@ function BlockFormProvider({
|
||||
methods,
|
||||
block,
|
||||
}: {
|
||||
methods: UseFormReturn<IBlockAttributes, any, undefined>;
|
||||
methods: UseFormReturn<IBlockAttributes>;
|
||||
block: IBlock | undefined;
|
||||
children: ReactNode;
|
||||
}) {
|
||||
|
@ -11,15 +11,190 @@ import styled from "@emotion/styled";
|
||||
import {
|
||||
DefaultLinkFactory,
|
||||
DefaultLinkWidget,
|
||||
NodeModel,
|
||||
PortModel
|
||||
} from "@projectstorm/react-diagrams";
|
||||
|
||||
import { AdvancedLinkModel } from "./AdvancedLinkModel";
|
||||
|
||||
const PROXIMITY_THRESHOLD = 500;
|
||||
const MIN_DISTANCE = 0.1;
|
||||
const MAX_DISTANCE = 2000;
|
||||
const CONTROL_POINT_PADDING = 10;
|
||||
const BACKWARD_LINK_THRESHOLD = 12; // pixels
|
||||
const MIN_SCALE_FACTOR = 1.5;
|
||||
const MAX_SCALE_FACTOR = 2.0;
|
||||
|
||||
interface Point {
|
||||
x: number;
|
||||
y: number;
|
||||
}
|
||||
|
||||
interface Boundaries {
|
||||
left: number,
|
||||
right: number,
|
||||
top: number,
|
||||
bottom: number,
|
||||
}
|
||||
|
||||
interface Dimensions {
|
||||
width: number,
|
||||
height: number,
|
||||
}
|
||||
// Helper function to get port dimensions
|
||||
const getPortDimensions = (port: PortModel): Dimensions => {
|
||||
return {
|
||||
width: port.width || CONTROL_POINT_PADDING,
|
||||
height: port.height || CONTROL_POINT_PADDING,
|
||||
};
|
||||
};
|
||||
// Helper function to calculate port center point
|
||||
const getPortCenterPoint = (port: PortModel): Point => {
|
||||
const portSize = getPortDimensions(port);
|
||||
|
||||
return {
|
||||
x: port.getPosition().x + portSize.width / 2,
|
||||
y: port.getPosition().y + portSize.height / 2,
|
||||
};
|
||||
};
|
||||
/**
|
||||
* Logarithmic scaling function that adjusts between 1.5 and 2 based on distance,
|
||||
* minimum distance, and maximum distance.
|
||||
* @param distance - The distance to scale.
|
||||
* @param minDistance - A small value to prevent division by zero or too small values.
|
||||
* @param maxDistance - The maximum expected distance.
|
||||
*/
|
||||
const logFactor = (
|
||||
distance: number,
|
||||
minDistance: number,
|
||||
maxDistance: number
|
||||
): number => {
|
||||
const scale = Math.log(distance + minDistance) / Math.log(maxDistance + minDistance);
|
||||
|
||||
return MIN_SCALE_FACTOR + scale * (MAX_SCALE_FACTOR - MIN_SCALE_FACTOR); // Scaled to range between 1.5 and 2
|
||||
};
|
||||
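// Worked example of the scaling above (illustrative numbers, not part of the diff):
// with minDistance = 0.1 and maxDistance = 2000,
//   logFactor(100, 0.1, 2000)  ~= 1.5 + (ln(100.1) / ln(2000.1)) * 0.5 ~= 1.80
//   logFactor(500, 0.1, 2000)  ~= 1.5 + (ln(500.1) / ln(2000.1)) * 0.5 ~= 1.91
//   logFactor(2000, 0.1, 2000) ~= 2.0
// so the offset multiplier grows smoothly toward the 2.0 cap as linked nodes get further apart.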
/**
|
||||
* Calculates the horizontal (X-axis) overlap in pixels between two node boundaries.
|
||||
* Returns 0 if there is no overlap.
|
||||
*/
|
||||
const calculateXOverlap = (
|
||||
sourceBounds: Boundaries,
|
||||
targetBounds: Boundaries
|
||||
): number => {
|
||||
return Math.max(
|
||||
0,
|
||||
Math.min(sourceBounds.right, targetBounds.right) -
|
||||
Math.max(sourceBounds.left, targetBounds.left)
|
||||
);
|
||||
};
|
||||
/**
|
||||
* Calculates the vertical (Y-axis) overlap in pixels between two node boundaries.
|
||||
* Returns 0 if there is no overlap.
|
||||
*/
|
||||
const calculateYOverlap = (
|
||||
sourceBounds: Boundaries,
|
||||
targetBounds: Boundaries
|
||||
): number => {
|
||||
return Math.max(
|
||||
0,
|
||||
Math.min(sourceBounds.bottom, targetBounds.bottom) -
|
||||
Math.max(sourceBounds.top, targetBounds.top)
|
||||
);
|
||||
};
|
||||
/**
|
||||
* Converts an overlap amount into a ratio (0 to 1) based on the larger of the two node dimensions.
|
||||
* Useful for dynamically adjusting offsets based on how much nodes visually intersect.
|
||||
*/
|
||||
const calculateOverlapRatio = (
|
||||
overlapAmount: number,
|
||||
sourceDimension: number,
|
||||
targetDimension: number
|
||||
): number => {
|
||||
const maxRange = Math.max(sourceDimension, targetDimension);
|
||||
|
||||
return overlapAmount / maxRange;
|
||||
};
|
||||
/**
|
||||
* Computes the Euclidean distance between two points.
|
||||
* Used to scale offsets and curve control points based on how far apart nodes are.
|
||||
*/
|
||||
const calculateDistance = (startPoint: Point, endPoint: Point): number => {
|
||||
return Math.sqrt(
|
||||
Math.pow(endPoint.x - startPoint.x, 2) + Math.pow(endPoint.y - startPoint.y, 2)
|
||||
);
|
||||
};
|
||||
/**
|
||||
* Calculates the bounding box of a node based on its position and size.
|
||||
* Returns an object with `left`, `right`, `top`, and `bottom` properties representing the node's edges.
|
||||
*/
|
||||
const calculateNodeBoundaries = (node: NodeModel): Boundaries => {
|
||||
return {
|
||||
left: node.getPosition().x,
|
||||
right: node.getPosition().x + node.width,
|
||||
top: node.getPosition().y,
|
||||
bottom: node.getPosition().y + node.height,
|
||||
};
|
||||
};
|
||||
/**
|
||||
* Calculates the width and height of a node based on the position of one of its ports.
|
||||
*
|
||||
* This approach avoids relying on the node's width and height properties,
|
||||
* which may not be accurate or available at render time due to asynchronous rendering behavior.
|
||||
*
|
||||
* Instead, it uses the relative position of the port to infer the size of the node.
|
||||
* Assumes that the port's position reflects the visual layout and placement on the node.
|
||||
*
|
||||
* @param port - A PortModel instance attached to the node
|
||||
* @returns An object containing the inferred width and height of the node
|
||||
*/
|
||||
const calculateNodeDimension = (port: PortModel): Dimensions => {
|
||||
// Get the top-left position of the node
|
||||
const nodePos = port.getNode().getPosition();
|
||||
// Get the top-left position of the port
|
||||
const portPos = port.getPosition();
|
||||
// Width is the horizontal distance from the node's left to the port's right edge
|
||||
const width = (portPos.x - nodePos.x) + port.width;
|
||||
// Height is estimated by doubling the vertical offset from the node to the port
|
||||
// (port is vertically centered), then adding the port's height
|
||||
const height = Math.abs(portPos.y - nodePos.y) * 2 + port.height;
|
||||
|
||||
return { width, height };
|
||||
};

/**
 * Calculates a single control point for a cubic Bézier curve.
 * Adjusts based on direction, dynamic offset, and node boundaries.
 */
const calculateControlPoint = (
  anchor: Point,
  horizontalOffset: number,
  verticalOffset: number,
  verticalDirection: number,
  nodeBounds: Boundaries,
  isStart: boolean,
  controlPointPadding: number
): Point => {
  let x =
    anchor.x + (isStart ? horizontalOffset : -horizontalOffset);
  let y =
    anchor.y + (isStart ? verticalDirection * verticalOffset : -verticalDirection * verticalOffset);

  // Apply minimum horizontal constraint
  x = isStart
    ? Math.max(x, nodeBounds.right + controlPointPadding)
    : Math.min(x, nodeBounds.left - controlPointPadding);

  // Apply vertical constraint based on direction
  y =
    verticalDirection > 0
      ? isStart
        ? Math.max(y, nodeBounds.bottom + controlPointPadding)
        : Math.min(y, nodeBounds.top - controlPointPadding)
      : isStart
        ? Math.min(y, nodeBounds.top - controlPointPadding)
        : Math.max(y, nodeBounds.bottom + controlPointPadding);

  return { x, y };
};
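
// Illustrative call (numbers are made up; 20 stands in for CONTROL_POINT_PADDING):
// the start-side control point gets pushed past the source node's right and bottom edges.
calculateControlPoint(
  { x: 200, y: 100 },                            // anchor: source port centre
  150,                                           // horizontal offset
  80,                                            // vertical offset
  1,                                             // the target sits below the source
  { left: 0, right: 210, top: 60, bottom: 140 }, // source node boundaries
  true,                                          // start-side control point
  20                                             // padding (assumed value)
);
// → { x: Math.max(350, 230), y: Math.max(180, 160) } = { x: 350, y: 180 }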

const createCurvedPath = (start: Point, end: Point, nodeHeight: number) => {
  const controlPoint1X = start.x + nodeHeight - 20;
  const controlPoint1Y = start.y - nodeHeight;
@ -28,6 +203,74 @@ const createCurvedPath = (start: Point, end: Point, nodeHeight: number) => {

  return `M ${start.x},${start.y} C ${controlPoint1X},${controlPoint1Y} ${controlPoint2X},${controlPoint2Y} ${end.x},${end.y}`;
};

const createBackwardCurvedPath = (
  sourcePort: PortModel,
  targetPort: PortModel,
) => {
  // Set a threshold for node proximity, below which dynamic adjustments to offsets are applied
  // This helps in reducing abrupt curve steepness when nodes are close to each other
  const proximityThreshold = PROXIMITY_THRESHOLD;
  const minDistance = MIN_DISTANCE;
  const maxDistance = MAX_DISTANCE;
  const sourceNode = sourcePort.getNode();
  const targetNode = targetPort.getNode();
  // Get node dimensions
  const { width: sourceNodeWidth, height: sourceNodeHeight } = calculateNodeDimension(sourcePort);
  const { width: targetNodeWidth, height: targetNodeHeight } = calculateNodeDimension(targetPort);
  // Get node boundaries
  const sourceNodeBounds: Boundaries = calculateNodeBoundaries(sourceNode);
  const targetNodeBounds: Boundaries = calculateNodeBoundaries(targetNode);
  // Adjust `start` and `end` to match the exact center of the ports
  const adjustedStart: Point = getPortCenterPoint(sourcePort);
  const adjustedEnd: Point = getPortCenterPoint(targetPort);
  // Calculate the distance between nodes
  const nodeDistance: number = calculateDistance(adjustedStart, adjustedEnd);
  // Use node dimensions and distance to calculate dynamic offsets
  const horizontalOffset: number = Math.max(sourceNodeWidth, targetNodeWidth);
  const verticalOffset: number = Math.max(sourceNodeHeight, targetNodeHeight);

  // Dynamic factor, adjusting horizontal and vertical offsets based on the distance
  let adjustedHorizontalOffset: number = horizontalOffset * logFactor(nodeDistance, minDistance, maxDistance);
  let adjustedVerticalOffset: number = verticalOffset * logFactor(nodeDistance, minDistance, maxDistance);

  // Horizontal overlap ratio (0 = no overlap, 1 = fully overlapping horizontally)
  const xOverlapAmount: number = calculateXOverlap(sourceNodeBounds, targetNodeBounds);
  const xOverlapRatio: number = calculateOverlapRatio(xOverlapAmount, sourceNodeWidth, targetNodeWidth);
  // Vertical overlap ratio (0 = no overlap, 1 = fully overlapping vertically)
  const yOverlapAmount: number = calculateYOverlap(sourceNodeBounds, targetNodeBounds);
  const yOverlapRatio: number = calculateOverlapRatio(yOverlapAmount, sourceNodeHeight, targetNodeHeight);
  // Determine vertical direction for Y alignment
  const verticalDirection: number = adjustedEnd.y >= adjustedStart.y ? 1 : -1;

  // If the node distance is small, multiply offsets by the overlap ratios
  // to avoid abrupt curve steepness
  if (nodeDistance < proximityThreshold) {
    adjustedHorizontalOffset *= xOverlapRatio;
    adjustedVerticalOffset *= yOverlapRatio;
  }
  // Compute control points with dynamic offset
  const controlPoint1 = calculateControlPoint(
    adjustedStart,
    adjustedHorizontalOffset,
    adjustedVerticalOffset,
    verticalDirection,
    sourceNodeBounds,
    true,
    CONTROL_POINT_PADDING
  );
  const controlPoint2 = calculateControlPoint(
    adjustedEnd,
    adjustedHorizontalOffset,
    adjustedVerticalOffset,
    verticalDirection,
    targetNodeBounds,
    false,
    CONTROL_POINT_PADDING
  );

  // Return the cubic Bezier curve
  return `M ${adjustedStart.x},${adjustedStart.y} C ${controlPoint1.x},${controlPoint1.y} ${controlPoint2.x},${controlPoint2.y} ${adjustedEnd.x},${adjustedEnd.y}`;
};
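
// Hypothetical usage (ports and coordinates are illustrative, not from this commit):
// the returned string is a ready-made `d` attribute for the rendered SVG path.
const d = createBackwardCurvedPath(sourcePort, targetPort);
// e.g. "M 480,120 C 640,300 -40,300 120,120" — a cubic Bézier that swings around
// both nodes instead of cutting straight back through them.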

namespace S {
  export const Keyframes = keyframes`
@ -69,13 +312,12 @@ export class AdvancedLinkFactory extends DefaultLinkFactory {
    selected: boolean,
    path: string,
  ) {
    const isSelfLoop =
      model.getSourcePort().getNode() === model.getTargetPort().getNode();

    if (isSelfLoop) {
      // Adjust the path to create a curve
      const sourcePortPosition = model.getSourcePort().getPosition();
      const targetPortPosition = model.getTargetPort().getPosition();
    const backwardLinkThreshold = BACKWARD_LINK_THRESHOLD;
    const sourcePort = model.getSourcePort();
    const targetPort = model.getTargetPort();
    const isSelfLoop = sourcePort.getNode() === targetPort.getNode();
    const sourcePortPosition = sourcePort.getPosition();
    const targetPortPosition = targetPort.getPosition();
    const startPoint: Point = {
      x: sourcePortPosition.x + 20,
      y: sourcePortPosition.y + 20,
@ -84,21 +326,33 @@ export class AdvancedLinkFactory extends DefaultLinkFactory {
      x: targetPortPosition.x + 20,
      y: targetPortPosition.y + 20,
    };
    const targetPortHeight = model.getTargetPort().height;
    const targetNdeHeight =
      (model.getTargetPort().getPosition().y -
        model.getTargetPort().getNode().getPosition().y) *
    // Check if it's a backward link (moving left)
    const isBackward = startPoint.x - endPoint.x > backwardLinkThreshold;

    if (isSelfLoop) {
      // Adjust the start point to match the exact centre of the source port
      const adjustedStartPoint: Point = getPortCenterPoint(sourcePort);
      // Handle self-loop (curved) links
      const targetPortHeight = targetPort.height;
      const targetNodeHeight =
        (targetPort.getPosition().y -
          targetPort.getNode().getPosition().y) *
          2 +
        targetPortHeight;

      path = createCurvedPath(startPoint, endPoint, targetNdeHeight);
      path = createCurvedPath(adjustedStartPoint, endPoint, targetNodeHeight);
    } else if (isBackward) {
      // Handle backward (leftward) link with refined function
      path = createBackwardCurvedPath(sourcePort, targetPort);
    }

    return (
    return (
      <S.Path
        selected={selected}
        stroke={
          selected ? model.getOptions().selectedColor : model.getOptions().color
          selected
            ? model.getOptions().selectedColor
            : model.getOptions().color
        }
        strokeWidth={model.getOptions().width}
        d={path}

@ -8,7 +8,7 @@

import getConfig from "next/config";
import { useRouter } from "next/router";
import { createContext, ReactNode, useEffect, useState } from "react";
import { createContext, ReactNode } from "react";
import {
  QueryObserverResult,
  RefetchOptions,
@ -25,7 +25,6 @@ import { useSubscribeBroadcastChannel } from "@/hooks/useSubscribeBroadcastChann
import { useTranslate } from "@/hooks/useTranslate";
import { RouterType } from "@/services/types";
import { IUser } from "@/types/user.types";
import { getFromQuery } from "@/utils/URL";

export interface AuthContextValue {
  user: IUser | undefined;
@ -51,10 +50,8 @@ const { publicRuntimeConfig } = getConfig();

export const AuthProvider = ({ children }: AuthProviderProps): JSX.Element => {
  const router = useRouter();
  const [search, setSearch] = useState("");
  const hasPublicPath = PUBLIC_PATHS.includes(router.pathname);
  const { i18n } = useTranslate();
  const [isReady, setIsReady] = useState(false);
  const queryClient = useQueryClient();
  const updateLanguage = (lang: string) => {
    i18n.changeLanguage(lang);
@ -66,11 +63,11 @@ export const AuthProvider = ({ children }: AuthProviderProps): JSX.Element => {
  };
  const authRedirection = async (isAuthenticated: boolean) => {
    if (isAuthenticated) {
      const redirect = getFromQuery({ search, key: "redirect" });
      const nextPage = redirect && decodeURIComponent(redirect);

      if (nextPage?.startsWith("/")) {
        await router.push(nextPage);
      if (
        router.query.redirect &&
        router.query.redirect.toString().startsWith("/")
      ) {
        await router.push(router.query.redirect.toString());
      } else if (hasPublicPath) {
        await router.push(RouterType.HOME);
      }
@ -109,14 +106,9 @@ export const AuthProvider = ({ children }: AuthProviderProps): JSX.Element => {
    router.reload();
  });

  useEffect(() => {
    const search = location.search;

    setSearch(search);
    setIsReady(true);
  }, []);

  if (!isReady || isLoading) return <Progress />;
  if (isLoading) {
    return <Progress />;
  }

  return (
    <AuthContext.Provider
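
// Note on the redirect change above (URLs are illustrative, not from this commit):
// /login?redirect=%2Fvisual-editor        → router.query.redirect === "/visual-editor",
//   which passes the startsWith("/") guard and is pushed after authentication;
// /login?redirect=https%3A%2F%2Fevil.test → fails the guard, so the user is sent to
//   RouterType.HOME instead (assuming the login route is listed in PUBLIC_PATHS).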

@ -1,5 +1,5 @@
/*
 * Copyright © 2024 Hexastack. All rights reserved.
 * Copyright © 2025 Hexastack. All rights reserved.
 *
 * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
 * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
@ -7,12 +7,13 @@
 */

import { debounce } from "@mui/material";
import { ChangeEvent, useState } from "react";
import { useRouter } from "next/router";
import { ChangeEvent, useCallback, useEffect, useState } from "react";

import {
  TParamItem,
  TBuildParamProps,
  TBuildInitialParamProps,
  TBuildParamProps,
  TParamItem,
} from "@/types/search.types";

const buildOrParams = <T,>({ params, searchText }: TBuildParamProps<T>) => ({
@ -52,13 +53,38 @@ const buildNeqInitialParams = <T,>({
);

export const useSearch = <T,>(params: TParamItem<T>) => {
  const [searchText, setSearchText] = useState<string>("");
  const onSearch = debounce(
    (e: ChangeEvent<HTMLInputElement | HTMLTextAreaElement> | string) => {
      setSearchText(typeof e === "string" ? e : e.target.value);
    },
    300,
  const router = useRouter();
  const [searchText, setSearchText] = useState<string>(
    (router.query.search as string) || "",
  );

  useEffect(() => {
    if (router.query.search !== searchText) {
      setSearchText((router.query.search as string) || "");
    }
  }, [router.query.search]);

  const updateQueryParams = useCallback(
    debounce(async (newSearchText: string) => {
      await router.replace(
        {
          pathname: router.pathname,
          query: { ...router.query, search: newSearchText || undefined },
        },
        undefined,
        { shallow: true },
      );
    }, 300),
    [router],
  );
  const onSearch = (
    e: ChangeEvent<HTMLInputElement | HTMLTextAreaElement> | string,
  ) => {
    const newSearchText = typeof e === "string" ? e : e.target.value;

    setSearchText(newSearchText);
    updateQueryParams(newSearchText);
  };
  const {
    $eq: eqInitialParams,
    $iLike: iLikeParams,
@ -67,6 +93,7 @@ export const useSearch = <T,>(params: TParamItem<T>) => {
  } = params;

  return {
    searchText,
    onSearch,
    searchPayload: {
      where: {
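
// Illustrative sketch (not from this commit): typical wiring of the reworked hook.
// The entity shape and the `$iLike` field list are assumptions for the example.
const { searchText, onSearch, searchPayload } = useSearch<{ name: string }>({
  $iLike: ["name"],
});
// Typing "hello" into an input wired to `onSearch` ends up, after the 300 ms debounce,
// as a shallow router.replace to ?search=hello, and a fresh page load re-hydrates
// `searchText` from router.query.search.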

@ -19,6 +19,7 @@ export interface INlpValueAttributes {
  expressions?: string[];
  metadata?: Record<string, any>;
  builtin?: boolean;
  nlpSamplesCount?: number;
}

export interface INlpValueStub extends IBaseSchema, INlpValueAttributes {}

@ -6,25 +6,7 @@
 * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
 */

export const getFromQuery = ({
  key,
  search,
  defaultValue = "",
}: {
  key: string;
  search?: string;
  defaultValue?: string;
}) => {
  try {
    const paramsString = search || window.location.search;
    const searchParams = new URLSearchParams(paramsString);
    const loadCampaign = searchParams.get(key) || defaultValue;

    return loadCampaign;
  } catch (e) {
    return defaultValue;
  }
};
import qs from "qs";

export const buildURL = (baseUrl: string, relativePath: string): string => {
  try {
@ -57,3 +39,12 @@ export const isAbsoluteUrl = (value: string = ""): boolean => {
    return false;
  }
};

// todo: in the future we might need to extract this logic into a hook
export const extractQueryParamsUrl = (fullUrl: string): string => {
  const extractedQueryParams = qs.parse(new URL(fullUrl).search, {
    ignoreQueryPrefix: true,
  });

  return qs.stringify(extractedQueryParams);
};
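
// Illustrative call (URL is hypothetical): the helper round-trips the query string
// through `qs`, dropping the leading "?".
extractQueryParamsUrl("https://example.com/webview?first_name=Jane&token=abc");
// → "first_name=Jane&token=abc"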
6
package-lock.json
generated
6
package-lock.json
generated
@ -59,7 +59,6 @@
"@mui/x-data-grid": "^7.3.2",
"@projectstorm/react-canvas-core": "^7.0.3",
"@projectstorm/react-diagrams": "^7.0.4",
"@types/qs": "^6.9.15",
"axios": "^1.7.7",
"eazychart-css": "^0.2.1-alpha.0",
"eazychart-react": "^0.8.0-alpha.0",
@ -79,6 +78,7 @@
},
"devDependencies": {
"@types/node": "20.12.12",
"@types/qs": "^6.9.15",
"@types/random-seed": "^0.3.5",
"@types/react": "18.3.2",
"@types/react-dom": "^18",
@ -2824,6 +2824,7 @@
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@types/emoji-js/-/emoji-js-3.5.2.tgz",
"integrity": "sha512-qPR85yjSPk2UEbdjYYNHfcOjVod7DCARSrJlPcL+cwaDFwdnmOFhPyYUvP5GaW0YZEy8mU93ZjTNgsVWz1zzlg==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/estree": {
@ -2877,6 +2878,7 @@
"version": "6.9.15",
"resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.15.tgz",
"integrity": "sha512-uXHQKES6DQKKCLh441Xv/dwxOq1TVS3JPUMlEqoEglvlhR6Mxnlew/Xq/LRVHpLyk7iK3zODe1qYHIMltO7XGg==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/random-seed": {
@ -10307,7 +10309,6 @@
"version": "2.2.5",
"license": "AGPL-3.0-only",
"dependencies": {
"@types/emoji-js": "^3.5.2",
"autolinker": "^4.0.0",
"dayjs": "^1.11.12",
"emoji-js": "^3.8.0",
@ -10317,6 +10318,7 @@
"socket.io-client": "^4.7.5"
},
"devDependencies": {
"@types/emoji-js": "^3.5.2",
"@types/react": "^18.3.3",
"@types/react-dom": "^18.3.0",
"@typescript-eslint/eslint-plugin": "^7.15.0",

@ -20,7 +20,6 @@
"*.{ts,tsx}": "eslint --fix -c \".eslintrc-staged.json\""
},
"dependencies": {
"@types/emoji-js": "^3.5.2",
"autolinker": "^4.0.0",
"dayjs": "^1.11.12",
"emoji-js": "^3.8.0",
@ -30,6 +29,7 @@
"socket.io-client": "^4.7.5"
},
"devDependencies": {
"@types/emoji-js": "^3.5.2",
"@types/react": "^18.3.3",
"@types/react-dom": "^18.3.0",
"@typescript-eslint/eslint-plugin": "^7.15.0",