diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 45735c91..9323e6e6 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -77,6 +77,7 @@ jobs: context: ./widget/ file: ./widget/Dockerfile platforms: linux/amd64,linux/arm64 + target: production push: true tags: hexastack/hexabot-widget:latest diff --git a/Makefile b/Makefile index c7b635a4..915f3fd0 100644 --- a/Makefile +++ b/Makefile @@ -1,64 +1,60 @@ -COMPOSE_FILES := -f ./docker/docker-compose.yml +# Makefile +FOLDER := ./docker -# Function to add service files -define add_service - ifeq ($(PROD), true) - COMPOSE_FILES += -f ./docker/docker-compose.$(1).yml - ifneq ($(wildcard ./docker/docker-compose.$(1).prod.yml),) - COMPOSE_FILES += -f ./docker/docker-compose.$(1).prod.yml - endif - else ifeq ($(DEV_MODE), true) - COMPOSE_FILES += -f ./docker/docker-compose.$(1).yml - ifneq ($(wildcard ./docker/docker-compose.$(1).dev.yml),) - COMPOSE_FILES += -f ./docker/docker-compose.$(1).dev.yml - endif - endif +# The services that can be toggled +SERVICES := nginx nlu smtp4dev + +# Function to dynamically add Docker Compose files based on enabled services +define compose_files + $(foreach service,$(SERVICES),$(if $($(shell echo $(service) | tr a-z A-Z)), -f $(FOLDER)/docker-compose.$(service).yml)) endef +# Function to dynamically add Docker Compose dev files based on enabled services and file existence +define compose_dev_files + $(foreach service,$(SERVICES), \ + $(if $($(shell echo $(service) | tr a-z A-Z)), \ + $(if $(shell [ -f $(FOLDER)/docker-compose.$(service).dev.yml ] && echo yes), -f $(FOLDER)/docker-compose.$(service).dev.yml))) +endef -# Function to set up COMPOSE_FILES -define compose_files - ifeq ($(1), true) - ifneq ($(wildcard ./docker/docker-compose.dev.yml),) - COMPOSE_FILES += -f ./docker/docker-compose.dev.yml - endif - endif - ifneq ($(NGINX),) - $(eval $(call add_service,nginx)) - endif - ifneq ($(NLU),) - $(eval $(call add_service,nlu)) - 
endif +# Function to dynamically add Docker Compose prod files based on enabled services and file existence +define compose_prod_files + $(foreach service,$(SERVICES), \ + $(if $($(shell echo $(service) | tr a-z A-Z)), \ + $(if $(shell [ -f $(FOLDER)/docker-compose.$(service).prod.yml ] && echo yes), -f $(FOLDER)/docker-compose.$(service).prod.yml))) endef # Ensure .env file exists and matches .env.example check-env: - @if [ ! -f "./docker/.env" ]; then \ + @if [ ! -f "$(FOLDER)/.env" ]; then \ echo "Error: .env file does not exist. Creating one now from .env.example ..."; \ - cp ./docker/.env.example ./docker/.env; \ + cp $(FOLDER)/.env.example $(FOLDER)/.env; \ fi @echo "Checking .env file for missing variables..." - @awk -F '=' 'NR==FNR {a[$$1]; next} !($$1 in a) {print "Missing env var: " $$1}' ./docker/.env ./docker/.env.example + @awk -F '=' 'NR==FNR {a[$$1]; next} !($$1 in a) {print "Missing env var: " $$1}' $(FOLDER)/.env $(FOLDER)/.env.example init: - cp ./docker/.env.example ./docker/.env - -dev: check-env - $(eval $(call compose_files,true)) - docker compose $(COMPOSE_FILES) up -d + cp $(FOLDER)/.env.example $(FOLDER)/.env +# Start command: runs docker-compose with the main file and any additional service files start: check-env - $(eval $(call compose_files,false)) - docker compose $(COMPOSE_FILES) up -d + @docker compose -f $(FOLDER)/docker-compose.yml $(call compose_files) up -d -stop: check-env - $(eval $(call compose_files,true)) - docker compose $(COMPOSE_FILES) down +# Dev command: runs docker-compose with the main file, dev file, and any additional service dev files (if they exist) +dev: check-env + @docker compose -f $(FOLDER)/docker-compose.yml -f $(FOLDER)/docker-compose.dev.yml $(call compose_files) $(call compose_dev_files) up -d -destroy: check-env - $(eval $(call compose_files,true)) - docker compose $(COMPOSE_FILES) down -v +# Start-prod command: runs docker-compose with the main file, prod file, and any additional service prod files (if they exist) +start-prod: check-env + 
@docker compose -f $(FOLDER)/docker-compose.yml -f $(FOLDER)/docker-compose.prod.yml $(call compose_files) $(call compose_prod_files) up -d +# Stop command: stops the running containers +stop: + @docker compose -f $(FOLDER)/docker-compose.yml -f $(FOLDER)/docker-compose.dev.yml $(call compose_files) $(call compose_dev_files) $(call compose_prod_files) down + +# Destroy command: stops the running containers and removes the volumes +destroy: + @docker compose -f $(FOLDER)/docker-compose.yml -f $(FOLDER)/docker-compose.dev.yml $(call compose_files) $(call compose_dev_files) $(call compose_prod_files) down -v + +# Migrate command: migrate-up: - $(eval $(call compose_files,false)) - docker-compose $(COMPOSE_FILES) up --no-deps -d database-init + @docker compose -f $(FOLDER)/docker-compose.yml -f $(FOLDER)/docker-compose.dev.yml $(call compose_files) $(call compose_dev_files) up --no-deps -d database-init diff --git a/api/migrations/config/create.ts b/api/migrations/config/create.ts index fa712f51..09a8e103 100644 --- a/api/migrations/config/create.ts +++ b/api/migrations/config/create.ts @@ -11,6 +11,8 @@ import fs from 'fs'; import path from 'path'; +import escapeRegExp from 'lodash/escapeRegExp'; + // Get the argument passed (e.g., "all-users-fr") const arg: string | undefined = process.argv[2]; @@ -25,7 +27,7 @@ const templatePath: string = path.join(__dirname, '../config/template.ts'); // Check if a migration with the same name (excluding timestamp) already exists const migrationExists: boolean = fs.readdirSync(migrationsDir).some((file) => { - const regex = new RegExp(`^[0-9]+-${arg}\.ts$`); + const regex = new RegExp(`^[0-9]+-${escapeRegExp(arg)}\\.ts$`); return regex.test(file); }); diff --git a/api/migrations/models/index.ts b/api/migrations/models/index.ts index f99665ac..9d13dd21 100644 --- a/api/migrations/models/index.ts +++ b/api/migrations/models/index.ts @@ -26,13 +26,13 @@ import conversationSchema, { import labelSchema, { Label } from 
'@/chat/schemas/label.schema'; import messageSchema, { Message } from '@/chat/schemas/message.schema'; import subscriberSchema, { Subscriber } from '@/chat/schemas/subscriber.schema'; -import translationSchema, { - Translation, -} from '@/chat/schemas/translation.schema'; import { ContentType } from '@/cms/schemas/content-type.schema'; import contentSchema, { Content } from '@/cms/schemas/content.schema'; import menuSchema, { Menu } from '@/cms/schemas/menu.schema'; import { config } from '@/config'; +import translationSchema, { + Translation, +} from '@/i18n/schemas/translation.schema'; import nlpEntitySchema, { NlpEntity } from '@/nlp/schemas/nlp-entity.schema'; import nlpSampleEntitySchema, { NlpSampleEntity, diff --git a/api/package-lock.json b/api/package-lock.json index b38dbe1c..b0358ea8 100644 --- a/api/package-lock.json +++ b/api/package-lock.json @@ -67,6 +67,7 @@ "@types/express": "^4.17.17", "@types/express-session": "^1.17.10", "@types/jest": "^29.5.2", + "@types/lodash": "^4.17.9", "@types/minio": "^7.1.1", "@types/module-alias": "^2.0.4", "@types/multer": "^1.4.11", @@ -6027,6 +6028,12 @@ "@types/node": "*" } }, + "node_modules/@types/lodash": { + "version": "4.17.9", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.9.tgz", + "integrity": "sha512-w9iWudx1XWOHW5lQRS9iKpK/XuRhnN+0T7HvdCCd802FYkT1AMTnxndJHGrNJwRoRHkslGr4S29tjm1cT7x/7w==", + "dev": true + }, "node_modules/@types/mime": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", diff --git a/api/package.json b/api/package.json index 673b7b78..238d822e 100644 --- a/api/package.json +++ b/api/package.json @@ -91,6 +91,7 @@ "@types/express": "^4.17.17", "@types/express-session": "^1.17.10", "@types/jest": "^29.5.2", + "@types/lodash": "^4.17.9", "@types/module-alias": "^2.0.4", "@types/multer": "^1.4.11", "@types/node": "^20.3.1", diff --git a/api/src/app.module.ts b/api/src/app.module.ts index 53f6ef07..8025a47a 100644 --- 
a/api/src/app.module.ts +++ b/api/src/app.module.ts @@ -32,7 +32,7 @@ import { ChannelModule } from './channel/channel.module'; import { ChatModule } from './chat/chat.module'; import { CmsModule } from './cms/cms.module'; import { config } from './config'; -import { ExtendedI18nModule } from './extended-18n.module'; +import { I18nModule } from './i18n/i18n.module'; import { LoggerModule } from './logger/logger.module'; import { DtoUpdateMiddleware } from './middlewares/dto.update.middleware'; import { NlpModule } from './nlp/nlp.module'; @@ -44,7 +44,7 @@ import idPlugin from './utils/schema-plugin/id.plugin'; import { WebsocketModule } from './websocket/websocket.module'; const i18nOptions: I18nOptions = { - fallbackLanguage: config.chatbot.lang.default, + fallbackLanguage: 'en', loaderOptions: { path: path.join(__dirname, '/config/i18n/'), watch: true, @@ -120,7 +120,7 @@ const i18nOptions: I18nOptions = { ignoreErrors: false, }), CsrfModule, - ExtendedI18nModule.forRoot(i18nOptions), + I18nModule.forRoot(i18nOptions), CacheModule.register({ isGlobal: true, ttl: config.cache.ttl, diff --git a/api/src/app.service.ts b/api/src/app.service.ts index a78e0d3d..a81eaa28 100644 --- a/api/src/app.service.ts +++ b/api/src/app.service.ts @@ -9,11 +9,11 @@ import { Injectable } from '@nestjs/common'; -import { ExtendedI18nService } from './extended-i18n.service'; +import { I18nService } from './i18n/services/i18n.service'; @Injectable() export class AppService { - constructor(private readonly i18n: ExtendedI18nService) {} + constructor(private readonly i18n: I18nService) {} getHello(): string { return this.i18n.t('welcome', { lang: 'en' }); diff --git a/api/src/attachment/schemas/attachment.schema.ts b/api/src/attachment/schemas/attachment.schema.ts index 98eb9cb4..4f6ee7b1 100644 --- a/api/src/attachment/schemas/attachment.schema.ts +++ b/api/src/attachment/schemas/attachment.schema.ts @@ -13,6 +13,7 @@ import { THydratedDocument } from 'mongoose'; import { FileType } 
from '@/chat/schemas/types/attachment'; import { config } from '@/config'; import { BaseSchema } from '@/utils/generics/base-schema'; +import { buildURL } from '@/utils/helpers/URL'; import { MIME_REGEX } from '../utilities'; @@ -89,7 +90,10 @@ export class Attachment extends BaseSchema { attachmentId: string, attachmentName: string = '', ): string { - return `${config.parameters.apiUrl}/attachment/download/${attachmentId}/${attachmentName}`; + return buildURL( + config.parameters.apiUrl, + `/attachment/download/${attachmentId}/${attachmentName}`, + ); } /** @@ -119,7 +123,10 @@ export const AttachmentModel: ModelDefinition = { AttachmentModel.schema.virtual('url').get(function () { if (this._id && this.name) - return `${config.apiPath}/attachment/download/${this._id}/${this.name}`; + return buildURL( + config.apiPath, + `/attachment/download/${this._id}/${this.name}`, + ); return ''; }); diff --git a/api/src/chat/chat.module.ts b/api/src/chat/chat.module.ts index 772a7ab2..0c1cefad 100644 --- a/api/src/chat/chat.module.ts +++ b/api/src/chat/chat.module.ts @@ -23,7 +23,6 @@ import { ContextVarController } from './controllers/context-var.controller'; import { LabelController } from './controllers/label.controller'; import { MessageController } from './controllers/message.controller'; import { SubscriberController } from './controllers/subscriber.controller'; -import { TranslationController } from './controllers/translation.controller'; import { BlockRepository } from './repositories/block.repository'; import { CategoryRepository } from './repositories/category.repository'; import { ContextVarRepository } from './repositories/context-var.repository'; @@ -31,7 +30,6 @@ import { ConversationRepository } from './repositories/conversation.repository'; import { LabelRepository } from './repositories/label.repository'; import { MessageRepository } from './repositories/message.repository'; import { SubscriberRepository } from './repositories/subscriber.repository'; -import 
{ TranslationRepository } from './repositories/translation.repository'; import { BlockModel } from './schemas/block.schema'; import { CategoryModel } from './schemas/category.schema'; import { ContextVarModel } from './schemas/context-var.schema'; @@ -39,10 +37,8 @@ import { ConversationModel } from './schemas/conversation.schema'; import { LabelModel } from './schemas/label.schema'; import { MessageModel } from './schemas/message.schema'; import { SubscriberModel } from './schemas/subscriber.schema'; -import { TranslationModel } from './schemas/translation.schema'; import { CategorySeeder } from './seeds/category.seed'; import { ContextVarSeeder } from './seeds/context-var.seed'; -import { TranslationSeeder } from './seeds/translation.seed'; import { BlockService } from './services/block.service'; import { BotService } from './services/bot.service'; import { CategoryService } from './services/category.service'; @@ -52,7 +48,6 @@ import { ConversationService } from './services/conversation.service'; import { LabelService } from './services/label.service'; import { MessageService } from './services/message.service'; import { SubscriberService } from './services/subscriber.service'; -import { TranslationService } from './services/translation.service'; @Module({ imports: [ @@ -63,7 +58,6 @@ import { TranslationService } from './services/translation.service'; BlockModel, MessageModel, SubscriberModel, - TranslationModel, ConversationModel, SubscriberModel, ]), @@ -81,7 +75,6 @@ import { TranslationService } from './services/translation.service'; BlockController, MessageController, SubscriberController, - TranslationController, ], providers: [ CategoryRepository, @@ -90,7 +83,6 @@ import { TranslationService } from './services/translation.service'; BlockRepository, MessageRepository, SubscriberRepository, - TranslationRepository, ConversationRepository, CategoryService, ContextVarService, @@ -98,13 +90,11 @@ import { TranslationService } from 
'./services/translation.service'; BlockService, MessageService, SubscriberService, - TranslationService, CategorySeeder, ContextVarSeeder, ConversationService, ChatService, BotService, - TranslationSeeder, ], exports: [SubscriberService, MessageService, LabelService, BlockService], }) diff --git a/api/src/chat/controllers/block.controller.spec.ts b/api/src/chat/controllers/block.controller.spec.ts index 26590399..36580faf 100644 --- a/api/src/chat/controllers/block.controller.spec.ts +++ b/api/src/chat/controllers/block.controller.spec.ts @@ -19,7 +19,10 @@ import { AttachmentService } from '@/attachment/services/attachment.service'; import { ContentRepository } from '@/cms/repositories/content.repository'; import { ContentModel } from '@/cms/schemas/content.schema'; import { ContentService } from '@/cms/services/content.service'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { LanguageRepository } from '@/i18n/repositories/language.repository'; +import { LanguageModel } from '@/i18n/schemas/language.schema'; +import { I18nService } from '@/i18n/services/i18n.service'; +import { LanguageService } from '@/i18n/services/language.service'; import { LoggerService } from '@/logger/logger.service'; import { PluginService } from '@/plugins/plugins.service'; import { SettingService } from '@/setting/services/setting.service'; @@ -86,6 +89,7 @@ describe('BlockController', () => { UserModel, RoleModel, PermissionModel, + LanguageModel, ]), ], providers: [ @@ -97,6 +101,7 @@ describe('BlockController', () => { UserRepository, RoleRepository, PermissionRepository, + LanguageRepository, BlockService, LabelService, CategoryService, @@ -105,10 +110,11 @@ describe('BlockController', () => { UserService, RoleService, PermissionService, + LanguageService, PluginService, LoggerService, { - provide: ExtendedI18nService, + provide: I18nService, useValue: { t: jest.fn().mockImplementation((t) => t), }, diff --git 
a/api/src/chat/controllers/category.contoller.spec.ts b/api/src/chat/controllers/category.contoller.spec.ts index f346bd32..c307a798 100644 --- a/api/src/chat/controllers/category.contoller.spec.ts +++ b/api/src/chat/controllers/category.contoller.spec.ts @@ -18,7 +18,7 @@ import { AttachmentService } from '@/attachment/services/attachment.service'; import { ContentRepository } from '@/cms/repositories/content.repository'; import { ContentModel } from '@/cms/schemas/content.schema'; import { ContentService } from '@/cms/services/content.service'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { I18nService } from '@/i18n/services/i18n.service'; import { LoggerService } from '@/logger/logger.service'; import { PluginService } from '@/plugins/plugins.service'; import { SettingService } from '@/setting/services/setting.service'; @@ -77,7 +77,7 @@ describe('CategoryController', () => { }, LoggerService, { - provide: ExtendedI18nService, + provide: I18nService, useValue: { t: jest.fn().mockImplementation((t) => t), }, diff --git a/api/src/chat/controllers/message.controller.spec.ts b/api/src/chat/controllers/message.controller.spec.ts index 5f80153a..9e5c8772 100644 --- a/api/src/chat/controllers/message.controller.spec.ts +++ b/api/src/chat/controllers/message.controller.spec.ts @@ -19,7 +19,7 @@ import { ChannelService } from '@/channel/channel.service'; import { MenuRepository } from '@/cms/repositories/menu.repository'; import { MenuModel } from '@/cms/schemas/menu.schema'; import { MenuService } from '@/cms/services/menu.service'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { I18nService } from '@/i18n/services/i18n.service'; import { LoggerService } from '@/logger/logger.service'; import { NlpService } from '@/nlp/services/nlp.service'; import { SettingService } from '@/setting/services/setting.service'; @@ -92,7 +92,7 @@ describe('MessageController', () => { MenuService, MenuRepository, { - provide: 
ExtendedI18nService, + provide: I18nService, useValue: { t: jest.fn().mockImplementation((t) => t), }, diff --git a/api/src/chat/repositories/message.repository.ts b/api/src/chat/repositories/message.repository.ts index 31d6c256..cd3515dd 100644 --- a/api/src/chat/repositories/message.repository.ts +++ b/api/src/chat/repositories/message.repository.ts @@ -11,6 +11,7 @@ import { Injectable, Optional } from '@nestjs/common'; import { InjectModel } from '@nestjs/mongoose'; import { Model } from 'mongoose'; +import { LanguageService } from '@/i18n/services/language.service'; import { LoggerService } from '@/logger/logger.service'; import { NlpSampleCreateDto } from '@/nlp/dto/nlp-sample.dto'; import { NlpSampleState } from '@/nlp/schemas/types'; @@ -36,10 +37,13 @@ export class MessageRepository extends BaseRepository< private readonly logger: LoggerService; + private readonly languageService: LanguageService; + constructor( @InjectModel(Message.name) readonly model: Model, @Optional() nlpSampleService?: NlpSampleService, @Optional() logger?: LoggerService, + @Optional() languageService?: LanguageService, ) { super( model, @@ -49,6 +53,7 @@ export class MessageRepository extends BaseRepository< ); this.logger = logger; this.nlpSampleService = nlpSampleService; + this.languageService = languageService; } /** @@ -72,10 +77,13 @@ export class MessageRepository extends BaseRepository< 'message' in _doc && 'text' in _doc.message ) { + const defaultLang = await this.languageService?.getDefaultLanguage(); const record: NlpSampleCreateDto = { text: _doc.message.text, type: NlpSampleState.inbox, trained: false, + // @TODO : We need to define the language in the message entity + language: defaultLang.id, }; try { await this.nlpSampleService.findOneOrCreate(record, record); diff --git a/api/src/chat/services/block.service.spec.ts b/api/src/chat/services/block.service.spec.ts index b5f152f4..c6efe85a 100644 --- a/api/src/chat/services/block.service.spec.ts +++ 
b/api/src/chat/services/block.service.spec.ts @@ -7,6 +7,7 @@ * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. */ +import { CACHE_MANAGER } from '@nestjs/cache-manager'; import { EventEmitter2 } from '@nestjs/event-emitter'; import { MongooseModule } from '@nestjs/mongoose'; import { Test } from '@nestjs/testing'; @@ -24,11 +25,14 @@ import { ContentTypeModel } from '@/cms/schemas/content-type.schema'; import { Content, ContentModel } from '@/cms/schemas/content.schema'; import { ContentTypeService } from '@/cms/services/content-type.service'; import { ContentService } from '@/cms/services/content.service'; -import { ExtendedI18nService } from '@/extended-i18n.service'; import OfflineHandler from '@/extensions/channels/offline/index.channel'; import { OFFLINE_CHANNEL_NAME } from '@/extensions/channels/offline/settings'; import { Offline } from '@/extensions/channels/offline/types'; import OfflineEventWrapper from '@/extensions/channels/offline/wrapper'; +import { LanguageRepository } from '@/i18n/repositories/language.repository'; +import { LanguageModel } from '@/i18n/schemas/language.schema'; +import { I18nService } from '@/i18n/services/i18n.service'; +import { LanguageService } from '@/i18n/services/language.service'; import { LoggerService } from '@/logger/logger.service'; import { PluginService } from '@/plugins/plugins.service'; import { Settings } from '@/setting/schemas/types'; @@ -94,6 +98,7 @@ describe('BlockService', () => { ContentModel, AttachmentModel, LabelModel, + LanguageModel, ]), ], providers: [ @@ -102,18 +107,20 @@ describe('BlockService', () => { ContentTypeRepository, ContentRepository, AttachmentRepository, + LanguageRepository, BlockService, CategoryService, ContentTypeService, 
ContentService, AttachmentService, + LanguageService, { provide: PluginService, useValue: {}, }, LoggerService, { - provide: ExtendedI18nService, + provide: I18nService, useValue: { t: jest.fn().mockImplementation((t) => { return t === 'Welcome' ? 'Bienvenue' : t; @@ -132,6 +139,14 @@ describe('BlockService', () => { }, }, EventEmitter2, + { + provide: CACHE_MANAGER, + useValue: { + del: jest.fn(), + get: jest.fn(), + set: jest.fn(), + }, + }, ], }).compile(); blockService = module.get(BlockService); diff --git a/api/src/chat/services/block.service.ts b/api/src/chat/services/block.service.ts index af85e15f..f1163a9a 100644 --- a/api/src/chat/services/block.service.ts +++ b/api/src/chat/services/block.service.ts @@ -13,7 +13,8 @@ import { Attachment } from '@/attachment/schemas/attachment.schema'; import { AttachmentService } from '@/attachment/services/attachment.service'; import EventWrapper from '@/channel/lib/EventWrapper'; import { ContentService } from '@/cms/services/content.service'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { I18nService } from '@/i18n/services/i18n.service'; +import { LanguageService } from '@/i18n/services/language.service'; import { LoggerService } from '@/logger/logger.service'; import { Nlp } from '@/nlp/lib/types'; import { PluginService } from '@/plugins/plugins.service'; @@ -44,7 +45,8 @@ export class BlockService extends BaseService { private readonly settingService: SettingService, private readonly pluginService: PluginService, private readonly logger: LoggerService, - protected readonly i18n: ExtendedI18nService, + protected readonly i18n: I18nService, + protected readonly languageService: LanguageService, ) { super(repository); } @@ -109,12 +111,9 @@ export class BlockService extends BaseService { // Check & catch user language through NLP const nlp = event.getNLP(); if (nlp) { - const settings = await this.settingService.getSettings(); + const languages = await this.languageService.getLanguages(); 
const lang = nlp.entities.find((e) => e.entity === 'language'); - if ( - lang && - settings.nlp_settings.languages.indexOf(lang.value) !== -1 - ) { + if (lang && Object.keys(languages).indexOf(lang.value) !== -1) { const profile = event.getSender(); profile.language = lang.value; event.setSender(profile); @@ -372,12 +371,11 @@ export class BlockService extends BaseService { subscriberContext: SubscriberContext, settings: Settings, ): string { - const lang = - context && context.user && context.user.language - ? context.user.language - : settings.nlp_settings.default_lang; // Translate - text = this.i18n.t(text, { + text = this.i18n.t(text, { + lang: context?.user?.language, + defaultValue: text, + }); // Replace context tokens text = this.processTokenReplacements( text, diff --git a/api/src/chat/services/bot.service.spec.ts b/api/src/chat/services/bot.service.spec.ts index 6f6f8f5a..1d3fe530 100644 --- a/api/src/chat/services/bot.service.spec.ts +++ b/api/src/chat/services/bot.service.spec.ts @@ -25,10 +25,13 @@ import { MenuModel } from '@/cms/schemas/menu.schema'; import { ContentTypeService } from '@/cms/services/content-type.service'; import { ContentService } from '@/cms/services/content.service'; import { MenuService } from '@/cms/services/menu.service'; -import { ExtendedI18nService } from '@/extended-i18n.service'; import { offlineEventText } from '@/extensions/channels/offline/__test__/events.mock'; import OfflineHandler from '@/extensions/channels/offline/index.channel'; import OfflineEventWrapper from '@/extensions/channels/offline/wrapper'; +import { LanguageRepository } from '@/i18n/repositories/language.repository'; +import { LanguageModel } from '@/i18n/schemas/language.schema'; +import { I18nService } from '@/i18n/services/i18n.service'; +import { LanguageService } from '@/i18n/services/language.service'; import { LoggerService } from '@/logger/logger.service'; import { NlpEntityRepository } from 
'@/nlp/repositories/nlp-entity.repository'; import { NlpSampleEntityRepository } from '@/nlp/repositories/nlp-sample-entity.repository'; @@ -111,6 +114,7 @@ describe('BlockService', () => { NlpSampleEntityModel, NlpSampleModel, ContextVarModel, + LanguageModel, ]), ], providers: [ @@ -130,6 +134,7 @@ describe('BlockService', () => { NlpEntityRepository, NlpSampleEntityRepository, NlpSampleRepository, + LanguageRepository, BlockService, CategoryService, ContentTypeService, @@ -149,13 +154,14 @@ describe('BlockService', () => { NlpService, ContextVarService, ContextVarRepository, + LanguageService, { provide: PluginService, useValue: {}, }, LoggerService, { - provide: ExtendedI18nService, + provide: I18nService, useValue: { t: jest.fn().mockImplementation((t) => t), }, diff --git a/api/src/config/index.ts b/api/src/config/index.ts index 3b51ff4e..e6c30d72 100644 --- a/api/src/config/index.ts +++ b/api/src/config/index.ts @@ -16,7 +16,7 @@ export const config: Config = { translationFilename: process.env.I18N_TRANSLATION_FILENAME || 'messages', }, appPath: process.cwd(), - apiPath: process.env.API_ORIGIN, + apiPath: process.env.API_ORIGIN || 'http://localhost:4000', frontendPath: process.env.FRONTEND_ORIGIN ? 
process.env.FRONTEND_ORIGIN.split(',')[0] : 'http://localhost:8080', @@ -120,10 +120,6 @@ export const config: Config = { limit: 10, }, chatbot: { - lang: { - default: 'en', - available: ['en', 'fr'], - }, messages: { track_delivery: false, track_read: false, diff --git a/api/src/config/types.ts b/api/src/config/types.ts index 971a1be4..0d810362 100644 --- a/api/src/config/types.ts +++ b/api/src/config/types.ts @@ -15,7 +15,6 @@ type TJwtOptions = { secret: string; expiresIn: string; }; -type TLanguage = 'en' | 'fr' | 'ar' | 'tn'; type TMethods = 'GET' | 'PATCH' | 'POST' | 'DELETE' | 'OPTIONS' | 'HEAD'; type TLogLevel = 'log' | 'fatal' | 'error' | 'warn' | 'debug' | 'verbose'; type TCacheConfig = { @@ -87,10 +86,6 @@ export type Config = { limit: number; }; chatbot: { - lang: { - default: TLanguage; - available: TLanguage[]; - }; messages: { track_delivery: boolean; track_read: boolean; diff --git a/api/src/extended-18n.module.ts b/api/src/extended-18n.module.ts deleted file mode 100644 index 3bba6340..00000000 --- a/api/src/extended-18n.module.ts +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright © 2024 Hexastack. All rights reserved. - * - * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: - * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. - * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). - * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. 
- */ - -import { DynamicModule, Global, Inject, Module } from '@nestjs/common'; -import { HttpAdapterHost } from '@nestjs/core'; -import { - I18N_OPTIONS, - I18N_TRANSLATIONS, - I18nModule, - I18nOptions, - I18nTranslation, -} from 'nestjs-i18n'; -import { Observable } from 'rxjs'; - -import { ExtendedI18nService } from './extended-i18n.service'; - -@Global() -@Module({}) -export class ExtendedI18nModule extends I18nModule { - constructor( - i18n: ExtendedI18nService, - @Inject(I18N_TRANSLATIONS) - translations: Observable, - @Inject(I18N_OPTIONS) i18nOptions: I18nOptions, - adapter: HttpAdapterHost, - ) { - super(i18n, translations, i18nOptions, adapter); - } - - static forRoot(options: I18nOptions): DynamicModule { - const { providers, exports } = super.forRoot(options); - return { - module: ExtendedI18nModule, - providers: providers.concat(ExtendedI18nService), - exports: exports.concat(ExtendedI18nService), - }; - } -} diff --git a/api/src/extensions/channels/live-chat-tester/index.channel.ts b/api/src/extensions/channels/live-chat-tester/index.channel.ts index 3f5888ec..1c44879a 100644 --- a/api/src/extensions/channels/live-chat-tester/index.channel.ts +++ b/api/src/extensions/channels/live-chat-tester/index.channel.ts @@ -15,7 +15,7 @@ import { ChannelService } from '@/channel/channel.service'; import { MessageService } from '@/chat/services/message.service'; import { SubscriberService } from '@/chat/services/subscriber.service'; import { MenuService } from '@/cms/services/menu.service'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { I18nService } from '@/i18n/services/i18n.service'; import { LoggerService } from '@/logger/logger.service'; import { NlpService } from '@/nlp/services/nlp.service'; import { SettingCreateDto } from '@/setting/dto/setting.dto'; @@ -38,7 +38,7 @@ export default class LiveChatTesterHandler extends OfflineHandler { nlpService: NlpService, logger: LoggerService, eventEmitter: EventEmitter2, - i18n: 
ExtendedI18nService, + i18n: I18nService, subscriberService: SubscriberService, attachmentService: AttachmentService, messageService: MessageService, diff --git a/api/src/extensions/channels/offline/__test__/index.spec.ts b/api/src/extensions/channels/offline/__test__/index.spec.ts index 9bd05f45..d5a3bec8 100644 --- a/api/src/extensions/channels/offline/__test__/index.spec.ts +++ b/api/src/extensions/channels/offline/__test__/index.spec.ts @@ -35,7 +35,7 @@ import { SubscriberService } from '@/chat/services/subscriber.service'; import { MenuRepository } from '@/cms/repositories/menu.repository'; import { MenuModel } from '@/cms/schemas/menu.schema'; import { MenuService } from '@/cms/services/menu.service'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { I18nService } from '@/i18n/services/i18n.service'; import { LoggerService } from '@/logger/logger.service'; import { NlpService } from '@/nlp/services/nlp.service'; import { SettingService } from '@/setting/services/setting.service'; @@ -113,7 +113,7 @@ describe('Offline Handler', () => { EventEmitter2, LoggerService, { - provide: ExtendedI18nService, + provide: I18nService, useValue: { t: jest.fn().mockImplementation((t) => t), }, diff --git a/api/src/extensions/channels/offline/__test__/wrapper.spec.ts b/api/src/extensions/channels/offline/__test__/wrapper.spec.ts index 3a967641..ade376e1 100644 --- a/api/src/extensions/channels/offline/__test__/wrapper.spec.ts +++ b/api/src/extensions/channels/offline/__test__/wrapper.spec.ts @@ -25,7 +25,7 @@ import { SubscriberService } from '@/chat/services/subscriber.service'; import { MenuRepository } from '@/cms/repositories/menu.repository'; import { MenuModel } from '@/cms/schemas/menu.schema'; import { MenuService } from '@/cms/services/menu.service'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { I18nService } from '@/i18n/services/i18n.service'; import { LoggerService } from '@/logger/logger.service'; import { 
NlpService } from '@/nlp/services/nlp.service'; import { SettingService } from '@/setting/services/setting.service'; @@ -90,7 +90,7 @@ describe(`Offline event wrapper`, () => { EventEmitter2, LoggerService, { - provide: ExtendedI18nService, + provide: I18nService, useValue: { t: jest.fn().mockImplementation((t) => t), }, diff --git a/api/src/extensions/channels/offline/index.channel.ts b/api/src/extensions/channels/offline/index.channel.ts index b6073bd0..0ecf5c75 100644 --- a/api/src/extensions/channels/offline/index.channel.ts +++ b/api/src/extensions/channels/offline/index.channel.ts @@ -50,7 +50,7 @@ import { SubscriberService } from '@/chat/services/subscriber.service'; import { Content } from '@/cms/schemas/content.schema'; import { MenuService } from '@/cms/services/menu.service'; import { config } from '@/config'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { I18nService } from '@/i18n/services/i18n.service'; import { LoggerService } from '@/logger/logger.service'; import { NlpService } from '@/nlp/services/nlp.service'; import { SettingCreateDto } from '@/setting/dto/setting.dto'; @@ -73,7 +73,7 @@ export default class OfflineHandler extends ChannelHandler { nlpService: NlpService, logger: LoggerService, protected readonly eventEmitter: EventEmitter2, - protected readonly i18n: ExtendedI18nService, + protected readonly i18n: I18nService, protected readonly subscriberService: SubscriberService, protected readonly attachmentService: AttachmentService, protected readonly messageService: MessageService, @@ -477,7 +477,7 @@ export default class OfflineHandler extends ChannelHandler { ...channelData, name: this.getChannel(), }, - language: config.chatbot.lang.default, + language: '', locale: '', timezone: 0, gender: 'male', diff --git a/api/src/extensions/helpers/nlp/default/__test__/__mock__/base.mock.ts b/api/src/extensions/helpers/nlp/default/__test__/__mock__/base.mock.ts index d52548fb..bd85b61a 100644 --- 
a/api/src/extensions/helpers/nlp/default/__test__/__mock__/base.mock.ts +++ b/api/src/extensions/helpers/nlp/default/__test__/__mock__/base.mock.ts @@ -29,6 +29,13 @@ export const baseNlpEntity = { builtin: true, }; +export const baseLanguage = { + ...modelInstance, + title: 'English', + code: 'en', + isDefault: true, +}; + export const entitiesMock: NlpEntityFull[] = [ { ...baseNlpEntity, @@ -89,6 +96,7 @@ export const samplesMock: NlpSampleFull[] = [ ], trained: false, type: NlpSampleState.train, + language: baseLanguage, }, { ...modelInstance, @@ -112,5 +120,6 @@ export const samplesMock: NlpSampleFull[] = [ ], trained: false, type: NlpSampleState.train, + language: baseLanguage, }, ]; diff --git a/api/src/extensions/helpers/nlp/default/__test__/index.mock.ts b/api/src/extensions/helpers/nlp/default/__test__/index.mock.ts index 22f257df..c30acb8e 100644 --- a/api/src/extensions/helpers/nlp/default/__test__/index.mock.ts +++ b/api/src/extensions/helpers/nlp/default/__test__/index.mock.ts @@ -23,6 +23,10 @@ export const nlpEmptyFormated: DatasetType = { name: 'product', elements: ['pizza', 'sandwich'], }, + { + elements: ['en', 'fr'], + name: 'language', + }, ], entity_synonyms: [ { @@ -34,17 +38,33 @@ export const nlpEmptyFormated: DatasetType = { export const nlpFormatted: DatasetType = { common_examples: [ - { text: 'Hello', intent: 'greeting', entities: [] }, + { + text: 'Hello', + intent: 'greeting', + entities: [ + { + entity: 'language', + value: 'en', + }, + ], + }, { text: 'i want to order a pizza', intent: 'order', - entities: [{ entity: 'product', value: 'pizza', start: 19, end: 23 }], + entities: [ + { entity: 'product', value: 'pizza', start: 19, end: 23 }, + { + entity: 'language', + value: 'en', + }, + ], }, ], regex_features: [], lookup_tables: [ { name: 'intent', elements: ['greeting', 'order'] }, { name: 'product', elements: ['pizza', 'sandwich'] }, + { name: 'language', elements: ['en', 'fr'] }, ], entity_synonyms: [ { diff --git 
a/api/src/extensions/helpers/nlp/default/__test__/index.spec.ts b/api/src/extensions/helpers/nlp/default/__test__/index.spec.ts index ac8027d0..9433b405 100644 --- a/api/src/extensions/helpers/nlp/default/__test__/index.spec.ts +++ b/api/src/extensions/helpers/nlp/default/__test__/index.spec.ts @@ -8,10 +8,14 @@ */ import { HttpModule } from '@nestjs/axios'; +import { CACHE_MANAGER } from '@nestjs/cache-manager'; import { EventEmitter2 } from '@nestjs/event-emitter'; import { MongooseModule } from '@nestjs/mongoose'; import { Test, TestingModule } from '@nestjs/testing'; +import { LanguageRepository } from '@/i18n/repositories/language.repository'; +import { LanguageModel } from '@/i18n/schemas/language.schema'; +import { LanguageService } from '@/i18n/services/language.service'; import { LoggerService } from '@/logger/logger.service'; import { NlpEntityRepository } from '@/nlp/repositories/nlp-entity.repository'; import { NlpSampleEntityRepository } from '@/nlp/repositories/nlp-sample-entity.repository'; @@ -56,26 +60,11 @@ describe('NLP Default Helper', () => { NlpValueModel, NlpSampleModel, NlpSampleEntityModel, + LanguageModel, ]), HttpModule, ], providers: [ - LoggerService, - { - provide: SettingService, - useValue: { - getSettings: jest.fn(() => ({ - nlp_settings: { - provider: 'default', - endpoint: 'path', - token: 'token', - languages: ['fr', 'ar', 'tn'], - default_lang: 'fr', - threshold: '0.5', - }, - })), - }, - }, NlpService, NlpSampleService, NlpSampleRepository, @@ -85,8 +74,32 @@ describe('NLP Default Helper', () => { NlpValueRepository, NlpSampleEntityService, NlpSampleEntityRepository, + LanguageService, + LanguageRepository, EventEmitter2, DefaultNlpHelper, + LoggerService, + { + provide: SettingService, + useValue: { + getSettings: jest.fn(() => ({ + nlp_settings: { + provider: 'default', + endpoint: 'path', + token: 'token', + threshold: '0.5', + }, + })), + }, + }, + { + provide: CACHE_MANAGER, + useValue: { + del: jest.fn(), + get: 
jest.fn(), + set: jest.fn(), + }, + }, ], }).compile(); settingService = module.get(SettingService); @@ -103,15 +116,15 @@ describe('NLP Default Helper', () => { expect(nlp).toBeDefined(); }); - it('should format empty training set properly', () => { + it('should format empty training set properly', async () => { const nlp = nlpService.getNLP(); - const results = nlp.format([], entitiesMock); + const results = await nlp.format([], entitiesMock); expect(results).toEqual(nlpEmptyFormated); }); - it('should format training set properly', () => { + it('should format training set properly', async () => { const nlp = nlpService.getNLP(); - const results = nlp.format(samplesMock, entitiesMock); + const results = await nlp.format(samplesMock, entitiesMock); expect(results).toEqual(nlpFormatted); }); diff --git a/api/src/extensions/helpers/nlp/default/index.nlp.helper.ts b/api/src/extensions/helpers/nlp/default/index.nlp.helper.ts index 059dfdaf..04fc5d04 100644 --- a/api/src/extensions/helpers/nlp/default/index.nlp.helper.ts +++ b/api/src/extensions/helpers/nlp/default/index.nlp.helper.ts @@ -13,21 +13,14 @@ import { Injectable } from '@nestjs/common'; import { LoggerService } from '@/logger/logger.service'; import BaseNlpHelper from '@/nlp/lib/BaseNlpHelper'; import { Nlp } from '@/nlp/lib/types'; -import { NlpEntity, NlpEntityFull } from '@/nlp/schemas/nlp-entity.schema'; +import { NlpEntityFull } from '@/nlp/schemas/nlp-entity.schema'; import { NlpSampleFull } from '@/nlp/schemas/nlp-sample.schema'; -import { NlpValue } from '@/nlp/schemas/nlp-value.schema'; import { NlpEntityService } from '@/nlp/services/nlp-entity.service'; import { NlpSampleService } from '@/nlp/services/nlp-sample.service'; import { NlpService } from '@/nlp/services/nlp.service'; +import { buildURL } from '@/utils/helpers/URL'; -import { - CommonExample, - DatasetType, - EntitySynonym, - ExampleEntity, - LookupTable, - NlpParseResultType, -} from './types'; +import { DatasetType, NlpParseResultType 
} from './types'; @Injectable() export default class DefaultNlpHelper extends BaseNlpHelper { @@ -61,69 +54,16 @@ export default class DefaultNlpHelper extends BaseNlpHelper { * @param entities - All available entities * @returns {DatasetType} - The formatted RASA training set */ - format(samples: NlpSampleFull[], entities: NlpEntityFull[]): DatasetType { - const entityMap = NlpEntity.getEntityMap(entities); - const valueMap = NlpValue.getValueMap( - NlpValue.getValuesFromEntities(entities), + async format( + samples: NlpSampleFull[], + entities: NlpEntityFull[], + ): Promise { + const nluData = await this.nlpSampleService.formatRasaNlu( + samples, + entities, ); - const common_examples: CommonExample[] = samples - .filter((s) => s.entities.length > 0) - .map((s) => { - const intent = s.entities.find( - (e) => entityMap[e.entity].name === 'intent', - ); - if (!intent) { - throw new Error('Unable to find the `intent` nlp entity.'); - } - const sampleEntities: ExampleEntity[] = s.entities - .filter((e) => entityMap[e.entity].name !== 'intent') - .map((e) => { - const res: ExampleEntity = { - entity: entityMap[e.entity].name, - value: valueMap[e.value].value, - }; - if ('start' in e && 'end' in e) { - Object.assign(res, { - start: e.start, - end: e.end, - }); - } - return res; - }); - return { - text: s.text, - intent: valueMap[intent.value].value, - entities: sampleEntities, - }; - }); - const lookup_tables: LookupTable[] = entities.map((e) => { - return { - name: e.name, - elements: e.values.map((v) => { - return v.value; - }), - }; - }); - const entity_synonyms = entities - .reduce((acc, e) => { - const synonyms = e.values.map((v) => { - return { - value: v.value, - synonyms: v.expressions, - }; - }); - return acc.concat(synonyms); - }, [] as EntitySynonym[]) - .filter((s) => { - return s.synonyms.length > 0; - }); - return { - common_examples, - regex_features: [], - lookup_tables, - entity_synonyms, - }; + return nluData; } /** @@ -138,10 +78,10 @@ export default 
class DefaultNlpHelper extends BaseNlpHelper { entities: NlpEntityFull[], ): Promise { const self = this; - const nluData: DatasetType = self.format(samples, entities); + const nluData: DatasetType = await self.format(samples, entities); // Train samples const result = await this.httpService.axiosRef.post( - `${this.settings.endpoint}/train`, + buildURL(this.settings.endpoint, `/train`), nluData, { params: { @@ -169,10 +109,10 @@ export default class DefaultNlpHelper extends BaseNlpHelper { entities: NlpEntityFull[], ): Promise { const self = this; - const nluTestData: DatasetType = self.format(samples, entities); + const nluTestData: DatasetType = await self.format(samples, entities); // Evaluate model with test samples return await this.httpService.axiosRef.post( - `${this.settings.endpoint}/evaluate`, + buildURL(this.settings.endpoint, `/evaluate`), nluTestData, { params: { @@ -251,7 +191,7 @@ export default class DefaultNlpHelper extends BaseNlpHelper { try { const { data: nlp } = await this.httpService.axiosRef.post( - `${this.settings.endpoint}/parse`, + buildURL(this.settings.endpoint, '/parse'), { q: text, project, diff --git a/api/src/i18n/controllers/language.controller.spec.ts b/api/src/i18n/controllers/language.controller.spec.ts new file mode 100644 index 00000000..d07b5fdd --- /dev/null +++ b/api/src/i18n/controllers/language.controller.spec.ts @@ -0,0 +1,181 @@ +/* + * Copyright © 2024 Hexastack. All rights reserved. + * + * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: + * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. + * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). + * 3. 
SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. + */ + +import { CACHE_MANAGER } from '@nestjs/cache-manager'; +import { BadRequestException, NotFoundException } from '@nestjs/common'; +import { EventEmitter2 } from '@nestjs/event-emitter'; +import { MongooseModule } from '@nestjs/mongoose'; +import { Test } from '@nestjs/testing'; + +import { I18nService } from '@/i18n/services/i18n.service'; +import { LoggerService } from '@/logger/logger.service'; +import { NOT_FOUND_ID } from '@/utils/constants/mock'; +import { + installLanguageFixtures, + languageFixtures, +} from '@/utils/test/fixtures/language'; +import { getPageQuery } from '@/utils/test/pagination'; +import { + closeInMongodConnection, + rootMongooseTestModule, +} from '@/utils/test/test'; + +import { LanguageController } from './language.controller'; +import { LanguageUpdateDto } from '../dto/language.dto'; +import { LanguageRepository } from '../repositories/language.repository'; +import { Language, LanguageModel } from '../schemas/language.schema'; +import { LanguageService } from '../services/language.service'; + +describe('LanguageController', () => { + let languageController: LanguageController; + let languageService: LanguageService; + let language: Language; + + beforeAll(async () => { + const module = await Test.createTestingModule({ + imports: [ + rootMongooseTestModule(installLanguageFixtures), + MongooseModule.forFeature([LanguageModel]), + ], + providers: [ + LanguageController, + LanguageService, + LanguageRepository, + LoggerService, + { + provide: I18nService, + useValue: { + t: jest.fn().mockImplementation((t) => t), + initDynamicLanguages: jest.fn(), + }, + }, + { + provide: CACHE_MANAGER, + useValue: { + del: jest.fn(), + get: jest.fn(), + 
set: jest.fn(), + }, + }, + LoggerService, + EventEmitter2, + ], + }).compile(); + languageService = module.get(LanguageService); + languageController = module.get(LanguageController); + language = await languageService.findOne({ code: 'en' }); + }); + + afterEach(jest.clearAllMocks); + afterAll(closeInMongodConnection); + + describe('count', () => { + it('should count languages', async () => { + jest.spyOn(languageService, 'count'); + const result = await languageController.filterCount(); + + expect(languageService.count).toHaveBeenCalled(); + expect(result).toEqual({ count: languageFixtures.length }); + }); + }); + + describe('findOne', () => { + it('should find one translation by id', async () => { + jest.spyOn(languageService, 'findOne'); + const result = await languageController.findOne(language.id); + + expect(languageService.findOne).toHaveBeenCalledWith(language.id); + expect(result).toEqualPayload( + languageFixtures.find(({ code }) => code === language.code), + ); + }); + }); + + describe('findPage', () => { + const pageQuery = getPageQuery({ sort: ['code', 'asc'] }); + it('should find languages', async () => { + jest.spyOn(languageService, 'findPage'); + const result = await languageController.findPage(pageQuery, {}); + + expect(languageService.findPage).toHaveBeenCalledWith({}, pageQuery); + expect(result).toEqualPayload( + languageFixtures.sort(({ code: codeA }, { code: codeB }) => { + if (codeA < codeB) { + return -1; + } + if (codeA > codeB) { + return 1; + } + return 0; + }), + ); + }); + }); + + describe('updateOne', () => { + const translationUpdateDto: LanguageUpdateDto = { + title: 'English (US)', + }; + it('should update one language by id', async () => { + jest.spyOn(languageService, 'updateOne'); + const result = await languageController.updateOne( + language.id, + translationUpdateDto, + ); + + expect(languageService.updateOne).toHaveBeenCalledWith( + language.id, + translationUpdateDto, + ); + expect(result).toEqualPayload({ + 
...languageFixtures.find(({ code }) => code === language.code), + ...translationUpdateDto, + }); + }); + + it('should mark a language as default', async () => { + jest.spyOn(languageService, 'updateOne'); + const translationUpdateDto = { isDefault: true }; + const frLang = await languageService.findOne({ code: 'fr' }); + const result = await languageController.updateOne( + frLang.id, + translationUpdateDto, + ); + + expect(languageService.updateOne).toHaveBeenCalledWith( + frLang.id, + translationUpdateDto, + ); + expect(result).toEqualPayload({ + ...languageFixtures.find(({ code }) => code === frLang.code), + ...translationUpdateDto, + }); + + const enLang = await languageService.findOne({ code: 'en' }); + expect(enLang.isDefault).toBe(false); + }); + + it('should throw a NotFoundException when attempting to update a translation by id', async () => { + jest.spyOn(languageService, 'updateOne'); + await expect( + languageController.updateOne(NOT_FOUND_ID, translationUpdateDto), + ).rejects.toThrow(NotFoundException); + }); + }); + + describe('deleteOne', () => { + it('should throw when attempting to delete the default language', async () => { + const defaultLang = await languageService.findOne({ isDefault: true }); + + await expect( + languageController.deleteOne(defaultLang.id), + ).rejects.toThrow(BadRequestException); + }); + }); +}); diff --git a/api/src/i18n/controllers/language.controller.ts b/api/src/i18n/controllers/language.controller.ts new file mode 100644 index 00000000..12619fbb --- /dev/null +++ b/api/src/i18n/controllers/language.controller.ts @@ -0,0 +1,154 @@ +/* + * Copyright © 2024 Hexastack. All rights reserved. + * + * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: + * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. + * 2. 
All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). + * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. + */ + +import { + BadRequestException, + Body, + Controller, + Delete, + Get, + HttpCode, + NotFoundException, + Param, + Patch, + Post, + Query, + UseInterceptors, +} from '@nestjs/common'; +import { CsrfCheck } from '@tekuconcept/nestjs-csrf'; +import { TFilterQuery } from 'mongoose'; + +import { CsrfInterceptor } from '@/interceptors/csrf.interceptor'; +import { LoggerService } from '@/logger/logger.service'; +import { BaseController } from '@/utils/generics/base-controller'; +import { DeleteResult } from '@/utils/generics/base-repository'; +import { PageQueryDto } from '@/utils/pagination/pagination-query.dto'; +import { PageQueryPipe } from '@/utils/pagination/pagination-query.pipe'; +import { SearchFilterPipe } from '@/utils/pipes/search-filter.pipe'; + +import { LanguageCreateDto, LanguageUpdateDto } from '../dto/language.dto'; +import { Language } from '../schemas/language.schema'; +import { LanguageService } from '../services/language.service'; + +@UseInterceptors(CsrfInterceptor) +@Controller('language') +export class LanguageController extends BaseController { + constructor( + private readonly languageService: LanguageService, + private readonly logger: LoggerService, + ) { + super(languageService); + } + + /** + * Retrieves a paginated list of languages based on provided filters and pagination settings. + * @param pageQuery - The pagination settings. + * @param filters - The filters to apply to the language search. 
+ * @returns A Promise that resolves to a paginated list of languages. + */ + @Get() + async findPage( + @Query(PageQueryPipe) pageQuery: PageQueryDto, + @Query(new SearchFilterPipe({ allowedFields: ['title', 'code'] })) + filters: TFilterQuery, + ) { + return await this.languageService.findPage(filters, pageQuery); + } + + /** + * Counts the filtered number of languages. + * @returns A promise that resolves to an object representing the filtered number of languages. + */ + @Get('count') + async filterCount( + @Query( + new SearchFilterPipe({ + allowedFields: ['title', 'code'], + }), + ) + filters?: TFilterQuery, + ) { + return await this.count(filters); + } + + /** + * Finds a language by its ID. + * @param id - The ID of the language to find. + * @returns A Promise that resolves to the found language. + */ + @Get(':id') + async findOne(@Param('id') id: string): Promise { + const doc = await this.languageService.findOne(id); + if (!doc) { + this.logger.warn(`Unable to find Language by id ${id}`); + throw new NotFoundException(`Language with ID ${id} not found`); + } + return doc; + } + + /** + * Creates a new language. + * @param language - The data of the language to be created. + * @returns A Promise that resolves to the created language. + */ + @CsrfCheck(true) + @Post() + async create(@Body() language: LanguageCreateDto): Promise { + return await this.languageService.create(language); + } + + /** + * Updates an existing language. + * @param id - The ID of the language to be updated. + * @param languageUpdate - The updated data for the language. + * @returns A Promise that resolves to the updated language. 
+ */ + @CsrfCheck(true) + @Patch(':id') + async updateOne( + @Param('id') id: string, + @Body() languageUpdate: LanguageUpdateDto, + ): Promise { + if ('isDefault' in languageUpdate) { + if (languageUpdate.isDefault) { + // A new default language is defined, make sure that only one is marked as default + await this.languageService.updateMany({}, { isDefault: false }); + } else { + throw new BadRequestException('Should not be able to disable default'); + } + } + + const result = await this.languageService.updateOne(id, languageUpdate); + if (!result) { + this.logger.warn(`Unable to update Language by id ${id}`); + throw new NotFoundException(`Language with ID ${id} not found`); + } + return result; + } + + /** + * Deletes a language by its ID. + * @param id - The ID of the language to be deleted. + * @returns A Promise that resolves to the deletion result. 
'@/attachment/services/attachment.service'; import { ChannelService } from '@/channel/channel.service'; +import { MessageController } from '@/chat/controllers/message.controller'; +import { BlockRepository } from '@/chat/repositories/block.repository'; +import { MessageRepository } from '@/chat/repositories/message.repository'; +import { SubscriberRepository } from '@/chat/repositories/subscriber.repository'; +import { BlockModel } from '@/chat/schemas/block.schema'; +import { MessageModel } from '@/chat/schemas/message.schema'; +import { SubscriberModel } from '@/chat/schemas/subscriber.schema'; +import { BlockService } from '@/chat/services/block.service'; +import { MessageService } from '@/chat/services/message.service'; +import { SubscriberService } from '@/chat/services/subscriber.service'; import { ContentRepository } from '@/cms/repositories/content.repository'; import { MenuRepository } from '@/cms/repositories/menu.repository'; import { ContentModel } from '@/cms/schemas/content.schema'; import { MenuModel } from '@/cms/schemas/menu.schema'; import { ContentService } from '@/cms/services/content.service'; import { MenuService } from '@/cms/services/menu.service'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { I18nService } from '@/i18n/services/i18n.service'; import { LoggerService } from '@/logger/logger.service'; import { NlpService } from '@/nlp/services/nlp.service'; import { PluginService } from '@/plugins/plugins.service'; @@ -39,20 +49,13 @@ import { rootMongooseTestModule, } from '@/utils/test/test'; -import { MessageController } from './message.controller'; import { TranslationController } from './translation.controller'; import { TranslationUpdateDto } from '../dto/translation.dto'; -import { BlockRepository } from '../repositories/block.repository'; -import { MessageRepository } from '../repositories/message.repository'; -import { SubscriberRepository } from '../repositories/subscriber.repository'; +import { 
LanguageRepository } from '../repositories/language.repository'; import { TranslationRepository } from '../repositories/translation.repository'; -import { BlockModel } from '../schemas/block.schema'; -import { MessageModel } from '../schemas/message.schema'; -import { SubscriberModel } from '../schemas/subscriber.schema'; +import { LanguageModel } from '../schemas/language.schema'; import { Translation, TranslationModel } from '../schemas/translation.schema'; -import { BlockService } from '../services/block.service'; -import { MessageService } from '../services/message.service'; -import { SubscriberService } from '../services/subscriber.service'; +import { LanguageService } from '../services/language.service'; import { TranslationService } from '../services/translation.service'; describe('TranslationController', () => { @@ -73,6 +76,7 @@ describe('TranslationController', () => { MenuModel, BlockModel, ContentModel, + LanguageModel, ]), ], providers: [ @@ -114,10 +118,10 @@ describe('TranslationController', () => { EventEmitter2, LoggerService, { - provide: ExtendedI18nService, + provide: I18nService, useValue: { t: jest.fn().mockImplementation((t) => t), - initDynamicTranslations: jest.fn(), + refreshDynamicTranslations: jest.fn(), }, }, { @@ -129,6 +133,8 @@ describe('TranslationController', () => { }, }, LoggerService, + LanguageService, + LanguageRepository, ], }).compile(); translationService = module.get(TranslationService); diff --git a/api/src/chat/controllers/translation.controller.ts b/api/src/i18n/controllers/translation.controller.ts similarity index 65% rename from api/src/chat/controllers/translation.controller.ts rename to api/src/i18n/controllers/translation.controller.ts index 6db99836..1c81e82b 100644 --- a/api/src/chat/controllers/translation.controller.ts +++ b/api/src/i18n/controllers/translation.controller.ts @@ -8,15 +8,18 @@ */ import { + BadRequestException, Body, Controller, + Delete, Get, + HttpCode, NotFoundException, Param, Patch, + 
Post, Query, UseInterceptors, - Post, } from '@nestjs/common'; import { CsrfCheck } from '@tekuconcept/nestjs-csrf'; import { TFilterQuery } from 'mongoose'; @@ -25,18 +28,21 @@ import { CsrfInterceptor } from '@/interceptors/csrf.interceptor'; import { LoggerService } from '@/logger/logger.service'; import { SettingService } from '@/setting/services/setting.service'; import { BaseController } from '@/utils/generics/base-controller'; +import { DeleteResult } from '@/utils/generics/base-repository'; import { PageQueryDto } from '@/utils/pagination/pagination-query.dto'; import { PageQueryPipe } from '@/utils/pagination/pagination-query.pipe'; import { SearchFilterPipe } from '@/utils/pipes/search-filter.pipe'; import { TranslationUpdateDto } from '../dto/translation.dto'; import { Translation } from '../schemas/translation.schema'; +import { LanguageService } from '../services/language.service'; import { TranslationService } from '../services/translation.service'; @UseInterceptors(CsrfInterceptor) @Controller('translation') export class TranslationController extends BaseController { constructor( + private readonly languageService: LanguageService, private readonly translationService: TranslationService, private readonly settingService: SettingService, private readonly logger: LoggerService, @@ -103,40 +109,56 @@ export class TranslationController extends BaseController { @CsrfCheck(true) @Post('refresh') async refresh(): Promise { - const settings = await this.settingService.getSettings(); - const languages = settings.nlp_settings.languages; - const defaultTrans: Translation['translations'] = languages.reduce( - (acc, curr) => { - acc[curr] = ''; - return acc; - }, - {} as { [key: string]: string }, - ); + const defaultLanguage = await this.languageService.getDefaultLanguage(); + const languages = await this.languageService.getLanguages(); + const defaultTrans: Translation['translations'] = Object.keys(languages) + .filter((lang) => lang !== defaultLanguage.code) + 
.reduce( + (acc, curr) => { + acc[curr] = ''; + return acc; + }, + {} as { [key: string]: string }, + ); // Scan Blocks - return this.translationService - .getAllBlockStrings() - .then(async (strings: string[]) => { - const settingStrings = - await this.translationService.getSettingStrings(); - // Scan global settings - strings = strings.concat(settingStrings); - // Filter unique and not empty messages - strings = strings.filter((str, pos) => { - return str && strings.indexOf(str) == pos; - }); - // Perform refresh - const queue = strings.map((str) => - this.translationService.findOneOrCreate( - { str }, - { str, translations: defaultTrans as any, translated: 100 }, - ), - ); - return Promise.all(queue).then(() => { - // Purge non existing translations - return this.translationService.deleteMany({ - str: { $nin: strings }, - }); - }); - }); + let strings = await this.translationService.getAllBlockStrings(); + const settingStrings = await this.translationService.getSettingStrings(); + // Scan global settings + strings = strings.concat(settingStrings); + // Filter unique and not empty messages + strings = strings.filter((str, pos) => { + return str && strings.indexOf(str) == pos; + }); + // Perform refresh + const queue = strings.map((str) => + this.translationService.findOneOrCreate( + { str }, + { str, translations: defaultTrans }, + ), + ); + await Promise.all(queue); + // Purge non existing translations + return this.translationService.deleteMany({ + str: { $nin: strings }, + }); + } + + /** + * Deletes a translation by its ID. + * @param id - The ID of the translation to be deleted. + * @returns A Promise that resolves to the deletion result. 
+ */ + @CsrfCheck(true) + @Delete(':id') + @HttpCode(204) + async deleteOne(@Param('id') id: string): Promise { + const result = await this.translationService.deleteOne(id); + if (result.deletedCount === 0) { + this.logger.warn(`Unable to delete Translation by id ${id}`); + throw new BadRequestException( + `Unable to delete Translation with ID ${id}`, + ); + } + return result; } } diff --git a/api/src/i18n/dto/language.dto.ts b/api/src/i18n/dto/language.dto.ts new file mode 100644 index 00000000..dd70714f --- /dev/null +++ b/api/src/i18n/dto/language.dto.ts @@ -0,0 +1,35 @@ +/* + * Copyright © 2024 Hexastack. All rights reserved. + * + * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: + * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. + * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). + * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. 
+ */ + +import { PartialType } from '@nestjs/mapped-types'; +import { ApiProperty } from '@nestjs/swagger'; +import { IsBoolean, IsNotEmpty, IsOptional, IsString } from 'class-validator'; + +export class LanguageCreateDto { + @ApiProperty({ description: 'Language Title', type: String }) + @IsNotEmpty() + @IsString() + title: string; + + @ApiProperty({ description: 'Language Code', type: String }) + @IsNotEmpty() + @IsString() + code: string; + + @ApiProperty({ description: 'Whether Language is RTL', type: Boolean }) + @IsBoolean() + isRTL: boolean; + + @ApiProperty({ description: 'Is Default Language ?', type: Boolean }) + @IsOptional() + @IsBoolean() + isDefault?: boolean; +} + +export class LanguageUpdateDto extends PartialType(LanguageCreateDto) {} diff --git a/api/src/chat/dto/translation.dto.ts b/api/src/i18n/dto/translation.dto.ts similarity index 100% rename from api/src/chat/dto/translation.dto.ts rename to api/src/i18n/dto/translation.dto.ts diff --git a/api/src/i18n/i18n.module.ts b/api/src/i18n/i18n.module.ts new file mode 100644 index 00000000..5ba93764 --- /dev/null +++ b/api/src/i18n/i18n.module.ts @@ -0,0 +1,79 @@ +/* + * Copyright © 2024 Hexastack. All rights reserved. + * + * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: + * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. + * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). + * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. 
+ */ + +import { + DynamicModule, + forwardRef, + Global, + Inject, + Module, +} from '@nestjs/common'; +import { HttpAdapterHost } from '@nestjs/core'; +import { MongooseModule } from '@nestjs/mongoose'; +import { + I18N_OPTIONS, + I18N_TRANSLATIONS, + I18nOptions, + I18nTranslation, + I18nModule as NativeI18nModule, +} from 'nestjs-i18n'; +import { Observable } from 'rxjs'; + +import { ChatModule } from '@/chat/chat.module'; + +import { LanguageController } from './controllers/language.controller'; +import { TranslationController } from './controllers/translation.controller'; +import { LanguageRepository } from './repositories/language.repository'; +import { TranslationRepository } from './repositories/translation.repository'; +import { LanguageModel } from './schemas/language.schema'; +import { TranslationModel } from './schemas/translation.schema'; +import { LanguageSeeder } from './seeds/language.seed'; +import { TranslationSeeder } from './seeds/translation.seed'; +import { I18nService } from './services/i18n.service'; +import { LanguageService } from './services/language.service'; +import { TranslationService } from './services/translation.service'; + +@Global() +@Module({}) +export class I18nModule extends NativeI18nModule { + constructor( + i18n: I18nService, + @Inject(I18N_TRANSLATIONS) + translations: Observable, + @Inject(I18N_OPTIONS) i18nOptions: I18nOptions, + adapter: HttpAdapterHost, + ) { + super(i18n, translations, i18nOptions, adapter); + } + + static forRoot(options: I18nOptions): DynamicModule { + const { imports, providers, controllers, exports } = super.forRoot(options); + return { + module: I18nModule, + imports: (imports || []).concat([ + MongooseModule.forFeature([LanguageModel, TranslationModel]), + forwardRef(() => ChatModule), + ]), + controllers: (controllers || []).concat([ + LanguageController, + TranslationController, + ]), + providers: providers.concat([ + I18nService, + LanguageRepository, + LanguageService, + LanguageSeeder, + 
TranslationRepository, + TranslationService, + TranslationSeeder, + ]), + exports: exports.concat(I18nService, LanguageService), + }; + } +} diff --git a/api/src/i18n/repositories/language.repository.ts b/api/src/i18n/repositories/language.repository.ts new file mode 100644 index 00000000..604dff98 --- /dev/null +++ b/api/src/i18n/repositories/language.repository.ts @@ -0,0 +1,53 @@ +/* + * Copyright © 2024 Hexastack. All rights reserved. + * + * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: + * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. + * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). + * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. + */ + +import { Injectable } from '@nestjs/common'; +import { EventEmitter2 } from '@nestjs/event-emitter'; +import { InjectModel } from '@nestjs/mongoose'; +import { Document, Model, Query, TFilterQuery } from 'mongoose'; + +import { BaseRepository, DeleteResult } from '@/utils/generics/base-repository'; + +import { Language } from '../schemas/language.schema'; + +@Injectable() +export class LanguageRepository extends BaseRepository<Language> { + constructor( + @InjectModel(Language.name) readonly model: Model<Language>, + private readonly eventEmitter: EventEmitter2, + ) { + super(model, Language); + } + + /** + * Pre-delete hook that triggers before a language is deleted. + * + * @param _query The query used to delete the language.
+ * @param criteria The filter criteria used to find the language for deletion. + */ + async preDelete( + _query: Query< + DeleteResult, + Document, + unknown, + Language, + 'deleteOne' | 'deleteMany' + >, + criteria: TFilterQuery, + ): Promise { + if (criteria._id) { + const language = await this.findOne( + typeof criteria === 'string' ? { _id: criteria } : criteria, + ); + this.eventEmitter.emit('hook:language:delete', language); + } else { + throw new Error('Attempted to delete language using unknown criteria'); + } + } +} diff --git a/api/src/chat/repositories/translation.repository.ts b/api/src/i18n/repositories/translation.repository.ts similarity index 97% rename from api/src/chat/repositories/translation.repository.ts rename to api/src/i18n/repositories/translation.repository.ts index 0e969458..1009057c 100644 --- a/api/src/chat/repositories/translation.repository.ts +++ b/api/src/i18n/repositories/translation.repository.ts @@ -14,7 +14,7 @@ import { Document, Model, Query, Types } from 'mongoose'; import { BaseRepository, DeleteResult } from '@/utils/generics/base-repository'; -import { Translation } from '../schemas/translation.schema'; +import { Translation } from '../../i18n/schemas/translation.schema'; @Injectable() export class TranslationRepository extends BaseRepository { diff --git a/api/src/i18n/schemas/language.schema.ts b/api/src/i18n/schemas/language.schema.ts new file mode 100644 index 00000000..b56eda14 --- /dev/null +++ b/api/src/i18n/schemas/language.schema.ts @@ -0,0 +1,52 @@ +/* + * Copyright © 2024 Hexastack. All rights reserved. + * + * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: + * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. + * 2. 
All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). + * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. + */ + +import { Prop, Schema, SchemaFactory, ModelDefinition } from '@nestjs/mongoose'; +import { THydratedDocument } from 'mongoose'; + +import { BaseSchema } from '@/utils/generics/base-schema'; +import { LifecycleHookManager } from '@/utils/generics/lifecycle-hook-manager'; + +@Schema({ timestamps: true }) +export class Language extends BaseSchema { + @Prop({ + type: String, + required: true, + unique: true, + }) + title: string; + + @Prop({ + type: String, + required: true, + unique: true, + }) + code: string; + + @Prop({ + type: Boolean, + default: false, + }) + isDefault?: boolean; + + @Prop({ + type: Boolean, + default: false, + }) + isRTL?: boolean; +} + +export const LanguageModel: ModelDefinition = LifecycleHookManager.attach({ + name: Language.name, + schema: SchemaFactory.createForClass(Language), +}); + +export type LanguageDocument = THydratedDocument; + +export default LanguageModel.schema; diff --git a/api/src/chat/schemas/translation.schema.ts b/api/src/i18n/schemas/translation.schema.ts similarity index 89% rename from api/src/chat/schemas/translation.schema.ts rename to api/src/i18n/schemas/translation.schema.ts index 55cd9e73..12c80c59 100644 --- a/api/src/chat/schemas/translation.schema.ts +++ b/api/src/i18n/schemas/translation.schema.ts @@ -11,6 +11,7 @@ import { Prop, Schema, SchemaFactory, ModelDefinition } from '@nestjs/mongoose'; import { THydratedDocument } from 'mongoose'; import { BaseSchema } from 
'@/utils/generics/base-schema'; +import { LifecycleHookManager } from '@/utils/generics/lifecycle-hook-manager'; @Schema({ timestamps: true }) export class Translation extends BaseSchema { @@ -26,17 +27,12 @@ export class Translation extends BaseSchema { required: true, }) translations: Record; - - @Prop({ - type: Number, - }) - translated: number; } -export const TranslationModel: ModelDefinition = { +export const TranslationModel: ModelDefinition = LifecycleHookManager.attach({ name: Translation.name, schema: SchemaFactory.createForClass(Translation), -}; +}); export type TranslationDocument = THydratedDocument; diff --git a/api/src/i18n/seeds/language.seed-model.ts b/api/src/i18n/seeds/language.seed-model.ts new file mode 100644 index 00000000..6f4d0fb7 --- /dev/null +++ b/api/src/i18n/seeds/language.seed-model.ts @@ -0,0 +1,24 @@ +/* + * Copyright © 2024 Hexastack. All rights reserved. + * + * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: + * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. + * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). + * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. 
+ */ + +import { LanguageCreateDto } from '../dto/language.dto'; + +export const languageModels: LanguageCreateDto[] = [ + { + title: 'English', + code: 'en', + isRTL: false, + isDefault: true, + }, + { + title: 'Français', + code: 'fr', + isRTL: false, + }, +]; diff --git a/api/src/i18n/seeds/language.seed.ts b/api/src/i18n/seeds/language.seed.ts new file mode 100644 index 00000000..34a483df --- /dev/null +++ b/api/src/i18n/seeds/language.seed.ts @@ -0,0 +1,22 @@ +/* + * Copyright © 2024 Hexastack. All rights reserved. + * + * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: + * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. + * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). + * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. 
+ */ + +import { Injectable } from '@nestjs/common'; + +import { BaseSeeder } from '@/utils/generics/base-seeder'; + +import { LanguageRepository } from '../repositories/language.repository'; +import { Language } from '../schemas/language.schema'; + +@Injectable() +export class LanguageSeeder extends BaseSeeder { + constructor(private readonly languageRepository: LanguageRepository) { + super(languageRepository); + } +} diff --git a/api/src/chat/seeds/translation.seed-model.ts b/api/src/i18n/seeds/translation.seed-model.ts similarity index 100% rename from api/src/chat/seeds/translation.seed-model.ts rename to api/src/i18n/seeds/translation.seed-model.ts diff --git a/api/src/chat/seeds/translation.seed.ts b/api/src/i18n/seeds/translation.seed.ts similarity index 100% rename from api/src/chat/seeds/translation.seed.ts rename to api/src/i18n/seeds/translation.seed.ts diff --git a/api/src/extended-i18n.service.ts b/api/src/i18n/services/i18n.service.ts similarity index 69% rename from api/src/extended-i18n.service.ts rename to api/src/i18n/services/i18n.service.ts index 5bb4e3d7..3463035f 100644 --- a/api/src/extended-i18n.service.ts +++ b/api/src/i18n/services/i18n.service.ts @@ -8,22 +8,22 @@ */ import { Injectable } from '@nestjs/common'; -import { OnEvent } from '@nestjs/event-emitter'; -import { I18nService, Path, PathValue, TranslateOptions } from 'nestjs-i18n'; +import { + I18nService as NativeI18nService, + Path, + PathValue, + TranslateOptions, +} from 'nestjs-i18n'; import { IfAnyOrNever } from 'nestjs-i18n/dist/types'; -import { Translation } from './chat/schemas/translation.schema'; -import { config } from './config'; +import { config } from '@/config'; +import { Translation } from '@/i18n/schemas/translation.schema'; @Injectable() -export class ExtendedI18nService< +export class I18nService< K = Record, -> extends I18nService { - private dynamicTranslations: Record> = - config.chatbot.lang.available.reduce( - (acc, curr) => ({ ...acc, [curr]: {} }), - {}, 
- ); +> extends NativeI18nService { + private dynamicTranslations: Record> = {}; t

= any, R = PathValue>( key: P, @@ -35,17 +35,19 @@ export class ExtendedI18nService< ...options, }; let { lang } = options; - lang = lang ?? this.i18nOptions.fallbackLanguage; lang = this.resolveLanguage(lang); // Translate block message, button text, ... if (lang in this.dynamicTranslations) { if (key in this.dynamicTranslations[lang]) { - return this.dynamicTranslations[lang][key] as IfAnyOrNever< - R, - string, - R - >; + if (this.dynamicTranslations[lang][key]) { + return this.dynamicTranslations[lang][key] as IfAnyOrNever< + R, + string, + R + >; + } + return options.defaultValue as IfAnyOrNever; } } @@ -54,15 +56,13 @@ export class ExtendedI18nService< return super.t(key, options); } - @OnEvent('hook:i18n:refresh') - initDynamicTranslations(translations: Translation[]) { + refreshDynamicTranslations(translations: Translation[]) { this.dynamicTranslations = translations.reduce((acc, curr) => { const { str, translations } = curr; - Object.entries(translations) - .filter(([lang]) => lang in acc) - .forEach(([lang, t]) => { - acc[lang][str] = t; - }); + Object.entries(translations).forEach(([lang, t]) => { + acc[lang] = acc[lang] || {}; + acc[lang][str] = t; + }); return acc; }, this.dynamicTranslations); diff --git a/api/src/i18n/services/language.service.ts b/api/src/i18n/services/language.service.ts new file mode 100644 index 00000000..c70bfca8 --- /dev/null +++ b/api/src/i18n/services/language.service.ts @@ -0,0 +1,68 @@ +/* + * Copyright © 2024 Hexastack. All rights reserved. + * + * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: + * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. + * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). + * 3. 
SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. + */ + +import { CACHE_MANAGER } from '@nestjs/cache-manager'; +import { Inject, Injectable } from '@nestjs/common'; +import { Cache } from 'cache-manager'; + +import { + DEFAULT_LANGUAGE_CACHE_KEY, + LANGUAGES_CACHE_KEY, +} from '@/utils/constants/cache'; +import { Cacheable } from '@/utils/decorators/cacheable.decorator'; +import { BaseService } from '@/utils/generics/base-service'; + +import { LanguageRepository } from '../repositories/language.repository'; +import { Language } from '../schemas/language.schema'; + +@Injectable() +export class LanguageService extends BaseService { + constructor( + readonly repository: LanguageRepository, + @Inject(CACHE_MANAGER) private readonly cacheManager: Cache, + ) { + super(repository); + } + + /** + * Retrieves all available languages from the repository. + * + * @returns A promise that resolves to an object where each key is a language code + * and the corresponding value is the `Language` object. + */ + @Cacheable(LANGUAGES_CACHE_KEY) + async getLanguages() { + const languages = await this.findAll(); + return languages.reduce((acc, curr) => { + return { + ...acc, + [curr.code]: curr, + }; + }, {}); + } + + /** + * Retrieves the default language from the repository. + * + * @returns A promise that resolves to the default `Language` object. + */ + @Cacheable(DEFAULT_LANGUAGE_CACHE_KEY) + async getDefaultLanguage() { + return await this.findOne({ isDefault: true }); + } + + /** + * Retrieves the language by code. + * + * @returns A promise that resolves to the `Language` object. 
+ */ + async getLanguageByCode(code: string) { + return await this.findOne({ code }); + } +} diff --git a/api/src/i18n/services/translation.service.spec.ts b/api/src/i18n/services/translation.service.spec.ts new file mode 100644 index 00000000..12ad3de0 --- /dev/null +++ b/api/src/i18n/services/translation.service.spec.ts @@ -0,0 +1,266 @@ +/* + * Copyright © 2024 Hexastack. All rights reserved. + * + * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: + * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. + * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). + * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. 
+ */ + +import { EventEmitter2 } from '@nestjs/event-emitter'; +import { Test, TestingModule } from '@nestjs/testing'; + +import { I18nService } from '@/i18n/services/i18n.service'; +import { Settings } from '@/setting/schemas/types'; +import { SettingService } from '@/setting/services/setting.service'; + +import { Block } from '../../chat/schemas/block.schema'; +import { BlockOptions } from '../../chat/schemas/types/options'; +import { BlockService } from '../../chat/services/block.service'; +import { TranslationRepository } from '../repositories/translation.repository'; +import { TranslationService } from '../services/translation.service'; + +describe('TranslationService', () => { + let service: TranslationService; + let settingService: SettingService; + let i18nService: I18nService; + + beforeEach(async () => { + const module: TestingModule = await Test.createTestingModule({ + providers: [ + TranslationService, + { + provide: TranslationRepository, + useValue: { + findAll: jest.fn().mockResolvedValue([ + { + key: 'test', + value: 'test', + lang: 'en', + }, + ]), + }, + }, + { + provide: BlockService, + useValue: { + find: jest.fn().mockResolvedValue([ + { + id: 'blockId', + message: ['Test message'], + options: { + fallback: { + message: ['Fallback message'], + }, + }, + } as Block, + ]), + }, + }, + { + provide: SettingService, + useValue: { + getSettings: jest.fn().mockResolvedValue({ + chatbot_settings: { + global_fallback: true, + fallback_message: ['Global fallback message'], + }, + } as Settings), + }, + }, + { + provide: I18nService, + useValue: { + refreshDynamicTranslations: jest.fn(), + }, + }, + EventEmitter2, + ], + }).compile(); + + service = module.get(TranslationService); + settingService = module.get(SettingService); + i18nService = module.get(I18nService); + }); + + it('should call refreshDynamicTranslations with translations from findAll', async () => { + jest.spyOn(i18nService, 'refreshDynamicTranslations'); + await 
service.resetI18nTranslations(); + expect(i18nService.refreshDynamicTranslations).toHaveBeenCalledWith([ + { + key: 'test', + value: 'test', + lang: 'en', + }, + ]); + }); + + it('should return an array of strings from all blocks', async () => { + const strings = await service.getAllBlockStrings(); + expect(strings).toEqual(['Test message', 'Fallback message']); + }); + + it('should return an array of strings from the settings when global fallback is enabled', async () => { + const strings = await service.getSettingStrings(); + expect(strings).toEqual(['Global fallback message']); + }); + + it('should return an empty array from the settings when global fallback is disabled', async () => { + jest.spyOn(settingService, 'getSettings').mockResolvedValueOnce({ + chatbot_settings: { + global_fallback: false, + fallback_message: ['Global fallback message'], + }, + } as Settings); + + const strings = await service.getSettingStrings(); + expect(strings).toEqual([]); + }); + + it('should return an array of strings from a block with a quick reply message', () => { + const block = { + id: 'blockId', + name: 'Test Block', + category: 'Test Category', + position: { x: 0, y: 0 }, + message: { + text: 'Test message', + quickReplies: [ + { + title: 'Quick reply 1', + }, + { + title: 'Quick reply 2', + }, + ], + }, + options: { + fallback: { + active: true, + message: ['Fallback message'], + max_attempts: 3, + } as BlockOptions, + }, + createdAt: new Date(), + updatedAt: new Date(), + } as Block; + const strings = service.getBlockStrings(block); + expect(strings).toEqual([ + 'Test message', + 'Quick reply 1', + 'Quick reply 2', + 'Fallback message', + ]); + }); + + it('should return an array of strings from a block with a button message', () => { + const block = { + id: 'blockId', + name: 'Test Block', + category: 'Test Category', + position: { x: 0, y: 0 }, + message: { + text: 'Test message', + buttons: [ + { + title: 'Button 1', + }, + { + title: 'Button 2', + }, + ], + }, + 
options: { + fallback: { + active: true, + message: ['Fallback message'], + max_attempts: 3, + } as BlockOptions, + }, + createdAt: new Date(), + updatedAt: new Date(), + } as Block; + const strings = service.getBlockStrings(block); + expect(strings).toEqual([ + 'Test message', + 'Button 1', + 'Button 2', + 'Fallback message', + ]); + }); + + it('should return an array of strings from a block with a text message', () => { + const block = { + id: 'blockId', + name: 'Test Block', + category: 'Test Category', + position: { x: 0, y: 0 }, + message: ['Test message'], // Text message as an array + options: { + fallback: { + active: true, + message: ['Fallback message'], + max_attempts: 3, + } as BlockOptions, + }, + createdAt: new Date(), + updatedAt: new Date(), + } as Block; + const strings = service.getBlockStrings(block); + expect(strings).toEqual(['Test message', 'Fallback message']); + }); + + it('should return an array of strings from a block with a nested message object', () => { + const block = { + id: 'blockId', + name: 'Test Block', + category: 'Test Category', + position: { x: 0, y: 0 }, + message: { + text: 'Test message', // Nested text message + }, + options: { + fallback: { + active: true, + message: ['Fallback message'], + max_attempts: 3, + } as BlockOptions, + }, + createdAt: new Date(), + updatedAt: new Date(), + } as Block; + const strings = service.getBlockStrings(block); + expect(strings).toEqual(['Test message', 'Fallback message']); + }); + + it('should handle different message formats in getBlockStrings', () => { + // Covers lines 54-60, 65 + + // Test with an array message (line 54-57) + const block1 = { + id: 'blockId1', + message: ['This is a text message'], + options: { fallback: { message: ['Fallback message'] } }, + } as Block; + const strings1 = service.getBlockStrings(block1); + expect(strings1).toEqual(['This is a text message', 'Fallback message']); + + // Test with an object message (line 58-60) + const block2 = { + id: 'blockId2', + 
message: { text: 'Another text message' }, + options: { fallback: { message: ['Fallback message'] } }, + } as Block; + const strings2 = service.getBlockStrings(block2); + expect(strings2).toEqual(['Another text message', 'Fallback message']); + + // Test a block without a fallback (line 65) + const block3 = { + id: 'blockId3', + message: { text: 'Another test message' }, + options: {}, + } as Block; + const strings3 = service.getBlockStrings(block3); + expect(strings3).toEqual(['Another test message']); + }); +}); diff --git a/api/src/chat/services/translation.service.ts b/api/src/i18n/services/translation.service.ts similarity index 94% rename from api/src/chat/services/translation.service.ts rename to api/src/i18n/services/translation.service.ts index 113262a6..e632775a 100644 --- a/api/src/chat/services/translation.service.ts +++ b/api/src/i18n/services/translation.service.ts @@ -10,13 +10,13 @@ import { Injectable } from '@nestjs/common'; import { OnEvent } from '@nestjs/event-emitter'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { I18nService } from '@/i18n/services/i18n.service'; import { SettingService } from '@/setting/services/setting.service'; import { BaseService } from '@/utils/generics/base-service'; -import { BlockService } from './block.service'; +import { Block } from '../../chat/schemas/block.schema'; +import { BlockService } from '../../chat/services/block.service'; import { TranslationRepository } from '../repositories/translation.repository'; -import { Block } from '../schemas/block.schema'; import { Translation } from '../schemas/translation.schema'; @Injectable() @@ -25,7 +25,7 @@ export class TranslationService extends BaseService { readonly repository: TranslationRepository, private readonly blockService: BlockService, private readonly settingService: SettingService, - private readonly i18n: ExtendedI18nService, + private readonly i18n: I18nService, ) { super(repository); this.resetI18nTranslations(); @@ -33,7 
+33,7 @@ export class TranslationService extends BaseService { public async resetI18nTranslations() { const translations = await this.findAll(); - this.i18n.initDynamicTranslations(translations); + this.i18n.refreshDynamicTranslations(translations); } /** diff --git a/api/src/main.ts b/api/src/main.ts index 0d82293a..7ad1eef2 100644 --- a/api/src/main.ts +++ b/api/src/main.ts @@ -51,6 +51,7 @@ async function bootstrap() { app.useGlobalPipes( new ValidationPipe({ whitelist: true, + transform: true, // forbidNonWhitelisted: true, }), new ObjectIdPipe(), diff --git a/api/src/nlp/controllers/nlp-sample.controller.spec.ts b/api/src/nlp/controllers/nlp-sample.controller.spec.ts index f71d1120..86fb6230 100644 --- a/api/src/nlp/controllers/nlp-sample.controller.spec.ts +++ b/api/src/nlp/controllers/nlp-sample.controller.spec.ts @@ -18,7 +18,10 @@ import { Test, TestingModule } from '@nestjs/testing'; import { AttachmentRepository } from '@/attachment/repositories/attachment.repository'; import { AttachmentModel } from '@/attachment/schemas/attachment.schema'; import { AttachmentService } from '@/attachment/services/attachment.service'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { LanguageRepository } from '@/i18n/repositories/language.repository'; +import { Language, LanguageModel } from '@/i18n/schemas/language.schema'; +import { I18nService } from '@/i18n/services/i18n.service'; +import { LanguageService } from '@/i18n/services/language.service'; import { LoggerService } from '@/logger/logger.service'; import { SettingRepository } from '@/setting/repositories/setting.repository'; import { SettingModel } from '@/setting/schemas/setting.schema'; @@ -57,7 +60,9 @@ describe('NlpSampleController', () => { let nlpEntityService: NlpEntityService; let nlpValueService: NlpValueService; let attachmentService: AttachmentService; + let languageService: LanguageService; let byeJhonSampleId: string; + let languages: Language[]; beforeAll(async () => { 
const module: TestingModule = await Test.createTestingModule({ @@ -74,6 +79,7 @@ describe('NlpSampleController', () => { NlpEntityModel, NlpValueModel, SettingModel, + LanguageModel, ]), ], providers: [ @@ -88,13 +94,15 @@ describe('NlpSampleController', () => { NlpValueRepository, NlpSampleService, NlpSampleEntityService, + LanguageRepository, + LanguageService, EventEmitter2, NlpService, SettingRepository, SettingService, SettingSeeder, { - provide: ExtendedI18nService, + provide: I18nService, useValue: { t: jest.fn().mockImplementation((t) => t), }, @@ -122,6 +130,8 @@ describe('NlpSampleController', () => { }) ).id; attachmentService = module.get(AttachmentService); + languageService = module.get(LanguageService); + languages = await languageService.findAll(); }); afterAll(async () => { await closeInMongodConnection(); @@ -134,7 +144,7 @@ describe('NlpSampleController', () => { const pageQuery = getPageQuery({ sort: ['text', 'desc'] }); const result = await nlpSampleController.findPage( pageQuery, - ['entities'], + ['language', 'entities'], {}, ); const nlpSamples = await nlpSampleService.findAll(); @@ -146,6 +156,7 @@ describe('NlpSampleController', () => { entities: nlpSampleEntities.filter((currSampleEntity) => { return currSampleEntity.sample === currSample.id; }), + language: languages.find((lang) => lang.id === currSample.language), }; acc.push(sampleWithEntities); return acc; @@ -163,7 +174,12 @@ describe('NlpSampleController', () => { ['invalidCriteria'], {}, ); - expect(result).toEqualPayload(nlpSampleFixtures); + expect(result).toEqualPayload( + nlpSampleFixtures.map((sample) => ({ + ...sample, + language: languages[sample.language].id, + })), + ); }); }); @@ -177,14 +193,19 @@ describe('NlpSampleController', () => { describe('create', () => { it('should create nlp sample', async () => { + const enLang = await languageService.findOne({ code: 'en' }); const nlSample: NlpSampleDto = { text: 'text1', trained: true, type: NlpSampleState.test, entities: 
[], + language: 'en', }; const result = await nlpSampleController.create(nlSample); - expect(result).toEqualPayload(nlSample); + expect(result).toEqualPayload({ + ...nlSample, + language: enLang, + }); }); }); @@ -209,7 +230,10 @@ describe('NlpSampleController', () => { const result = await nlpSampleController.findOne(yessSample.id, [ 'invalidCreteria', ]); - expect(result).toEqualPayload(nlpSampleFixtures[0]); + expect(result).toEqualPayload({ + ...nlpSampleFixtures[0], + language: languages[nlpSampleFixtures[0].language].id, + }); }); it('should find a nlp sample and populate its entities', async () => { @@ -225,6 +249,7 @@ describe('NlpSampleController', () => { const samplesWithEntities = { ...nlpSampleFixtures[0], entities: [yessSampleEntity], + language: languages[nlpSampleFixtures[0].language], }; expect(result).toEqualPayload(samplesWithEntities); }); @@ -241,6 +266,9 @@ describe('NlpSampleController', () => { const yessSample = await nlpSampleService.findOne({ text: 'yess', }); + const frLang = await languageService.findOne({ + code: 'fr', + }); const result = await nlpSampleController.updateOne(yessSample.id, { text: 'updated', trained: true, @@ -251,6 +279,7 @@ describe('NlpSampleController', () => { value: 'update', }, ], + language: 'fr', }); const updatedSample = { text: 'updated', @@ -263,11 +292,13 @@ describe('NlpSampleController', () => { value: expect.stringMatching(/^[a-z0-9]+$/), }, ], + language: frLang, }; expect(result.text).toEqual(updatedSample.text); expect(result.type).toEqual(updatedSample.type); expect(result.trained).toEqual(updatedSample.trained); expect(result.entities).toMatchObject(updatedSample.entities); + expect(result.language).toEqualPayload(updatedSample.language); }); it('should throw exception when nlp sample id not found', async () => { @@ -276,6 +307,7 @@ describe('NlpSampleController', () => { text: 'updated', trained: true, type: NlpSampleState.test, + language: 'fr', }), ).rejects.toThrow(NotFoundException); }); @@ 
-352,7 +384,7 @@ describe('NlpSampleController', () => { ).id; const mockCsvData: string = [ `text,intent,language`, - `Was kostet dieser bmw,preis,de`, + `How much does a BMW cost?,price,en`, ].join('\n'); jest.spyOn(fs, 'existsSync').mockReturnValueOnce(true); jest.spyOn(fs, 'readFileSync').mockReturnValueOnce(mockCsvData); @@ -361,17 +393,14 @@ describe('NlpSampleController', () => { const intentEntityResult = await nlpEntityService.findOne({ name: 'intent', }); - const languageEntityResult = await nlpEntityService.findOne({ - name: 'language', - }); - const preisValueResult = await nlpValueService.findOne({ - value: 'preis', - }); - const deValueResult = await nlpValueService.findOne({ - value: 'de', + const priceValueResult = await nlpValueService.findOne({ + value: 'price', }); const textSampleResult = await nlpSampleService.findOne({ - text: 'Was kostet dieser bmw', + text: 'How much does a BMW cost?', + }); + const language = await languageService.findOne({ + code: 'en', }); const intentEntity = { name: 'intent', @@ -379,40 +408,24 @@ describe('NlpSampleController', () => { doc: '', builtin: false, }; - const languageEntity = { - name: 'language', - lookups: ['trait'], - builtin: false, - doc: '', - }; - const preisVlueEntity = await nlpEntityService.findOne({ + const priceValueEntity = await nlpEntityService.findOne({ name: 'intent', }); - const preisValue = { - value: 'preis', + const priceValue = { + value: 'price', expressions: [], builtin: false, - entity: preisVlueEntity.id, - }; - const deValueEntity = await nlpEntityService.findOne({ - name: 'language', - }); - const deValue = { - value: 'de', - expressions: [], - builtin: false, - entity: deValueEntity.id, + entity: priceValueEntity.id, }; const textSample = { - text: 'Was kostet dieser bmw', + text: 'How much does a BMW cost?', trained: false, type: 'train', + language: language.id, }; - expect(languageEntityResult).toEqualPayload(languageEntity); 
expect(intentEntityResult).toEqualPayload(intentEntity); - expect(preisValueResult).toEqualPayload(preisValue); - expect(deValueResult).toEqualPayload(deValue); + expect(priceValueResult).toEqualPayload(priceValue); expect(textSampleResult).toEqualPayload(textSample); expect(result).toEqual({ success: true }); }); diff --git a/api/src/nlp/controllers/nlp-sample.controller.ts b/api/src/nlp/controllers/nlp-sample.controller.ts index 817718ec..50a118db 100644 --- a/api/src/nlp/controllers/nlp-sample.controller.ts +++ b/api/src/nlp/controllers/nlp-sample.controller.ts @@ -34,6 +34,7 @@ import Papa from 'papaparse'; import { AttachmentService } from '@/attachment/services/attachment.service'; import { config } from '@/config'; +import { LanguageService } from '@/i18n/services/language.service'; import { CsrfInterceptor } from '@/interceptors/csrf.interceptor'; import { LoggerService } from '@/logger/logger.service'; import { BaseController } from '@/utils/generics/base-controller'; @@ -70,6 +71,7 @@ export class NlpSampleController extends BaseController< private readonly nlpEntityService: NlpEntityService, private readonly logger: LoggerService, private readonly nlpService: NlpService, + private readonly languageService: LanguageService, ) { super(nlpSampleService); } @@ -91,7 +93,7 @@ export class NlpSampleController extends BaseController< type ? 
{ type } : {}, ); const entities = await this.nlpEntityService.findAllAndPopulate(); - const result = this.nlpSampleService.formatRasaNlu(samples, entities); + const result = await this.nlpSampleService.formatRasaNlu(samples, entities); // Sending the JSON data as a file const buffer = Buffer.from(JSON.stringify(result)); @@ -120,11 +122,18 @@ export class NlpSampleController extends BaseController< @CsrfCheck(true) @Post() async create( - @Body() { entities: nlpEntities, ...createNlpSampleDto }: NlpSampleDto, + @Body() + { + entities: nlpEntities, + language: languageCode, + ...createNlpSampleDto + }: NlpSampleDto, ): Promise { - const nlpSample = await this.nlpSampleService.create( - createNlpSampleDto as NlpSampleCreateDto, - ); + const language = await this.languageService.getLanguageByCode(languageCode); + const nlpSample = await this.nlpSampleService.create({ + ...createNlpSampleDto, + language: language.id, + }); const entities = await this.nlpSampleEntityService.storeSampleEntities( nlpSample, @@ -134,6 +143,7 @@ export class NlpSampleController extends BaseController< return { ...nlpSample, entities, + language, }; } @@ -243,7 +253,11 @@ export class NlpSampleController extends BaseController< async findPage( @Query(PageQueryPipe) pageQuery: PageQueryDto, @Query(PopulatePipe) populate: string[], - @Query(new SearchFilterPipe({ allowedFields: ['text', 'type'] })) + @Query( + new SearchFilterPipe({ + allowedFields: ['text', 'type', 'language'], + }), + ) filters: TFilterQuery, ) { return this.canPopulate(populate) @@ -263,12 +277,12 @@ export class NlpSampleController extends BaseController< @Patch(':id') async updateOne( @Param('id') id: string, - @Body() updateNlpSampleDto: NlpSampleDto, + @Body() { entities, language: languageCode, ...sampleAttrs }: NlpSampleDto, ): Promise { - const { text, type, entities } = updateNlpSampleDto; + const language = await this.languageService.getLanguageByCode(languageCode); const sample = await 
this.nlpSampleService.updateOne(id, { - text, - type, + ...sampleAttrs, + language: language.id, trained: false, }); @@ -284,6 +298,7 @@ export class NlpSampleController extends BaseController< return { ...sample, + language, entities: updatedSampleEntities, }; } @@ -366,6 +381,8 @@ export class NlpSampleController extends BaseController< } // Remove data with no intent const filteredData = result.data.filter((d) => d.intent !== 'none'); + const languages = await this.languageService.getLanguages(); + const defaultLanguage = await this.languageService.getDefaultLanguage(); // Reduce function to ensure executing promises one by one for (const d of filteredData) { try { @@ -375,15 +392,25 @@ export class NlpSampleController extends BaseController< }); // Skip if sample already exists - if (Array.isArray(existingSamples) && existingSamples.length > 0) { continue; } + // Fallback to default language if 'language' is missing or invalid + if (!d.language || !(d.language in languages)) { + if (d.language) { + this.logger.warn( + `Language "${d.language}" does not exist, falling back to default.`, + ); + } + d.language = defaultLanguage.code; + } + // Create a new sample dto const sample: NlpSampleCreateDto = { text: d.text, trained: false, + language: languages[d.language].id, }; // Create a new sample entity dto diff --git a/api/src/nlp/dto/nlp-sample.dto.ts b/api/src/nlp/dto/nlp-sample.dto.ts index fda56b09..3c42ab6f 100644 --- a/api/src/nlp/dto/nlp-sample.dto.ts +++ b/api/src/nlp/dto/nlp-sample.dto.ts @@ -16,27 +16,38 @@ import { IsString, } from 'class-validator'; +import { IsObjectId } from '@/utils/validation-rules/is-object-id'; + import { NlpSampleEntityValue, NlpSampleState } from '../schemas/types'; export class NlpSampleCreateDto { - @ApiProperty({ description: 'nlp sample text', type: String }) + @ApiProperty({ description: 'NLP sample text', type: String }) @IsString() @IsNotEmpty() text: string; - @ApiPropertyOptional({ description: 'nlp sample is trained', 
type: Boolean }) + @ApiPropertyOptional({ + description: 'If NLP sample is trained', + type: Boolean, + }) @IsBoolean() @IsOptional() trained?: boolean; @ApiPropertyOptional({ - description: 'nlp sample type', + description: 'NLP sample type', enum: Object.values(NlpSampleState), }) @IsString() @IsIn(Object.values(NlpSampleState)) @IsOptional() type?: NlpSampleState; + + @ApiProperty({ description: 'NLP sample language id', type: String }) + @IsString() + @IsNotEmpty() + @IsObjectId({ message: 'Language must be a valid ObjectId' }) + language: string; } export class NlpSampleDto extends NlpSampleCreateDto { @@ -45,6 +56,11 @@ export class NlpSampleDto extends NlpSampleCreateDto { }) @IsOptional() entities?: NlpSampleEntityValue[]; + + @ApiProperty({ description: 'NLP sample language code', type: String }) + @IsString() + @IsNotEmpty() + language: string; } export class NlpSampleUpdateDto extends PartialType(NlpSampleCreateDto) {} diff --git a/api/src/nlp/repositories/nlp-sample-entity.repository.spec.ts b/api/src/nlp/repositories/nlp-sample-entity.repository.spec.ts index 94aa10d5..772b7272 100644 --- a/api/src/nlp/repositories/nlp-sample-entity.repository.spec.ts +++ b/api/src/nlp/repositories/nlp-sample-entity.repository.spec.ts @@ -11,6 +11,8 @@ import { EventEmitter2 } from '@nestjs/event-emitter'; import { MongooseModule } from '@nestjs/mongoose'; import { Test, TestingModule } from '@nestjs/testing'; +import { LanguageRepository } from '@/i18n/repositories/language.repository'; +import { Language, LanguageModel } from '@/i18n/schemas/language.schema'; import { nlpSampleFixtures } from '@/utils/test/fixtures/nlpsample'; import { installNlpSampleEntityFixtures, @@ -37,8 +39,10 @@ import { NlpValueModel } from '../schemas/nlp-value.schema'; describe('NlpSampleEntityRepository', () => { let nlpSampleEntityRepository: NlpSampleEntityRepository; let nlpEntityRepository: NlpEntityRepository; + let languageRepository: LanguageRepository; let nlpSampleEntities: 
NlpSampleEntity[]; let nlpEntities: NlpEntity[]; + let languages: Language[]; beforeAll(async () => { const module: TestingModule = await Test.createTestingModule({ @@ -49,12 +53,14 @@ describe('NlpSampleEntityRepository', () => { NlpEntityModel, NlpValueModel, NlpSampleModel, + LanguageModel, ]), ], providers: [ NlpSampleEntityRepository, NlpEntityRepository, NlpValueRepository, + LanguageRepository, EventEmitter2, ], }).compile(); @@ -62,8 +68,10 @@ describe('NlpSampleEntityRepository', () => { NlpSampleEntityRepository, ); nlpEntityRepository = module.get(NlpEntityRepository); + languageRepository = module.get(LanguageRepository); nlpSampleEntities = await nlpSampleEntityRepository.findAll(); nlpEntities = await nlpEntityRepository.findAll(); + languages = await languageRepository.findAll(); }); afterAll(async () => { @@ -81,7 +89,10 @@ describe('NlpSampleEntityRepository', () => { ...nlpSampleEntityFixtures[0], entity: nlpEntities[0], value: { ...nlpValueFixtures[0], entity: nlpEntities[0].id }, - sample: nlpSampleFixtures[0], + sample: { + ...nlpSampleFixtures[0], + language: languages[nlpSampleFixtures[0].language].id, + }, }); }); }); @@ -117,7 +128,10 @@ describe('NlpSampleEntityRepository', () => { ...curr, entity: nlpEntities[curr.entity], value: nlpValueFixturesWithEntities[curr.value], - sample: nlpSampleFixtures[curr.sample], + sample: { + ...nlpSampleFixtures[curr.sample], + language: languages[nlpSampleFixtures[curr.sample].language].id, + }, }; acc.push(sampleEntityWithPopulate); return acc; diff --git a/api/src/nlp/repositories/nlp-sample.repository.spec.ts b/api/src/nlp/repositories/nlp-sample.repository.spec.ts index a8417a3a..9a99339f 100644 --- a/api/src/nlp/repositories/nlp-sample.repository.spec.ts +++ b/api/src/nlp/repositories/nlp-sample.repository.spec.ts @@ -11,6 +11,8 @@ import { EventEmitter2 } from '@nestjs/event-emitter'; import { MongooseModule } from '@nestjs/mongoose'; import { Test, TestingModule } from '@nestjs/testing'; +import 
{ LanguageRepository } from '@/i18n/repositories/language.repository'; +import { Language, LanguageModel } from '@/i18n/schemas/language.schema'; import { nlpSampleFixtures } from '@/utils/test/fixtures/nlpsample'; import { installNlpSampleEntityFixtures } from '@/utils/test/fixtures/nlpsampleentity'; import { getPageQuery } from '@/utils/test/pagination'; @@ -30,18 +32,25 @@ import { NlpSampleModel, NlpSample } from '../schemas/nlp-sample.schema'; describe('NlpSampleRepository', () => { let nlpSampleRepository: NlpSampleRepository; let nlpSampleEntityRepository: NlpSampleEntityRepository; + let languageRepository: LanguageRepository; let nlpSampleEntity: NlpSampleEntity; let noNlpSample: NlpSample; + let languages: Language[]; beforeAll(async () => { const module: TestingModule = await Test.createTestingModule({ imports: [ rootMongooseTestModule(installNlpSampleEntityFixtures), - MongooseModule.forFeature([NlpSampleModel, NlpSampleEntityModel]), + MongooseModule.forFeature([ + NlpSampleModel, + NlpSampleEntityModel, + LanguageModel, + ]), ], providers: [ NlpSampleRepository, NlpSampleEntityRepository, + LanguageRepository, EventEmitter2, ], }).compile(); @@ -49,10 +58,12 @@ describe('NlpSampleRepository', () => { nlpSampleEntityRepository = module.get( NlpSampleEntityRepository, ); + languageRepository = module.get(LanguageRepository); noNlpSample = await nlpSampleRepository.findOne({ text: 'No' }); nlpSampleEntity = await nlpSampleEntityRepository.findOne({ sample: noNlpSample.id, }); + languages = await languageRepository.findAll(); }); afterAll(async () => { @@ -69,6 +80,7 @@ describe('NlpSampleRepository', () => { expect(result).toEqualPayload({ ...nlpSampleFixtures[1], entities: [nlpSampleEntity], + language: languages[nlpSampleFixtures[1].language], }); }); }); @@ -92,6 +104,7 @@ describe('NlpSampleRepository', () => { entities: nlpSampleEntities.filter((currSampleEntity) => { return currSampleEntity.sample === currSample.id; }), + language: 
languages.find((lang) => currSample.language === lang.id), }; acc.push(sampleWithEntities); return acc; diff --git a/api/src/nlp/schemas/nlp-sample.schema.ts b/api/src/nlp/schemas/nlp-sample.schema.ts index cbf4b00d..acfff99f 100644 --- a/api/src/nlp/schemas/nlp-sample.schema.ts +++ b/api/src/nlp/schemas/nlp-sample.schema.ts @@ -8,9 +8,10 @@ */ import { ModelDefinition, Prop, Schema, SchemaFactory } from '@nestjs/mongoose'; -import { Exclude, Type } from 'class-transformer'; -import { THydratedDocument } from 'mongoose'; +import { Exclude, Transform, Type } from 'class-transformer'; +import { THydratedDocument, Schema as MongooseSchema } from 'mongoose'; +import { Language } from '@/i18n/schemas/language.schema'; import { BaseSchema } from '@/utils/generics/base-schema'; import { LifecycleHookManager } from '@/utils/generics/lifecycle-hook-manager'; import { TFilterPopulateFields } from '@/utils/types/filter.types'; @@ -41,16 +42,32 @@ export class NlpSampleStub extends BaseSchema { default: NlpSampleState.train, }) type?: keyof typeof NlpSampleState; + + /** + * The language of the sample. 
+ */ + @Prop({ + type: MongooseSchema.Types.ObjectId, + ref: 'Language', + required: false, + }) + language: unknown | null; } @Schema({ timestamps: true }) export class NlpSample extends NlpSampleStub { + @Transform(({ obj }) => obj.language.toString()) + language: string | null; + @Exclude() entities?: never; } @Schema({ timestamps: true }) export class NlpSampleFull extends NlpSampleStub { + @Type(() => Language) + language: Language | null; + @Type(() => NlpSampleEntity) entities: NlpSampleEntity[]; } @@ -75,4 +92,7 @@ export type NlpSamplePopulate = keyof TFilterPopulateFields< NlpSampleStub >; -export const NLP_SAMPLE_POPULATE: NlpSamplePopulate[] = ['entities']; +export const NLP_SAMPLE_POPULATE: NlpSamplePopulate[] = [ + 'language', + 'entities', +]; diff --git a/api/src/nlp/seeds/nlp-entity.seed-model.ts b/api/src/nlp/seeds/nlp-entity.seed-model.ts index 82826fbe..14483491 100644 --- a/api/src/nlp/seeds/nlp-entity.seed-model.ts +++ b/api/src/nlp/seeds/nlp-entity.seed-model.ts @@ -10,12 +10,6 @@ import { NlpEntityCreateDto } from '../dto/nlp-entity.dto'; export const nlpEntityModels: NlpEntityCreateDto[] = [ - { - name: 'language', - lookups: ['trait'], - doc: `"language" refers to the language of the text sent by the end user`, - builtin: true, - }, { name: 'intent', lookups: ['trait'], diff --git a/api/src/nlp/seeds/nlp-value.seed-model.ts b/api/src/nlp/seeds/nlp-value.seed-model.ts index 5309d68d..4b0ca8de 100644 --- a/api/src/nlp/seeds/nlp-value.seed-model.ts +++ b/api/src/nlp/seeds/nlp-value.seed-model.ts @@ -7,16 +7,6 @@ * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. 
*/ -import { config } from '@/config'; - import { NlpValueCreateDto } from '../dto/nlp-value.dto'; -export const nlpValueModels: NlpValueCreateDto[] = [ - ...config.chatbot.lang.available.map((lang: string) => { - return { - entity: 'language', - value: lang, - builtin: true, - }; - }), -]; +export const nlpValueModels: NlpValueCreateDto[] = []; diff --git a/api/src/nlp/services/nlp-sample-entity.service.spec.ts b/api/src/nlp/services/nlp-sample-entity.service.spec.ts index 8fa1268f..1ce4546a 100644 --- a/api/src/nlp/services/nlp-sample-entity.service.spec.ts +++ b/api/src/nlp/services/nlp-sample-entity.service.spec.ts @@ -11,6 +11,8 @@ import { EventEmitter2 } from '@nestjs/event-emitter'; import { MongooseModule } from '@nestjs/mongoose'; import { Test, TestingModule } from '@nestjs/testing'; +import { LanguageRepository } from '@/i18n/repositories/language.repository'; +import { Language, LanguageModel } from '@/i18n/schemas/language.schema'; import { nlpSampleFixtures } from '@/utils/test/fixtures/nlpsample'; import { installNlpSampleEntityFixtures, @@ -42,7 +44,9 @@ describe('NlpSampleEntityService', () => { let nlpSampleEntityRepository: NlpSampleEntityRepository; let nlpSampleEntities: NlpSampleEntity[]; let nlpEntityRepository: NlpEntityRepository; + let languageRepository: LanguageRepository; let nlpEntities: NlpEntity[]; + let languages: Language[]; let nlpEntityService: NlpEntityService; let nlpValueService: NlpValueService; @@ -55,12 +59,14 @@ describe('NlpSampleEntityService', () => { NlpEntityModel, NlpSampleModel, NlpValueModel, + LanguageModel, ]), ], providers: [ NlpSampleEntityRepository, NlpEntityRepository, NlpValueRepository, + LanguageRepository, NlpSampleEntityService, NlpEntityService, NlpValueService, @@ -74,6 +80,7 @@ describe('NlpSampleEntityService', () => { NlpSampleEntityRepository, ); nlpEntityRepository = module.get(NlpEntityRepository); + languageRepository = module.get(LanguageRepository); nlpSampleEntityService = module.get( 
NlpSampleEntityService, ); @@ -81,6 +88,7 @@ describe('NlpSampleEntityService', () => { nlpValueService = module.get(NlpValueService); nlpSampleEntities = await nlpSampleEntityRepository.findAll(); nlpEntities = await nlpEntityRepository.findAll(); + languages = await languageRepository.findAll(); }); afterAll(async () => { @@ -98,7 +106,10 @@ describe('NlpSampleEntityService', () => { ...nlpSampleEntityFixtures[0], entity: nlpEntities[0], value: { ...nlpValueFixtures[0], entity: nlpEntities[0].id }, - sample: nlpSampleFixtures[0], + sample: { + ...nlpSampleFixtures[0], + language: languages[nlpSampleFixtures[0].language].id, + }, }; expect(result).toEqualPayload(sampleEntityWithPopulate); }); @@ -135,7 +146,10 @@ describe('NlpSampleEntityService', () => { ...curr, entity: nlpEntities[curr.entity], value: nlpValueFixturesWithEntities[curr.value], - sample: nlpSampleFixtures[curr.sample], + sample: { + ...nlpSampleFixtures[curr.sample], + language: languages[nlpSampleFixtures[curr.sample].language].id, + }, }; acc.push(sampleEntityWithPopulate); return acc; diff --git a/api/src/nlp/services/nlp-sample.service.spec.ts b/api/src/nlp/services/nlp-sample.service.spec.ts index c9a90a21..970a933a 100644 --- a/api/src/nlp/services/nlp-sample.service.spec.ts +++ b/api/src/nlp/services/nlp-sample.service.spec.ts @@ -7,10 +7,14 @@ * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. 
*/ +import { CACHE_MANAGER } from '@nestjs/cache-manager'; import { EventEmitter2 } from '@nestjs/event-emitter'; import { MongooseModule } from '@nestjs/mongoose'; import { Test, TestingModule } from '@nestjs/testing'; +import { LanguageRepository } from '@/i18n/repositories/language.repository'; +import { Language, LanguageModel } from '@/i18n/schemas/language.schema'; +import { LanguageService } from '@/i18n/services/language.service'; import { nlpSampleFixtures } from '@/utils/test/fixtures/nlpsample'; import { installNlpSampleEntityFixtures } from '@/utils/test/fixtures/nlpsampleentity'; import { getPageQuery } from '@/utils/test/pagination'; @@ -39,8 +43,10 @@ describe('NlpSampleService', () => { let nlpSampleService: NlpSampleService; let nlpSampleEntityRepository: NlpSampleEntityRepository; let nlpSampleRepository: NlpSampleRepository; + let languageRepository: LanguageRepository; let noNlpSample: NlpSample; let nlpSampleEntity: NlpSampleEntity; + let languages: Language[]; beforeAll(async () => { const module: TestingModule = await Test.createTestingModule({ @@ -51,6 +57,7 @@ describe('NlpSampleService', () => { NlpSampleEntityModel, NlpValueModel, NlpEntityModel, + LanguageModel, ]), ], providers: [ @@ -58,11 +65,21 @@ describe('NlpSampleService', () => { NlpSampleEntityRepository, NlpEntityRepository, NlpValueRepository, + LanguageRepository, NlpSampleService, NlpSampleEntityService, NlpEntityService, NlpValueService, + LanguageService, EventEmitter2, + { + provide: CACHE_MANAGER, + useValue: { + del: jest.fn(), + get: jest.fn(), + set: jest.fn(), + }, + }, ], }).compile(); nlpSampleService = module.get(NlpSampleService); @@ -73,10 +90,12 @@ describe('NlpSampleService', () => { nlpSampleEntityRepository = module.get( NlpSampleEntityRepository, ); + languageRepository = module.get(LanguageRepository); noNlpSample = await nlpSampleService.findOne({ text: 'No' }); nlpSampleEntity = await nlpSampleEntityRepository.findOne({ sample: noNlpSample.id, }); + 
languages = await languageRepository.findAll(); }); afterAll(async () => { @@ -91,6 +110,7 @@ describe('NlpSampleService', () => { const sampleWithEntities = { ...nlpSampleFixtures[1], entities: [nlpSampleEntity], + language: languages[nlpSampleFixtures[1].language], }; expect(result).toEqualPayload(sampleWithEntities); }); @@ -110,6 +130,7 @@ describe('NlpSampleService', () => { entities: nlpSampleEntities.filter((currSampleEntity) => { return currSampleEntity.sample === currSample.id; }), + language: languages.find((lang) => lang.id === currSample.language), }; acc.push(sampleWithEntities); return acc; diff --git a/api/src/nlp/services/nlp-sample.service.ts b/api/src/nlp/services/nlp-sample.service.ts index 4ebb131e..04076422 100644 --- a/api/src/nlp/services/nlp-sample.service.ts +++ b/api/src/nlp/services/nlp-sample.service.ts @@ -8,6 +8,7 @@ */ import { Injectable } from '@nestjs/common'; +import { OnEvent } from '@nestjs/event-emitter'; import { CommonExample, @@ -16,6 +17,8 @@ import { ExampleEntity, LookupTable, } from '@/extensions/helpers/nlp/default/types'; +import { Language } from '@/i18n/schemas/language.schema'; +import { LanguageService } from '@/i18n/services/language.service'; import { BaseService } from '@/utils/generics/base-service'; import { NlpSampleRepository } from '../repositories/nlp-sample.repository'; @@ -33,7 +36,10 @@ export class NlpSampleService extends BaseService< NlpSamplePopulate, NlpSampleFull > { - constructor(readonly repository: NlpSampleRepository) { + constructor( + readonly repository: NlpSampleRepository, + private readonly languageService: LanguageService, + ) { super(repository); } @@ -56,10 +62,10 @@ export class NlpSampleService extends BaseService< * * @returns The formatted Rasa NLU training dataset. 
*/ - formatRasaNlu( + async formatRasaNlu( samples: NlpSampleFull[], entities: NlpEntityFull[], - ): DatasetType { + ): Promise { const entityMap = NlpEntity.getEntityMap(entities); const valueMap = NlpValue.getValueMap( NlpValue.getValuesFromEntities(entities), @@ -88,21 +94,34 @@ export class NlpSampleService extends BaseService< }); } return res; + }) + // TODO : place language at the same level as the intent + .concat({ + entity: 'language', + value: s.language.code, }); + return { text: s.text, intent: valueMap[intent.value].value, entities: sampleEntities, }; }); - const lookup_tables: LookupTable[] = entities.map((e) => { - return { - name: e.name, - elements: e.values.map((v) => { - return v.value; - }), - }; - }); + + const languages = await this.languageService.getLanguages(); + const lookup_tables: LookupTable[] = entities + .map((e) => { + return { + name: e.name, + elements: e.values.map((v) => { + return v.value; + }), + }; + }) + .concat({ + name: 'language', + elements: Object.keys(languages), + }); const entity_synonyms = entities .reduce((acc, e) => { const synonyms = e.values.map((v) => { @@ -123,4 +142,21 @@ export class NlpSampleService extends BaseService< entity_synonyms, }; } + + /** + * When a language gets deleted, we need to set related samples to null + * + * @param language The language that has been deleted. 
+ */ + @OnEvent('hook:language:delete') + async handleLanguageDelete(language: Language) { + await this.updateMany( + { + language: language.id, + }, + { + language: null, + }, + ); + } } diff --git a/api/src/seeder.ts b/api/src/seeder.ts index c94c20b7..19d76450 100644 --- a/api/src/seeder.ts +++ b/api/src/seeder.ts @@ -13,8 +13,10 @@ import { CategorySeeder } from './chat/seeds/category.seed'; import { categoryModels } from './chat/seeds/category.seed-model'; import { ContextVarSeeder } from './chat/seeds/context-var.seed'; import { contextVarModels } from './chat/seeds/context-var.seed-model'; -import { TranslationSeeder } from './chat/seeds/translation.seed'; -import { translationModels } from './chat/seeds/translation.seed-model'; +import { LanguageSeeder } from './i18n/seeds/language.seed'; +import { languageModels } from './i18n/seeds/language.seed-model'; +import { TranslationSeeder } from './i18n/seeds/translation.seed'; +import { translationModels } from './i18n/seeds/translation.seed-model'; import { LoggerService } from './logger/logger.service'; import { NlpEntitySeeder } from './nlp/seeds/nlp-entity.seed'; import { nlpEntityModels } from './nlp/seeds/nlp-entity.seed-model'; @@ -40,6 +42,7 @@ export async function seedDatabase(app: INestApplicationContext) { const settingSeeder = app.get(SettingSeeder); const permissionSeeder = app.get(PermissionSeeder); const userSeeder = app.get(UserSeeder); + const languageSeeder = app.get(LanguageSeeder); const translationSeeder = app.get(TranslationSeeder); const nlpEntitySeeder = app.get(NlpEntitySeeder); const nlpValueSeeder = app.get(NlpValueSeeder); @@ -127,6 +130,14 @@ export async function seedDatabase(app: INestApplicationContext) { throw e; } + // Seed languages + try { + await languageSeeder.seed(languageModels); + } catch (e) { + logger.error('Unable to seed the database with languages!'); + throw e; + } + // Seed translations try { await translationSeeder.seed(translationModels); diff --git 
a/api/src/setting/controllers/setting.controller.spec.ts b/api/src/setting/controllers/setting.controller.spec.ts index 6e6d0e47..57dc52b3 100644 --- a/api/src/setting/controllers/setting.controller.spec.ts +++ b/api/src/setting/controllers/setting.controller.spec.ts @@ -12,7 +12,7 @@ import { EventEmitter2 } from '@nestjs/event-emitter'; import { MongooseModule } from '@nestjs/mongoose'; import { Test } from '@nestjs/testing'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { I18nService } from '@/i18n/services/i18n.service'; import { LoggerService } from '@/logger/logger.service'; import { installSettingFixtures, @@ -47,7 +47,7 @@ describe('SettingController', () => { LoggerService, EventEmitter2, { - provide: ExtendedI18nService, + provide: I18nService, useValue: { t: jest.fn().mockImplementation((t) => t), }, diff --git a/api/src/setting/repositories/setting.repository.ts b/api/src/setting/repositories/setting.repository.ts index d215a30a..018c9636 100644 --- a/api/src/setting/repositories/setting.repository.ts +++ b/api/src/setting/repositories/setting.repository.ts @@ -12,8 +12,7 @@ import { EventEmitter2 } from '@nestjs/event-emitter'; import { InjectModel } from '@nestjs/mongoose'; import { Document, Model, Query, Types } from 'mongoose'; -import { config } from '@/config'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { I18nService } from '@/i18n/services/i18n.service'; import { BaseRepository } from '@/utils/generics/base-repository'; import { Setting } from '../schemas/setting.schema'; @@ -23,7 +22,7 @@ export class SettingRepository extends BaseRepository { constructor( @InjectModel(Setting.name) readonly model: Model, private readonly eventEmitter: EventEmitter2, - private readonly i18n: ExtendedI18nService, + private readonly i18n: I18nService, ) { super(model, Setting); } @@ -65,8 +64,7 @@ export class SettingRepository extends BaseRepository { * Emits an event after a `Setting` has been updated. 
* * This method is used to synchronize global settings by emitting an event - * based on the `group` and `label` of the `Setting`. It also updates the i18n - * default language setting when the `default_lang` label is updated. + * based on the `group` and `label` of the `Setting`. * * @param _query The Mongoose query object used to find and update the document. * @param setting The updated `Setting` object. @@ -86,33 +84,5 @@ export class SettingRepository extends BaseRepository { 'hook:settings:' + setting.group + ':' + setting.label, setting, ); - - if (setting.label === 'default_lang') { - // @todo : check if this actually updates the default lang - this.i18n.resolveLanguage(setting.value as string); - } - } - - /** - * Sets default values before creating a `Setting` document. - * - * If the setting is part of the `nlp_settings` group, it sets specific values - * for `languages` and `default_lang` labels, using configuration values from the - * chatbot settings. - * - * @param setting The `Setting` document to be created. 
- */ - async preCreate( - setting: Document & - Setting & { _id: Types.ObjectId }, - ) { - if (setting.group === 'nlp_settings') { - if (setting.label === 'languages') { - setting.value = config.chatbot.lang.available; - } else if (setting.label === 'default_lang') { - setting.value = config.chatbot.lang.default; - setting.options = config.chatbot.lang.available; - } - } } } diff --git a/api/src/setting/schemas/types.ts b/api/src/setting/schemas/types.ts index f13f849e..cadf2be7 100644 --- a/api/src/setting/schemas/types.ts +++ b/api/src/setting/schemas/types.ts @@ -98,8 +98,6 @@ export type SettingDict = { [group: string]: Setting[] }; export type Settings = { nlp_settings: { - default_lang: string; - languages: string[]; threshold: string; provider: string; endpoint: string; diff --git a/api/src/setting/seeds/setting.seed-model.ts b/api/src/setting/seeds/setting.seed-model.ts index 855aafa6..2f28674a 100644 --- a/api/src/setting/seeds/setting.seed-model.ts +++ b/api/src/setting/seeds/setting.seed-model.ts @@ -7,8 +7,6 @@ * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. 
*/ -import { config } from '@/config'; - import { SettingCreateDto } from '../dto/setting.dto'; import { SettingType } from '../schemas/types'; @@ -67,26 +65,6 @@ export const settingModels: SettingCreateDto[] = [ type: SettingType.text, weight: 3, }, - { - group: 'nlp_settings', - label: 'languages', - value: [], - options: [], - type: SettingType.select, - config: { - multiple: true, - allowCreate: true, - }, - weight: 4, - }, - { - group: 'nlp_settings', - label: 'default_lang', - value: config.chatbot.lang.default, - options: [], // NOTE : will be set onBeforeCreate from config - type: SettingType.select, - weight: 5, - }, { group: 'nlp_settings', label: 'threshold', @@ -97,7 +75,7 @@ export const settingModels: SettingCreateDto[] = [ max: 1, step: 0.01, }, - weight: 6, + weight: 4, }, { group: 'contact', diff --git a/api/src/setting/services/setting.service.spec.ts b/api/src/setting/services/setting.service.spec.ts index 69e4e985..0fa7c6b9 100644 --- a/api/src/setting/services/setting.service.spec.ts +++ b/api/src/setting/services/setting.service.spec.ts @@ -12,7 +12,7 @@ import { EventEmitter2 } from '@nestjs/event-emitter'; import { MongooseModule } from '@nestjs/mongoose'; import { Test } from '@nestjs/testing'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { I18nService } from '@/i18n/services/i18n.service'; import { LoggerService } from '@/logger/logger.service'; import { installSettingFixtures, @@ -51,7 +51,7 @@ describe('SettingService', () => { SettingSeeder, EventEmitter2, { - provide: ExtendedI18nService, + provide: I18nService, useValue: { t: jest.fn().mockImplementation((t) => t), }, diff --git a/api/src/user/controllers/auth.controller.spec.ts b/api/src/user/controllers/auth.controller.spec.ts index b30a6adb..e5a13fc8 100644 --- a/api/src/user/controllers/auth.controller.spec.ts +++ b/api/src/user/controllers/auth.controller.spec.ts @@ -23,7 +23,10 @@ import { SentMessageInfo } from 'nodemailer'; import { 
AttachmentRepository } from '@/attachment/repositories/attachment.repository'; import { AttachmentModel } from '@/attachment/schemas/attachment.schema'; import { AttachmentService } from '@/attachment/services/attachment.service'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { LanguageRepository } from '@/i18n/repositories/language.repository'; +import { LanguageModel } from '@/i18n/schemas/language.schema'; +import { I18nService } from '@/i18n/services/i18n.service'; +import { LanguageService } from '@/i18n/services/language.service'; import { LoggerService } from '@/logger/logger.service'; import { installUserFixtures } from '@/utils/test/fixtures/user'; import { @@ -69,6 +72,7 @@ describe('AuthController', () => { PermissionModel, InvitationModel, AttachmentModel, + LanguageModel, ]), ], providers: [ @@ -86,6 +90,8 @@ describe('AuthController', () => { PermissionRepository, InvitationRepository, InvitationService, + LanguageRepository, + LanguageService, JwtService, { provide: MailerService, @@ -106,7 +112,7 @@ describe('AuthController', () => { EventEmitter2, ValidateAccountService, { - provide: ExtendedI18nService, + provide: I18nService, useValue: { t: jest.fn().mockImplementation((t) => t), }, diff --git a/api/src/user/controllers/user.controller.spec.ts b/api/src/user/controllers/user.controller.spec.ts index 399a68dd..a674dd9f 100644 --- a/api/src/user/controllers/user.controller.spec.ts +++ b/api/src/user/controllers/user.controller.spec.ts @@ -20,7 +20,10 @@ import { SentMessageInfo } from 'nodemailer'; import { AttachmentRepository } from '@/attachment/repositories/attachment.repository'; import { AttachmentModel } from '@/attachment/schemas/attachment.schema'; import { AttachmentService } from '@/attachment/services/attachment.service'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { LanguageRepository } from '@/i18n/repositories/language.repository'; +import { LanguageModel } from 
'@/i18n/schemas/language.schema'; +import { I18nService } from '@/i18n/services/i18n.service'; +import { LanguageService } from '@/i18n/services/language.service'; import { LoggerService } from '@/logger/logger.service'; import { IGNORED_TEST_FIELDS } from '@/utils/test/constants'; import { installPermissionFixtures } from '@/utils/test/fixtures/permission'; @@ -75,6 +78,7 @@ describe('UserController', () => { PermissionModel, InvitationModel, AttachmentModel, + LanguageModel, ]), JwtModule, ], @@ -108,9 +112,11 @@ describe('UserController', () => { }, AttachmentService, AttachmentRepository, + LanguageService, + LanguageRepository, ValidateAccountService, { - provide: ExtendedI18nService, + provide: I18nService, useValue: { t: jest.fn().mockImplementation((t) => t), }, diff --git a/api/src/user/repositories/permission.repository.spec.ts b/api/src/user/repositories/permission.repository.spec.ts index 112d32d5..13b7fb18 100644 --- a/api/src/user/repositories/permission.repository.spec.ts +++ b/api/src/user/repositories/permission.repository.spec.ts @@ -35,6 +35,7 @@ describe('PermissionRepository', () => { let permissionRepository: PermissionRepository; let permissionModel: Model; let permission: Permission; + let permissionToDelete: Permission; beforeAll(async () => { const module: TestingModule = await Test.createTestingModule({ @@ -59,6 +60,9 @@ describe('PermissionRepository', () => { permission = await permissionRepository.findOne({ action: Action.CREATE, }); + permissionToDelete = await permissionRepository.findOne({ + action: Action.UPDATE, + }); }); afterAll(async () => { @@ -112,4 +116,36 @@ describe('PermissionRepository', () => { expect(result).toEqualPayload(permissionsWithRolesAndModels); }); }); + + describe('deleteOne', () => { + it('should delete a permission by id', async () => { + jest.spyOn(permissionModel, 'deleteOne'); + const result = await permissionRepository.deleteOne( + permissionToDelete.id, + ); + + 
expect(permissionModel.deleteOne).toHaveBeenCalledWith({ + _id: permissionToDelete.id, + }); + + expect(result).toEqual({ + acknowledged: true, + deletedCount: 1, + }); + + const permissions = await permissionRepository.find({ + role: permissionToDelete.id, + }); + expect(permissions.length).toEqual(0); + }); + + it('should fail to delete a permission that does not exist', async () => { + expect( + await permissionRepository.deleteOne(permissionToDelete.id), + ).toEqual({ + acknowledged: true, + deletedCount: 0, + }); + }); + }); }); diff --git a/api/src/user/repositories/role.repository.spec.ts b/api/src/user/repositories/role.repository.spec.ts index 932dc1b7..d63334c5 100644 --- a/api/src/user/repositories/role.repository.spec.ts +++ b/api/src/user/repositories/role.repository.spec.ts @@ -24,8 +24,8 @@ import { PermissionRepository } from '../repositories/permission.repository'; import { RoleRepository } from '../repositories/role.repository'; import { UserRepository } from '../repositories/user.repository'; import { PermissionModel } from '../schemas/permission.schema'; -import { RoleModel, Role } from '../schemas/role.schema'; -import { UserModel, User } from '../schemas/user.schema'; +import { Role, RoleModel } from '../schemas/role.schema'; +import { User, UserModel } from '../schemas/user.schema'; describe('RoleRepository', () => { let roleRepository: RoleRepository; @@ -34,6 +34,7 @@ describe('RoleRepository', () => { let roleModel: Model; let role: Role; let users: User[]; + let roleToDelete: Role; beforeAll(async () => { const module: TestingModule = await Test.createTestingModule({ @@ -57,6 +58,9 @@ describe('RoleRepository', () => { users = (await userRepository.findAll()).filter((user) => user.roles.includes(role.id), ); + roleToDelete = await roleRepository.findOne({ + name: 'manager', + }); }); afterAll(async () => { @@ -106,4 +110,31 @@ describe('RoleRepository', () => { expect(result).toEqualPayload(rolesWithPermissionsAndUsers); }); }); + + 
describe('deleteOne', () => { + it('should delete a role by id', async () => { + jest.spyOn(roleModel, 'deleteOne'); + const result = await roleRepository.deleteOne(roleToDelete.id); + + expect(roleModel.deleteOne).toHaveBeenCalledWith({ + _id: roleToDelete.id, + }); + expect(result).toEqual({ + acknowledged: true, + deletedCount: 1, + }); + + const permissions = await permissionRepository.find({ + role: roleToDelete.id, + }); + expect(permissions.length).toEqual(0); + }); + + it('should fail to delete a role that does not exist', async () => { + expect(await roleRepository.deleteOne(roleToDelete.id)).toEqual({ + acknowledged: true, + deletedCount: 0, + }); + }); + }); }); diff --git a/api/src/user/repositories/role.repository.ts b/api/src/user/repositories/role.repository.ts index d4336cb8..0c05b812 100644 --- a/api/src/user/repositories/role.repository.ts +++ b/api/src/user/repositories/role.repository.ts @@ -96,7 +96,7 @@ export class RoleRepository extends BaseRepository< * * @returns The result of the delete operation. 
*/ - async deleteOneQuery(id: string) { + async deleteOne(id: string) { const result = await this.model.deleteOne({ _id: id }).exec(); if (result.deletedCount > 0) { await this.permissionModel.deleteMany({ role: id }); diff --git a/api/src/user/seeds/model.seed-model.ts b/api/src/user/seeds/model.seed-model.ts index 076f27ea..17b6d371 100644 --- a/api/src/user/seeds/model.seed-model.ts +++ b/api/src/user/seeds/model.seed-model.ts @@ -100,6 +100,11 @@ export const modelModels: ModelCreateDto[] = [ identity: 'subscriber', attributes: {}, }, + { + name: 'Language', + identity: 'language', + attributes: {}, + }, { name: 'Translation', identity: 'translation', diff --git a/api/src/user/services/invitation.service.spec.ts b/api/src/user/services/invitation.service.spec.ts index 82216aee..bb5e7ac0 100644 --- a/api/src/user/services/invitation.service.spec.ts +++ b/api/src/user/services/invitation.service.spec.ts @@ -16,7 +16,10 @@ import { Test, TestingModule } from '@nestjs/testing'; import { ISendMailOptions, MailerService } from '@nestjs-modules/mailer'; import { SentMessageInfo } from 'nodemailer'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { LanguageRepository } from '@/i18n/repositories/language.repository'; +import { LanguageModel } from '@/i18n/schemas/language.schema'; +import { I18nService } from '@/i18n/services/i18n.service'; +import { LanguageService } from '@/i18n/services/language.service'; import { LoggerService } from '@/logger/logger.service'; import { IGNORED_TEST_FIELDS } from '@/utils/test/constants'; import { @@ -55,6 +58,7 @@ describe('InvitationService', () => { RoleModel, PermissionModel, InvitationModel, + LanguageModel, ]), JwtModule, ], @@ -66,10 +70,12 @@ describe('InvitationService', () => { PermissionRepository, InvitationRepository, InvitationService, + LanguageRepository, + LanguageService, JwtService, Logger, { - provide: ExtendedI18nService, + provide: I18nService, useValue: { t: 
jest.fn().mockImplementation((t) => t), }, diff --git a/api/src/user/services/invitation.service.ts b/api/src/user/services/invitation.service.ts index f6a921c6..a5f928d6 100644 --- a/api/src/user/services/invitation.service.ts +++ b/api/src/user/services/invitation.service.ts @@ -17,7 +17,8 @@ import { JwtService, JwtSignOptions } from '@nestjs/jwt'; import { MailerService } from '@nestjs-modules/mailer'; import { config } from '@/config'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { I18nService } from '@/i18n/services/i18n.service'; +import { LanguageService } from '@/i18n/services/language.service'; import { LoggerService } from '@/logger/logger.service'; import { BaseService } from '@/utils/generics/base-service'; @@ -41,7 +42,8 @@ export class InvitationService extends BaseService< @Inject(JwtService) private readonly jwtService: JwtService, @Optional() private readonly mailerService: MailerService | undefined, private logger: LoggerService, - protected readonly i18n: ExtendedI18nService, + protected readonly i18n: I18nService, + public readonly languageService: LanguageService, ) { super(repository); } @@ -63,6 +65,7 @@ export class InvitationService extends BaseService< const jwt = await this.sign(dto); if (this.mailerService) { try { + const defaultLanguage = await this.languageService.getDefaultLanguage(); await this.mailerService.sendMail({ to: dto.email, template: 'invitation.mjml', @@ -70,7 +73,7 @@ export class InvitationService extends BaseService< token: jwt, // TODO: Which language should we use? 
t: (key: string) => - this.i18n.t(key, { lang: config.chatbot.lang.default }), + this.i18n.t(key, { lang: defaultLanguage.code }), }, subject: this.i18n.t('invitation_subject'), }); diff --git a/api/src/user/services/passwordReset.service.spec.ts b/api/src/user/services/passwordReset.service.spec.ts index c05b43b0..e2c84479 100644 --- a/api/src/user/services/passwordReset.service.spec.ts +++ b/api/src/user/services/passwordReset.service.spec.ts @@ -21,7 +21,10 @@ import { SentMessageInfo } from 'nodemailer'; import { AttachmentRepository } from '@/attachment/repositories/attachment.repository'; import { AttachmentModel } from '@/attachment/schemas/attachment.schema'; import { AttachmentService } from '@/attachment/services/attachment.service'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { LanguageRepository } from '@/i18n/repositories/language.repository'; +import { LanguageModel } from '@/i18n/schemas/language.schema'; +import { I18nService } from '@/i18n/services/i18n.service'; +import { LanguageService } from '@/i18n/services/language.service'; import { LoggerService } from '@/logger/logger.service'; import { installUserFixtures, users } from '@/utils/test/fixtures/user'; import { @@ -52,6 +55,7 @@ describe('PasswordResetService', () => { RoleModel, PermissionModel, AttachmentModel, + LanguageModel, ]), JwtModule, ], @@ -62,6 +66,8 @@ describe('PasswordResetService', () => { AttachmentService, AttachmentRepository, RoleRepository, + LanguageService, + LanguageRepository, LoggerService, PasswordResetService, JwtService, @@ -75,7 +81,7 @@ describe('PasswordResetService', () => { }, }, { - provide: ExtendedI18nService, + provide: I18nService, useValue: { t: jest.fn().mockImplementation((t) => t), }, diff --git a/api/src/user/services/passwordReset.service.ts b/api/src/user/services/passwordReset.service.ts index 60143f82..f3ded11b 100644 --- a/api/src/user/services/passwordReset.service.ts +++ 
b/api/src/user/services/passwordReset.service.ts @@ -21,7 +21,8 @@ import { MailerService } from '@nestjs-modules/mailer'; import { compareSync } from 'bcryptjs'; import { config } from '@/config'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { I18nService } from '@/i18n/services/i18n.service'; +import { LanguageService } from '@/i18n/services/language.service'; import { LoggerService } from '@/logger/logger.service'; import { UserService } from './user.service'; @@ -34,7 +35,8 @@ export class PasswordResetService { @Optional() private readonly mailerService: MailerService | undefined, private logger: LoggerService, private readonly userService: UserService, - public readonly i18n: ExtendedI18nService, + public readonly i18n: I18nService, + public readonly languageService: LanguageService, ) {} public readonly jwtSignOptions: JwtSignOptions = { @@ -59,6 +61,7 @@ export class PasswordResetService { if (this.mailerService) { try { + const defaultLanguage = await this.languageService.getDefaultLanguage(); await this.mailerService.sendMail({ to: dto.email, template: 'password_reset.mjml', @@ -66,7 +69,7 @@ export class PasswordResetService { token: jwt, first_name: user.first_name, t: (key: string) => - this.i18n.t(key, { lang: config.chatbot.lang.default }), + this.i18n.t(key, { lang: defaultLanguage.code }), }, subject: this.i18n.t('password_reset_subject'), }); diff --git a/api/src/user/services/validate-account.service.spec.ts b/api/src/user/services/validate-account.service.spec.ts index 701d5fe8..5d72f831 100644 --- a/api/src/user/services/validate-account.service.spec.ts +++ b/api/src/user/services/validate-account.service.spec.ts @@ -7,6 +7,7 @@ * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. 
Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. */ +import { CACHE_MANAGER } from '@nestjs/cache-manager'; import { EventEmitter2 } from '@nestjs/event-emitter'; import { JwtModule } from '@nestjs/jwt'; import { MongooseModule } from '@nestjs/mongoose'; @@ -17,7 +18,10 @@ import { SentMessageInfo } from 'nodemailer'; import { AttachmentRepository } from '@/attachment/repositories/attachment.repository'; import { AttachmentModel } from '@/attachment/schemas/attachment.schema'; import { AttachmentService } from '@/attachment/services/attachment.service'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { LanguageRepository } from '@/i18n/repositories/language.repository'; +import { LanguageModel } from '@/i18n/schemas/language.schema'; +import { I18nService } from '@/i18n/services/i18n.service'; +import { LanguageService } from '@/i18n/services/language.service'; import { LoggerService } from '@/logger/logger.service'; import { installUserFixtures, users } from '@/utils/test/fixtures/user'; import { @@ -46,6 +50,7 @@ describe('ValidateAccountService', () => { RoleModel, PermissionModel, AttachmentModel, + LanguageModel, ]), JwtModule, ], @@ -56,6 +61,8 @@ describe('ValidateAccountService', () => { UserRepository, RoleService, RoleRepository, + LanguageService, + LanguageRepository, LoggerService, { provide: MailerService, @@ -69,11 +76,19 @@ describe('ValidateAccountService', () => { EventEmitter2, ValidateAccountService, { - provide: ExtendedI18nService, + provide: I18nService, useValue: { t: jest.fn().mockImplementation((t) => t), }, }, + { + provide: CACHE_MANAGER, + useValue: { + del: jest.fn(), + get: jest.fn(), + set: jest.fn(), + }, + }, ], }).compile(); validateAccountService = module.get( diff --git a/api/src/user/services/validate-account.service.ts b/api/src/user/services/validate-account.service.ts index 4c2d06c6..ccaa0dcd 100644 --- 
a/api/src/user/services/validate-account.service.ts +++ b/api/src/user/services/validate-account.service.ts @@ -18,7 +18,9 @@ import { JwtService, JwtSignOptions } from '@nestjs/jwt'; import { MailerService } from '@nestjs-modules/mailer'; import { config } from '@/config'; -import { ExtendedI18nService } from '@/extended-i18n.service'; +import { I18nService } from '@/i18n/services/i18n.service'; +import { LanguageService } from '@/i18n/services/language.service'; +import { LoggerService } from '@/logger/logger.service'; import { UserService } from './user.service'; import { UserCreateDto } from '../dto/user.dto'; @@ -35,7 +37,9 @@ export class ValidateAccountService { @Inject(JwtService) private readonly jwtService: JwtService, private readonly userService: UserService, @Optional() private readonly mailerService: MailerService | undefined, - private readonly i18n: ExtendedI18nService, + private logger: LoggerService, + private readonly i18n: I18nService, + private readonly languageService: LanguageService, ) {} /** @@ -73,17 +77,28 @@ export class ValidateAccountService { const confirmationToken = await this.sign({ email: dto.email }); if (this.mailerService) { - await this.mailerService.sendMail({ - to: dto.email, - template: 'account_confirmation.mjml', - context: { - token: confirmationToken, - first_name: dto.first_name, - t: (key: string) => - this.i18n.t(key, { lang: config.chatbot.lang.default }), - }, - subject: this.i18n.t('account_confirmation_subject'), - }); + try { + const defaultLanguage = await this.languageService.getDefaultLanguage(); + await this.mailerService.sendMail({ + to: dto.email, + template: 'account_confirmation.mjml', + context: { + token: confirmationToken, + first_name: dto.first_name, + t: (key: string) => + this.i18n.t(key, { lang: defaultLanguage.code }), + }, + subject: this.i18n.t('account_confirmation_subject'), + }); + } catch (e) { + this.logger.error( + 'Could not send email', + e.message, + e.stack, + 'ValidateAccount', + ); 
+ throw new InternalServerErrorException('Could not send email'); + } } } diff --git a/api/src/user/types/model.type.ts b/api/src/user/types/model.type.ts index 55b62b6b..ae45463f 100644 --- a/api/src/user/types/model.type.ts +++ b/api/src/user/types/model.type.ts @@ -26,6 +26,7 @@ export type TModel = | 'conversation' | 'message' | 'subscriber' + | 'language' | 'translation' | 'botstats' | 'menu' diff --git a/api/src/utils/constants/cache.ts b/api/src/utils/constants/cache.ts index 966a7ce5..c22f4a9a 100644 --- a/api/src/utils/constants/cache.ts +++ b/api/src/utils/constants/cache.ts @@ -13,3 +13,7 @@ export const SETTING_CACHE_KEY = 'settings'; export const PERMISSION_CACHE_KEY = 'permissions'; export const MENU_CACHE_KEY = 'menu'; + +export const LANGUAGES_CACHE_KEY = 'languages'; + +export const DEFAULT_LANGUAGE_CACHE_KEY = 'default_language'; diff --git a/api/src/utils/helpers/URL.ts b/api/src/utils/helpers/URL.ts new file mode 100644 index 00000000..54016606 --- /dev/null +++ b/api/src/utils/helpers/URL.ts @@ -0,0 +1,9 @@ +export const buildURL = (baseUrl: string, relativePath: string): string => { + try { + const url = new URL(relativePath, baseUrl); + + return url.toString(); + } catch { + throw new Error(`Invalid base URL: ${baseUrl}`); + } +}; diff --git a/api/src/utils/pipes/search-filter.pipe.ts b/api/src/utils/pipes/search-filter.pipe.ts index d26750f1..b944338f 100644 --- a/api/src/utils/pipes/search-filter.pipe.ts +++ b/api/src/utils/pipes/search-filter.pipe.ts @@ -13,6 +13,7 @@ import { ArgumentMetadata, Logger, } from '@nestjs/common'; +import escapeRegExp from 'lodash/escapeRegExp'; import { TFilterQuery, Types } from 'mongoose'; import { @@ -36,9 +37,8 @@ export class SearchFilterPipe } private getRegexValue(val: string) { - const quote = (str: string) => - str.replace(/([.?*+^$[\]\\(){}|-])/g, '\\$1'); - return new RegExp(quote(val), 'i'); + const escapedRegExp = escapeRegExp(val); + return new RegExp(escapedRegExp, 'i'); } private 
isAllowedField(field: string) { diff --git a/api/src/utils/test/fixtures/language.ts b/api/src/utils/test/fixtures/language.ts new file mode 100644 index 00000000..18dbc107 --- /dev/null +++ b/api/src/utils/test/fixtures/language.ts @@ -0,0 +1,33 @@ +/* + * Copyright © 2024 Hexastack. All rights reserved. + * + * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: + * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. + * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). + * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. 
+ */ + +import mongoose from 'mongoose'; + +import { LanguageUpdateDto } from '@/i18n/dto/language.dto'; +import { LanguageModel } from '@/i18n/schemas/language.schema'; + +export const languageFixtures: LanguageUpdateDto[] = [ + { + title: 'English', + code: 'en', + isDefault: true, + isRTL: false, + }, + { + title: 'Français', + code: 'fr', + isDefault: false, + isRTL: false, + }, +]; + +export const installLanguageFixtures = async () => { + const Language = mongoose.model(LanguageModel.name, LanguageModel.schema); + return await Language.insertMany(languageFixtures); +}; diff --git a/api/src/utils/test/fixtures/nlpentity.ts b/api/src/utils/test/fixtures/nlpentity.ts index 410ea055..466b3000 100644 --- a/api/src/utils/test/fixtures/nlpentity.ts +++ b/api/src/utils/test/fixtures/nlpentity.ts @@ -25,12 +25,6 @@ export const nlpEntityFixtures: NlpEntityCreateDto[] = [ doc: '', builtin: false, }, - { - name: 'language', - lookups: ['trait'], - doc: '', - builtin: false, - }, { name: 'built_in', lookups: ['trait'], diff --git a/api/src/utils/test/fixtures/nlpsample.ts b/api/src/utils/test/fixtures/nlpsample.ts index 87e83ebc..3ad79c6b 100644 --- a/api/src/utils/test/fixtures/nlpsample.ts +++ b/api/src/utils/test/fixtures/nlpsample.ts @@ -13,23 +13,28 @@ import { NlpSampleCreateDto } from '@/nlp/dto/nlp-sample.dto'; import { NlpSampleModel, NlpSample } from '@/nlp/schemas/nlp-sample.schema'; import { NlpSampleState } from '@/nlp/schemas/types'; +import { installLanguageFixtures } from './language'; import { getFixturesWithDefaultValues } from '../defaultValues'; import { TFixturesDefaultValues } from '../types'; const nlpSamples: NlpSampleCreateDto[] = [ { text: 'yess', + language: '0', }, { text: 'No', + language: '0', }, { text: 'Hello', trained: true, + language: '0', }, { text: 'Bye Jhon', trained: true, + language: '0', }, ]; @@ -44,6 +49,15 @@ export const nlpSampleFixtures = getFixturesWithDefaultValues({ }); export const installNlpSampleFixtures = async () => { 
+ const languages = await installLanguageFixtures(); + const NlpSample = mongoose.model(NlpSampleModel.name, NlpSampleModel.schema); - return await NlpSample.insertMany(nlpSampleFixtures); + return await NlpSample.insertMany( + nlpSampleFixtures.map((v) => { + return { + ...v, + language: languages[parseInt(v.language)].id, + }; + }), + ); }; diff --git a/api/src/utils/test/fixtures/nlpvalue.ts b/api/src/utils/test/fixtures/nlpvalue.ts index ca5a05ed..a887f383 100644 --- a/api/src/utils/test/fixtures/nlpvalue.ts +++ b/api/src/utils/test/fixtures/nlpvalue.ts @@ -45,12 +45,6 @@ export const nlpValueFixtures: NlpValueCreateDto[] = [ expressions: ['bye', 'bye bye'], builtin: true, }, - { - entity: '2', - value: 'en', - expressions: [], - builtin: true, - }, ]; export const installNlpValueFixtures = async () => { diff --git a/api/src/utils/test/fixtures/translation.ts b/api/src/utils/test/fixtures/translation.ts index 35c2f8d5..3e9989b8 100644 --- a/api/src/utils/test/fixtures/translation.ts +++ b/api/src/utils/test/fixtures/translation.ts @@ -9,8 +9,8 @@ import mongoose from 'mongoose'; -import { TranslationUpdateDto } from '@/chat/dto/translation.dto'; -import { TranslationModel } from '@/chat/schemas/translation.schema'; +import { TranslationUpdateDto } from '@/i18n/dto/translation.dto'; +import { TranslationModel } from '@/i18n/schemas/translation.schema'; export const translationFixtures: TranslationUpdateDto[] = [ { diff --git a/docker/.env.example b/docker/.env.example index 8e4f837e..a78c294c 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -45,7 +45,8 @@ AUTH_TOKEN=token123 LANGUAGE_CLASSIFIER=language-classifier INTENT_CLASSIFIERS=en,fr TFLC_REPO_ID=Hexastack/tflc -JISF_REPO_ID=Hexastack/jisf +INTENT_CLASSIFIER_REPO_ID=Hexastack/intent-classifier +SLOT_FILLER_REPO_ID=Hexastack/slot-filler NLP_PORT=5000 # Frontend (Next.js) diff --git a/docker/docker-compose.nlu.dev.yml b/docker/docker-compose.nlu.dev.yml index b828f4a6..c5768bff 100644 --- 
a/docker/docker-compose.nlu.dev.yml +++ b/docker/docker-compose.nlu.dev.yml @@ -5,5 +5,6 @@ services: build: context: ../nlu dockerfile: Dockerfile + pull_policy: build ports: - ${NLP_PORT}:5000 diff --git a/docker/docker-compose.prod.yml b/docker/docker-compose.prod.yml index 1ef6237f..549512dd 100644 --- a/docker/docker-compose.prod.yml +++ b/docker/docker-compose.prod.yml @@ -1,5 +1 @@ version: "3.8" - -widget: - build: - target: production diff --git a/frontend/src/app-components/tables/columns/getColumns.tsx b/frontend/src/app-components/tables/columns/getColumns.tsx index 52d9b028..a4127f4f 100644 --- a/frontend/src/app-components/tables/columns/getColumns.tsx +++ b/frontend/src/app-components/tables/columns/getColumns.tsx @@ -7,6 +7,7 @@ * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. 
*/ +import { CheckCircle } from "@mui/icons-material"; import AdminPanelSettingsIcon from "@mui/icons-material/AdminPanelSettingsOutlined"; import DeleteIcon from "@mui/icons-material/DeleteOutlined"; import EditIcon from "@mui/icons-material/EditOutlined"; @@ -39,12 +40,16 @@ export enum ActionColumnLabel { Content = "button.content", Fields = "button.fields", Manage_Labels = "title.manage_labels", + Toggle = "button.toggle", } export interface ActionColumn { label: ActionColumnLabel; action?: (row: T) => void; requires?: PermissionAction[]; + getState?: (row: T) => boolean; + helperText?: string; + isDisabled?: (row: T) => boolean; } const BUTTON_WIDTH = 60; @@ -70,6 +75,8 @@ function getIcon(label: ActionColumnLabel) { return ; case ActionColumnLabel.Manage_Labels: return ; + case ActionColumnLabel.Toggle: + return ; default: return <>; } @@ -78,7 +85,7 @@ function getIcon(label: ActionColumnLabel) { function getColor(label: ActionColumnLabel) { switch (label) { case ActionColumnLabel.Edit: - return theme.palette.warning.main; + return theme.palette.grey[900]; case ActionColumnLabel.Delete: return theme.palette.error.main; default: @@ -97,29 +104,46 @@ function StackComponent({ return ( - {actions.map(({ label, action, requires = [] }) => ( - {getIcon(label)}} - label={t(label)} - showInMenu={false} - sx={{ - color: "grey", - "&:hover": { - color: getColor(label), - }, - }} - disabled={ - params.row.builtin && - (requires.includes(PermissionAction.UPDATE) || - requires.includes(PermissionAction.DELETE)) - } - onClick={() => { - action && action(params.row); - }} - /> - ))} + {actions.map( + ({ + label, + action, + requires = [], + getState, + helperText, + isDisabled, + }) => ( + {getIcon(label)} + } + label={helperText || t(label)} + showInMenu={false} + sx={{ + color: + label === ActionColumnLabel.Toggle && + getState && + getState(params.row) + ? 
getColor(label) + : theme.palette.grey[600], + "&:hover": { + color: getColor(label), + }, + }} + disabled={ + (isDisabled && isDisabled(params.row)) || + (params.row.builtin && + (requires.includes(PermissionAction.UPDATE) || + requires.includes(PermissionAction.DELETE))) + } + onClick={() => { + action && action(params.row); + }} + /> + ), + )} ); } diff --git a/frontend/src/components/Menu/MenuDialog.tsx b/frontend/src/components/Menu/MenuDialog.tsx index 936a23bc..e0144922 100644 --- a/frontend/src/components/Menu/MenuDialog.tsx +++ b/frontend/src/components/Menu/MenuDialog.tsx @@ -14,7 +14,6 @@ import { DialogProps, MenuItem, } from "@mui/material"; -import { isAbsoluteUrl } from "next/dist/shared/lib/utils"; import { useEffect, FC } from "react"; import { useForm, Controller } from "react-hook-form"; import { useTranslation } from "react-i18next"; @@ -26,6 +25,7 @@ import { ContentItem } from "@/app-components/dialogs/layouts/ContentItem"; import { Input } from "@/app-components/inputs/Input"; import { ToggleableInput } from "@/app-components/inputs/ToggleableInput"; import { IMenuItem, IMenuItemAttributes, MenuType } from "@/types/menu.types"; +import { isAbsoluteUrl } from "@/utils/URL"; export type MenuDialogProps = DialogProps & { open: boolean; diff --git a/frontend/src/components/languages/LanguageDialog.tsx b/frontend/src/components/languages/LanguageDialog.tsx new file mode 100644 index 00000000..73e77d74 --- /dev/null +++ b/frontend/src/components/languages/LanguageDialog.tsx @@ -0,0 +1,151 @@ +/* + * Copyright © 2024 Hexastack. All rights reserved. + * + * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: + * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. + * 2. 
All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). + * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. + */ + +import { + Dialog, + DialogActions, + DialogContent, + FormControlLabel, + Switch, +} from "@mui/material"; +import { FC, useEffect } from "react"; +import { Controller, useForm } from "react-hook-form"; +import { useTranslation } from "react-i18next"; + +import DialogButtons from "@/app-components/buttons/DialogButtons"; +import { DialogTitle } from "@/app-components/dialogs/DialogTitle"; +import { ContentContainer } from "@/app-components/dialogs/layouts/ContentContainer"; +import { ContentItem } from "@/app-components/dialogs/layouts/ContentItem"; +import { Input } from "@/app-components/inputs/Input"; +import { useCreate } from "@/hooks/crud/useCreate"; +import { useUpdate } from "@/hooks/crud/useUpdate"; +import { DialogControlProps } from "@/hooks/useDialog"; +import { useToast } from "@/hooks/useToast"; +import { EntityType } from "@/services/types"; +import { ILanguage, ILanguageAttributes } from "@/types/language.types"; + +export type LanguageDialogProps = DialogControlProps; +export const LanguageDialog: FC = ({ + open, + data, + closeDialog, + ...rest +}) => { + const { t } = useTranslation(); + const { toast } = useToast(); + const { mutateAsync: createLanguage } = useCreate(EntityType.LANGUAGE, { + onError: () => { + toast.error(t("message.internal_server_error")); + }, + onSuccess() { + closeDialog(); + toast.success(t("message.success_save")); + }, + }); + const { mutateAsync: updateLanguage } = useUpdate(EntityType.LANGUAGE, { + 
onError: () => { + toast.error(t("message.internal_server_error")); + }, + onSuccess() { + closeDialog(); + toast.success(t("message.success_save")); + }, + }); + const { + reset, + register, + formState: { errors }, + handleSubmit, + control, + } = useForm({ + defaultValues: { + title: data?.title || "", + code: data?.code || "", + isRTL: data?.isRTL || false, + }, + }); + const validationRules = { + title: { + required: t("message.title_is_required"), + }, + code: { + required: t("message.code_is_required"), + }, + }; + const onSubmitForm = async (params: ILanguageAttributes) => { + if (data) { + updateLanguage({ id: data.id, params }); + } else { + createLanguage(params); + } + }; + + useEffect(() => { + if (open) reset(); + }, [open, reset]); + + useEffect(() => { + if (data) { + reset({ + title: data.title, + code: data.code, + isRTL: data.isRTL, + }); + } else { + reset(); + } + }, [data, reset]); + + return ( +

+
+ + {data ? t("title.edit_label") : t("title.new_label")} + + + + + + + + + + + ( + } + label={t("label.is_rtl")} + /> + )} + /> + + + + + + +
+
+ ); +}; diff --git a/frontend/src/components/languages/index.tsx b/frontend/src/components/languages/index.tsx new file mode 100644 index 00000000..47fc4aec --- /dev/null +++ b/frontend/src/components/languages/index.tsx @@ -0,0 +1,223 @@ +/* + * Copyright © 2024 Hexastack. All rights reserved. + * + * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: + * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. + * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). + * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. 
+ */ + +import { Flag } from "@mui/icons-material"; +import AddIcon from "@mui/icons-material/Add"; +import { Button, Grid, Paper } from "@mui/material"; +import { GridColDef } from "@mui/x-data-grid"; +import { useTranslation } from "react-i18next"; +import { useQueryClient } from "react-query"; + +import { DeleteDialog } from "@/app-components/dialogs/DeleteDialog"; +import { FilterTextfield } from "@/app-components/inputs/FilterTextfield"; +import { + ActionColumnLabel, + useActionColumns, +} from "@/app-components/tables/columns/getColumns"; +import { renderHeader } from "@/app-components/tables/columns/renderHeader"; +import { DataGrid } from "@/app-components/tables/DataGrid"; +import { isSameEntity } from "@/hooks/crud/helpers"; +import { useDelete } from "@/hooks/crud/useDelete"; +import { useFind } from "@/hooks/crud/useFind"; +import { useUpdate } from "@/hooks/crud/useUpdate"; +import { getDisplayDialogs, useDialog } from "@/hooks/useDialog"; +import { useHasPermission } from "@/hooks/useHasPermission"; +import { useSearch } from "@/hooks/useSearch"; +import { useToast } from "@/hooks/useToast"; +import { PageHeader } from "@/layout/content/PageHeader"; +import { EntityType } from "@/services/types"; +import { ILanguage } from "@/types/language.types"; +import { PermissionAction } from "@/types/permission.types"; +import { getDateTimeFormatter } from "@/utils/date"; + +import { LanguageDialog } from "./LanguageDialog"; + +export const Languages = () => { + const { t } = useTranslation(); + const { toast } = useToast(); + const addDialogCtl = useDialog(false); + const editDialogCtl = useDialog(false); + const deleteDialogCtl = useDialog(false); + const queryClient = useQueryClient(); + const hasPermission = useHasPermission(); + const { onSearch, searchPayload } = useSearch({ + $or: ["title", "code"], + }); + const { dataGridProps, refetch } = useFind( + { entity: EntityType.LANGUAGE }, + { + params: searchPayload, + }, + ); + const { mutateAsync: 
updateLanguage } = useUpdate(EntityType.LANGUAGE, { + onError: () => { + toast.error(t("message.internal_server_error")); + }, + onSuccess() { + refetch(); + toast.success(t("message.success_save")); + }, + }); + const { mutateAsync: deleteLanguage } = useDelete(EntityType.LANGUAGE, { + onError: () => { + toast.error(t("message.internal_server_error")); + }, + onSuccess() { + queryClient.removeQueries({ + predicate: ({ queryKey }) => { + const [_qType, qEntity] = queryKey; + + return isSameEntity(qEntity, EntityType.NLP_SAMPLE); + }, + }); + deleteDialogCtl.closeDialog(); + toast.success(t("message.item_delete_success")); + }, + }); + const toggleDefault = (row: ILanguage) => { + if (!row.isDefault) { + updateLanguage({ + id: row.id, + params: { + isDefault: true, + }, + }); + } + }; + const actionColumns = useActionColumns( + EntityType.LANGUAGE, + [ + { + label: ActionColumnLabel.Toggle, + action: (row) => toggleDefault(row), + requires: [PermissionAction.UPDATE], + getState: (row) => row.isDefault, + helperText: t("button.mark_as_default"), + }, + { + label: ActionColumnLabel.Edit, + action: (row) => editDialogCtl.openDialog(row), + requires: [PermissionAction.UPDATE], + }, + { + label: ActionColumnLabel.Delete, + action: (row) => deleteDialogCtl.openDialog(row.id), + requires: [PermissionAction.DELETE], + isDisabled: (row) => row.isDefault, + }, + ], + t("label.operations"), + ); + const columns: GridColDef[] = [ + { field: "id", headerName: "ID" }, + { + flex: 2, + field: "title", + headerName: t("label.title"), + disableColumnMenu: true, + renderHeader, + headerAlign: "left", + }, + { + flex: 1, + field: "code", + headerName: t("label.code"), + disableColumnMenu: true, + renderHeader, + headerAlign: "left", + }, + { + flex: 1, + field: "isDefault", + headerName: t("label.is_default"), + disableColumnMenu: true, + renderHeader, + headerAlign: "left", + valueGetter: (value) => (value ? 
t("label.yes") : t("label.no")), + }, + { + flex: 1, + field: "isRTL", + headerName: t("label.is_rtl"), + disableColumnMenu: true, + renderHeader, + headerAlign: "left", + valueGetter: (value) => (value ? t("label.yes") : t("label.no")), + }, + { + minWidth: 140, + field: "createdAt", + headerName: t("label.createdAt"), + disableColumnMenu: true, + renderHeader, + resizable: false, + headerAlign: "left", + valueGetter: (params) => + t("datetime.created_at", getDateTimeFormatter(params)), + }, + { + minWidth: 140, + field: "updatedAt", + headerName: t("label.updatedAt"), + disableColumnMenu: true, + renderHeader, + resizable: false, + headerAlign: "left", + valueGetter: (params) => + t("datetime.updated_at", getDateTimeFormatter(params)), + }, + actionColumns, + ]; + + return ( + + + + { + if (deleteDialogCtl?.data) deleteLanguage(deleteDialogCtl.data); + }} + /> + + + + + + {hasPermission(EntityType.LANGUAGE, PermissionAction.CREATE) ? ( + + + + ) : null} + + + + + + + + + + + ); +}; diff --git a/frontend/src/components/nlp/NlpImportDialog.tsx b/frontend/src/components/nlp/NlpImportDialog.tsx index f6289b19..d62fc400 100644 --- a/frontend/src/components/nlp/NlpImportDialog.tsx +++ b/frontend/src/components/nlp/NlpImportDialog.tsx @@ -17,6 +17,7 @@ import AttachmentInput from "@/app-components/attachment/AttachmentInput"; import { DialogTitle } from "@/app-components/dialogs/DialogTitle"; import { ContentContainer } from "@/app-components/dialogs/layouts/ContentContainer"; import { ContentItem } from "@/app-components/dialogs/layouts/ContentItem"; +import { isSameEntity } from "@/hooks/crud/helpers"; import { useApiClient } from "@/hooks/useApiClient"; import { DialogControlProps } from "@/hooks/useDialog"; import { useToast } from "@/hooks/useToast"; @@ -40,11 +41,19 @@ export const NlpImportDialog: FC = ({ attachmentId && (await apiClient.importNlpSamples(attachmentId)); }, onSuccess: () => { - queryClient.removeQueries([ - QueryType.collection, - 
EntityType.NLP_SAMPLE, - ]); + queryClient.removeQueries({ + predicate: ({ queryKey }) => { + const [qType, qEntity] = queryKey; + return ( + ((qType === QueryType.count || qType === QueryType.collection) && + isSameEntity(qEntity, EntityType.NLP_SAMPLE)) || + isSameEntity(qEntity, EntityType.NLP_SAMPLE_ENTITY) || + isSameEntity(qEntity, EntityType.NLP_ENTITY) || + isSameEntity(qEntity, EntityType.NLP_VALUE) + ); + }, + }); handleCloseDialog(); toast.success(t("message.success_save")); }, diff --git a/frontend/src/components/nlp/NlpSampleDialog.tsx b/frontend/src/components/nlp/NlpSampleDialog.tsx index aee45a4f..2236ca36 100644 --- a/frontend/src/components/nlp/NlpSampleDialog.tsx +++ b/frontend/src/components/nlp/NlpSampleDialog.tsx @@ -17,14 +17,14 @@ import { DialogControlProps } from "@/hooks/useDialog"; import { useToast } from "@/hooks/useToast"; import { EntityType } from "@/services/types"; import { + INlpDatasetSample, INlpDatasetSampleAttributes, INlpSampleFormAttributes, - INlpSampleFull, } from "@/types/nlp-sample.types"; import NlpDatasetSample from "./components/NlpTrainForm"; -export type NlpSampleDialogProps = DialogControlProps; +export type NlpSampleDialogProps = DialogControlProps; export const NlpSampleDialog: FC = ({ open, data: sample, @@ -44,15 +44,16 @@ export const NlpSampleDialog: FC = ({ toast.success(t("message.success_save")); }, }); - const onSubmitForm = (params: INlpSampleFormAttributes) => { + const onSubmitForm = (form: INlpSampleFormAttributes) => { if (sample?.id) { updateSample( { id: sample.id, params: { - text: params.text, - type: params.type, - entities: [...params.keywordEntities, ...params.traitEntities], + text: form.text, + type: form.type, + entities: [...form.keywordEntities, ...form.traitEntities], + language: form.language, }, }, { diff --git a/frontend/src/components/nlp/NlpValueDialog.tsx b/frontend/src/components/nlp/NlpValueDialog.tsx index 9eb0341e..2524805e 100644 --- 
a/frontend/src/components/nlp/NlpValueDialog.tsx +++ b/frontend/src/components/nlp/NlpValueDialog.tsx @@ -20,10 +20,11 @@ import { ContentItem } from "@/app-components/dialogs/layouts/ContentItem"; import { Input } from "@/app-components/inputs/Input"; import MultipleInput from "@/app-components/inputs/MultipleInput"; import { useCreate } from "@/hooks/crud/useCreate"; +import { useGet } from "@/hooks/crud/useGet"; import { useUpdate } from "@/hooks/crud/useUpdate"; import { DialogControlProps } from "@/hooks/useDialog"; import { useToast } from "@/hooks/useToast"; -import { EntityType } from "@/services/types"; +import { EntityType, Format } from "@/services/types"; import { INlpValue, INlpValueAttributes } from "@/types/nlp-value.types"; export type TNlpValueAttributesWithRequiredExpressions = INlpValueAttributes & { @@ -44,11 +45,16 @@ export const NlpValueDialog: FC = ({ const { t } = useTranslation(); const { toast } = useToast(); const { query } = useRouter(); + const { refetch: refetchEntity } = useGet(data?.entity || String(query.id), { + entity: EntityType.NLP_ENTITY, + format: Format.FULL, + }); const { mutateAsync: createNlpValue } = useCreate(EntityType.NLP_VALUE, { onError: () => { toast.error(t("message.internal_server_error")); }, onSuccess(data) { + refetchEntity(); closeDialog(); toast.success(t("message.success_save")); callback?.(data); diff --git a/frontend/src/components/nlp/components/NlpSample.tsx b/frontend/src/components/nlp/components/NlpSample.tsx index b98e9ae3..4d1a8852 100644 --- a/frontend/src/components/nlp/components/NlpSample.tsx +++ b/frontend/src/components/nlp/components/NlpSample.tsx @@ -19,6 +19,7 @@ import { Grid, IconButton, MenuItem, + Stack, } from "@mui/material"; import { GridColDef } from "@mui/x-data-grid"; import { useState } from "react"; @@ -26,6 +27,7 @@ import { useTranslation } from "react-i18next"; import { DeleteDialog } from "@/app-components/dialogs"; import { ChipEntity } from 
"@/app-components/displays/ChipEntity"; +import AutoCompleteEntitySelect from "@/app-components/inputs/AutoCompleteEntitySelect"; import { FilterTextfield } from "@/app-components/inputs/FilterTextfield"; import { Input } from "@/app-components/inputs/Input"; import { @@ -43,9 +45,10 @@ import { useHasPermission } from "@/hooks/useHasPermission"; import { useSearch } from "@/hooks/useSearch"; import { useToast } from "@/hooks/useToast"; import { EntityType, Format } from "@/services/types"; +import { ILanguage } from "@/types/language.types"; import { + INlpDatasetSample, INlpSample, - INlpSampleFull, NlpSampleType, } from "@/types/nlp-sample.types"; import { INlpSampleEntity } from "@/types/nlp-sample_entity.types"; @@ -66,12 +69,17 @@ export default function NlpSample() { const { apiUrl } = useConfig(); const { toast } = useToast(); const { t } = useTranslation(); - const [dataset, setDataSet] = useState(""); + const [type, setType] = useState(undefined); + const [language, setLanguage] = useState(undefined); const hasPermission = useHasPermission(); const getNlpEntityFromCache = useGetFromCache(EntityType.NLP_ENTITY); const getNlpValueFromCache = useGetFromCache(EntityType.NLP_VALUE); + const getSampleEntityFromCache = useGetFromCache( + EntityType.NLP_SAMPLE_ENTITY, + ); + const getLanguageFromCache = useGetFromCache(EntityType.LANGUAGE); const { onSearch, searchPayload } = useSearch({ - $eq: dataset === "" ? [] : [{ type: dataset as NlpSampleType }], + $eq: [...(type ? [{ type }] : []), ...(language ? 
[{ language }] : [])], $iLike: ["text"], }); const { mutateAsync: deleteNlpSample } = useDelete(EntityType.NLP_SAMPLE, { @@ -90,21 +98,30 @@ export default function NlpSample() { }, ); const deleteDialogCtl = useDialog(false); - const editDialogCtl = useDialog(false); + const editDialogCtl = useDialog(false); const importDialogCtl = useDialog(false); - const actionColumns = getActionsColumn( + const actionColumns = getActionsColumn( [ { label: ActionColumnLabel.Edit, - action: ({ entities, ...rest }) => { - const data: INlpSampleFull = { + action: ({ entities, language, ...rest }) => { + const data: INlpDatasetSample = { ...rest, - entities: entities?.map(({ end, start, value, entity }) => ({ - end, - start, - value: getNlpValueFromCache(value)?.value, - entity: getNlpEntityFromCache(entity)?.name, - })) as unknown as INlpSampleEntity[], + entities: entities?.map((e) => { + const sampleEntity = getSampleEntityFromCache(e); + const { end, start, value, entity } = + sampleEntity as INlpSampleEntity; + + return { + end, + start, + value: getNlpValueFromCache(value)?.value || "", + entity: getNlpEntityFromCache(entity)?.name || "", + }; + }), + language: language + ? 
(getLanguageFromCache(language) as ILanguage).code + : null, }; editDialogCtl.openDialog(data); @@ -119,7 +136,7 @@ export default function NlpSample() { ], t("label.operations"), ); - const columns: GridColDef[] = [ + const columns: GridColDef[] = [ { flex: 1, field: "text", @@ -131,39 +148,56 @@ export default function NlpSample() { { flex: 1, field: "entities", - renderCell: ({ row }) => - row.entities.map((entity) => ( - ( - ( + + {row.entities + .map((e) => getSampleEntityFromCache(e) as INlpSampleEntity) + .filter((e) => !!e) + .map((entity) => ( + - {value} - {` `}={` `} - - - } + field="name" + render={(value) => ( + + {value} + {` `}={` `} + + + } + /> + )} + entity={EntityType.NLP_ENTITY} /> - )} - entity={EntityType.NLP_ENTITY} - /> - )), + ))} + + ), headerName: t("label.entities"), sortable: false, disableColumnMenu: true, renderHeader, }, + { + maxWidth: 90, + field: "language", + renderCell: ({ row }) => { + return row.language ? getLanguageFromCache(row.language)?.title : ""; + }, + headerName: t("label.language"), + sortable: true, + disableColumnMenu: true, + renderHeader, + }, { maxWidth: 90, field: "type", @@ -232,18 +266,33 @@ export default function NlpSample() { fullWidth={false} sx={{ minWidth: "256px" }} /> + + fullWidth={false} + sx={{ + minWidth: "150px", + }} + autoFocus + searchFields={["title", "code"]} + entity={EntityType.LANGUAGE} + format={Format.BASIC} + labelKey="title" + label={t("label.language")} + multiple={false} + onChange={(_e, selected) => setLanguage(selected?.id)} + /> setDataSet(e.target.value)} + value={type} + onChange={(e) => setType(e.target.value as NlpSampleType)} SelectProps={{ - ...(dataset !== "" && { + ...(type && { IconComponent: () => ( - setDataSet("")}> + setType(undefined)}> ), @@ -288,7 +337,7 @@ export default function NlpSample() { variant="contained" href={buildURL( apiUrl, - `nlpsample/export${dataset ? `?type=${dataset}` : ""}`, + `nlpsample/export${type ? 
`?type=${type}` : ""}`, )} startIcon={} > diff --git a/frontend/src/components/nlp/components/NlpTrainForm.tsx b/frontend/src/components/nlp/components/NlpTrainForm.tsx index e43a86d4..edf991cd 100644 --- a/frontend/src/components/nlp/components/NlpTrainForm.tsx +++ b/frontend/src/components/nlp/components/NlpTrainForm.tsx @@ -23,7 +23,7 @@ import { RadioGroup, Typography, } from "@mui/material"; -import { FC, useCallback, useMemo, useState } from "react"; +import { FC, useCallback, useEffect, useMemo, useState } from "react"; import { Controller, useFieldArray, useForm } from "react-hook-form"; import { useTranslation } from "react-i18next"; import { useQuery } from "react-query"; @@ -36,18 +36,19 @@ import { useFind } from "@/hooks/crud/useFind"; import { useGetFromCache } from "@/hooks/crud/useGet"; import { useApiClient } from "@/hooks/useApiClient"; import { EntityType, Format } from "@/services/types"; +import { ILanguage } from "@/types/language.types"; import { INlpEntity } from "@/types/nlp-entity.types"; import { INlpDatasetKeywordEntity, + INlpDatasetSample, INlpDatasetTraitEntity, INlpSampleFormAttributes, - INlpSampleFull, NlpSampleType, } from "@/types/nlp-sample.types"; import { INlpValue } from "@/types/nlp-value.types"; type NlpDatasetSampleProps = { - sample?: INlpSampleFull; + sample?: INlpDatasetSample; submitForm: (params: INlpSampleFormAttributes) => void; }; @@ -64,68 +65,40 @@ const NlpDatasetSample: FC = ({ { hasCount: false, }, - { - onSuccess(entities) { - // By default append trait entities - if (!sample) { - removeTraitEntity(); - (entities || []) - .filter(({ lookups }) => lookups.includes("trait")) - .forEach(({ name }) => { - appendTraitEntity({ - entity: name, - value: "", - }); - }); - } - }, - }, ); const getNlpValueFromCache = useGetFromCache(EntityType.NLP_VALUE); - // Default trait entities to append to the form - const defaultTraitEntities = useMemo(() => { - if (!sample || !entities) return []; - - const traitEntities = 
entities.filter(({ lookups }) => - lookups.includes("trait"), - ); - const sampleTraitEntities = sample.entities.filter( - (e) => typeof e.start === "undefined", - ); - - if (sampleTraitEntities.length === traitEntities.length) { - return sampleTraitEntities; - } - - const sampleEntityNames = new Set(sampleTraitEntities.map((e) => e.entity)); - const missingEntities = traitEntities - .filter(({ name }) => !sampleEntityNames.has(name)) - .map(({ name }) => ({ - entity: name, - value: "", - })); - - return [...sampleTraitEntities, ...missingEntities]; - }, [entities, sample]); + // eslint-disable-next-line react-hooks/exhaustive-deps + const defaultValues: INlpSampleFormAttributes = useMemo( + () => ({ + type: sample?.type || NlpSampleType.train, + text: sample?.text || "", + language: sample?.language || null, + traitEntities: (entities || []) + .filter(({ lookups }) => { + return lookups.includes("trait"); + }) + .map((e) => { + return { + entity: e.name, + value: sample + ? sample.entities.find(({ entity }) => entity === e.name)?.value + : "", + } as INlpDatasetTraitEntity; + }), + keywordEntities: (sample?.entities || []).filter( + (e) => "start" in e && typeof e.start === "number", + ) as INlpDatasetKeywordEntity[], + }), + [sample, entities], + ); const { handleSubmit, control, register, reset, setValue, watch } = useForm({ - defaultValues: { - type: sample?.type || NlpSampleType.train, - text: sample?.text || "", - traitEntities: defaultTraitEntities, - keywordEntities: - sample?.entities.filter((e) => typeof e.start === "number") || [], - }, + defaultValues, }); const currentText = watch("text"); const currentType = watch("type"); const { apiClient } = useApiClient(); - const { - fields: traitEntities, - append: appendTraitEntity, - update: updateTraitEntity, - remove: removeTraitEntity, - } = useFieldArray({ + const { fields: traitEntities, update: updateTraitEntity } = useFieldArray({ control, name: "traitEntities", }); @@ -153,12 +126,16 @@ const 
NlpDatasetSample: FC = ({ }, onSuccess: (result) => { const traitEntities: INlpDatasetTraitEntity[] = result.entities.filter( - (e) => !("start" in e && "end" in e), + (e) => !("start" in e && "end" in e) && e.entity !== "language", ); const keywordEntities = result.entities.filter( (e) => "start" in e && "end" in e, ) as INlpDatasetKeywordEntity[]; + const language = result.entities.find( + ({ entity }) => entity === "language", + ); + setValue("language", language?.value || ""); setValue("traitEntities", traitEntities); setValue("keywordEntities", keywordEntities); }, @@ -167,7 +144,7 @@ const NlpDatasetSample: FC = ({ const findInsertIndex = (newItem: INlpDatasetKeywordEntity): number => { const index = keywordEntities.findIndex( - (entity) => entity.start > newItem.start, + (entity) => entity.start && newItem.start && entity.start > newItem.start, ); return index === -1 ? keywordEntities.length : index; @@ -177,14 +154,20 @@ const NlpDatasetSample: FC = ({ start: number; end: number; } | null>(null); - const onSubmitForm = (params: INlpSampleFormAttributes) => { - submitForm(params); - reset(); - removeTraitEntity(); - removeKeywordEntity(); + const onSubmitForm = (form: INlpSampleFormAttributes) => { + submitForm(form); refetchEntities(); + reset({ + ...defaultValues, + text: "", + }); }; + useEffect(() => { + reset(defaultValues); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [JSON.stringify(defaultValues)]); + return (
@@ -247,6 +230,39 @@ const NlpDatasetSample: FC = ({ /> + + { + const { onChange, ...rest } = field; + + return ( + + fullWidth={true} + autoFocus + searchFields={["title", "code"]} + entity={EntityType.LANGUAGE} + format={Format.BASIC} + labelKey="title" + idKey="code" + label={t("label.language")} + multiple={false} + {...field} + onChange={(_e, selected) => { + onChange(selected?.code); + }} + {...rest} + /> + ); + }} + /> + {traitEntities.map((traitEntity, index) => ( { return ( - {/* */} - diff --git a/frontend/src/components/nlp/index.tsx b/frontend/src/components/nlp/index.tsx index 70654e73..506c08ab 100644 --- a/frontend/src/components/nlp/index.tsx +++ b/frontend/src/components/nlp/index.tsx @@ -81,6 +81,7 @@ export const Nlp = ({ text: params.text, type: params.type, entities: [...params.traitEntities, ...params.keywordEntities], + language: params.language, }); }; diff --git a/frontend/src/components/translations/EditTranslationDialog.tsx b/frontend/src/components/translations/EditTranslationDialog.tsx index d26cb55a..20bfab81 100644 --- a/frontend/src/components/translations/EditTranslationDialog.tsx +++ b/frontend/src/components/translations/EditTranslationDialog.tsx @@ -7,8 +7,14 @@ * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. 
*/ -import { Dialog, DialogActions, DialogContent } from "@mui/material"; -import { useEffect, FC, useMemo } from "react"; +import { + Dialog, + DialogActions, + DialogContent, + FormLabel, + Typography, +} from "@mui/material"; +import { FC, useEffect } from "react"; import { Controller, useForm } from "react-hook-form"; import { useTranslation } from "react-i18next"; @@ -16,15 +22,15 @@ import DialogButtons from "@/app-components/buttons/DialogButtons"; import { DialogTitle } from "@/app-components/dialogs/DialogTitle"; import { ContentContainer } from "@/app-components/dialogs/layouts/ContentContainer"; import { ContentItem } from "@/app-components/dialogs/layouts/ContentItem"; +import { useFind } from "@/hooks/crud/useFind"; import { useUpdate } from "@/hooks/crud/useUpdate"; import { DialogControlProps } from "@/hooks/useDialog"; -import { useSetting } from "@/hooks/useSetting"; import { useToast } from "@/hooks/useToast"; import { EntityType } from "@/services/types"; import { + ITranslation, ITranslationAttributes, ITranslations, - ITranslation, } from "@/types/translation.types"; import TranslationInput from "./TranslationInput"; @@ -36,10 +42,14 @@ export const EditTranslationDialog: FC = ({ closeDialog, ...rest }) => { + const { data: languages } = useFind( + { entity: EntityType.LANGUAGE }, + { + hasCount: false, + }, + ); const { t } = useTranslation(); const { toast } = useToast(); - const availableLanguages = useSetting("nlp_settings", "languages"); - const defaultLanguage = useSetting("nlp_settings", "default_lang"); const { mutateAsync: updateTranslation } = useUpdate(EntityType.TRANSLATION, { onError: () => { toast.error(t("message.internal_server_error")); @@ -49,29 +59,16 @@ export const EditTranslationDialog: FC = ({ toast.success(t("message.success_save")); }, }); - const defaultValues: ITranslation | undefined = useMemo( - () => - data - ? 
{ - ...data, - translations: { - ...data?.translations, - [defaultLanguage]: data?.str, - }, - } - : undefined, - [defaultLanguage, data], - ); const { reset, control, handleSubmit } = useForm({ - defaultValues, + defaultValues: data, }); const onSubmitForm = async (params: ITranslationAttributes) => { if (data?.id) updateTranslation({ id: data.id, params }); }; useEffect(() => { - if (open) reset(defaultValues); - }, [open, reset, defaultValues]); + if (open) reset(data); + }, [open, reset, data]); return ( @@ -80,21 +77,26 @@ export const EditTranslationDialog: FC = ({ {t("title.update_translation")} + + {t("label.original_text")} + {data?.str} + - {availableLanguages?.map((language: string) => ( - - ( - - )} - /> - - ))} + {languages + .filter(({ isDefault }) => !isDefault) + .map((language) => ( + + ( + + )} + /> + + ))} diff --git a/frontend/src/components/translations/TranslationInput.tsx b/frontend/src/components/translations/TranslationInput.tsx index 73db4a1e..abad2781 100644 --- a/frontend/src/components/translations/TranslationInput.tsx +++ b/frontend/src/components/translations/TranslationInput.tsx @@ -7,24 +7,16 @@ * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. 
*/ -import CheckIcon from "@mui/icons-material/Check"; -import CloseIcon from "@mui/icons-material/Close"; import { Grid } from "@mui/material"; import React from "react"; import { ControllerRenderProps } from "react-hook-form"; import { Input } from "@/app-components/inputs/Input"; -import { - ITranslationAttributes, - ITranslations, -} from "@/types/translation.types"; - -const isRTL = (language: string) => { - return ["AR"].includes(language.toUpperCase()); -}; +import { ILanguage } from "@/types/language.types"; +import { ITranslationAttributes } from "@/types/translation.types"; interface RenderTranslationInputProps { - language: keyof ITranslations; + language: ILanguage; field: ControllerRenderProps; } @@ -34,14 +26,14 @@ const TranslationInput: React.FC = ({ }) => ( - {language.toUpperCase()} - {field.value ? : } + {language.title} } multiline={true} + minRows={3} {...field} /> ); diff --git a/frontend/src/components/translations/index.tsx b/frontend/src/components/translations/index.tsx index a078b5ef..7f44a621 100644 --- a/frontend/src/components/translations/index.tsx +++ b/frontend/src/components/translations/index.tsx @@ -9,7 +9,7 @@ import { faLanguage } from "@fortawesome/free-solid-svg-icons"; import AutorenewIcon from "@mui/icons-material/Autorenew"; -import { Button, Chip, Grid, Paper } from "@mui/material"; +import { Button, Chip, Grid, Paper, Stack } from "@mui/material"; import { GridColDef } from "@mui/x-data-grid"; import { useTranslation } from "react-i18next"; @@ -25,10 +25,10 @@ import { useFind } from "@/hooks/crud/useFind"; import { useRefreshTranslations } from "@/hooks/entities/translation-hooks"; import { getDisplayDialogs, useDialog } from "@/hooks/useDialog"; import { useSearch } from "@/hooks/useSearch"; -import { useSetting } from "@/hooks/useSetting"; import { useToast } from "@/hooks/useToast"; import { PageHeader } from "@/layout/content/PageHeader"; import { EntityType } from "@/services/types"; +import { ILanguage } from 
"@/types/language.types"; import { PermissionAction } from "@/types/permission.types"; import { ITranslation } from "@/types/translation.types"; import { getDateTimeFormatter } from "@/utils/date"; @@ -38,7 +38,12 @@ import { EditTranslationDialog } from "./EditTranslationDialog"; export const Translations = () => { const { t } = useTranslation(); const { toast } = useToast(); - const availableLanguages = useSetting("nlp_settings", "languages"); + const { data: languages } = useFind( + { entity: EntityType.LANGUAGE }, + { + hasCount: false, + }, + ); const editDialogCtl = useDialog(false); const deleteDialogCtl = useDialog(false); const { onSearch, searchPayload } = useSearch({ @@ -64,8 +69,8 @@ export const Translations = () => { onError: () => { toast.error(t("message.internal_server_error")); }, - onSuccess: (data) => { - if (data.acknowledged && data.deletedCount > 0) refreshTranslations(); + onSuccess: () => { + refreshTranslations(); toast.success(t("message.success_translation_refresh")); }, }); @@ -92,22 +97,23 @@ export const Translations = () => { field: "translations", headerName: t("label.translations"), sortable: false, - renderCell: (params) => - availableLanguages.map((language: string) => ( - - )), - }, - { - maxWidth: 127, - field: "translated", - resizable: false, - headerName: t("label.translated"), + renderCell: (params) => ( + + {languages + .filter(({ isDefault }) => !isDefault) + .map((language: ILanguage) => ( + + ))} + + ), }, { maxWidth: 140, @@ -167,7 +173,6 @@ export const Translations = () => { deleteTranslation(deleteDialogCtl.data); }} /> - diff --git a/frontend/src/components/visual-editor/form/inputs/triggers/PatternInput.tsx b/frontend/src/components/visual-editor/form/inputs/triggers/PatternInput.tsx index 38eb1ea2..1b11e2e5 100644 --- a/frontend/src/components/visual-editor/form/inputs/triggers/PatternInput.tsx +++ b/frontend/src/components/visual-editor/form/inputs/triggers/PatternInput.tsx @@ -15,6 +15,7 @@ import { 
useTranslation } from "react-i18next"; import AutoCompleteEntitySelect from "@/app-components/inputs/AutoCompleteEntitySelect"; import { Input } from "@/app-components/inputs/Input"; import { RegexInput } from "@/app-components/inputs/RegexInput"; +import { useGetFromCache } from "@/hooks/crud/useGet"; import { EntityType, Format } from "@/services/types"; import { IBlockAttributes, @@ -25,7 +26,8 @@ import { PayloadPattern, } from "@/types/block.types"; import { IMenuItem } from "@/types/menu.types"; -import { INlpValueFull } from "@/types/nlp-value.types"; +import { INlpEntity } from "@/types/nlp-entity.types"; +import { INlpValue } from "@/types/nlp-value.types"; import { ContentPostbackInput } from "./ContentPostbackInput"; import { PostbackInput } from "./PostbackInput"; @@ -64,6 +66,7 @@ const PatternInput: FC = ({ value, onChange, idx }) => { register, formState: { errors }, } = useFormContext(); + const getNlpEntityFromCache = useGetFromCache(EntityType.NLP_ENTITY); const [pattern, setPattern] = useState(value); const [patternType, setPatternType] = useState(getType(value)); const types = [ @@ -140,7 +143,7 @@ const PatternInput: FC = ({ value, onChange, idx }) => { {patternType === "nlp" ? ( - + value={(pattern as NlpPattern[]).map((v) => "value" in v && v.value ? v.value : v.entity, )} @@ -153,25 +156,31 @@ const PatternInput: FC = ({ value, onChange, idx }) => { multiple={true} onChange={(_e, data) => { setPattern( - data.map((d) => - d.value === "any" + data.map((d) => { + const entity = getNlpEntityFromCache(d.entity) as INlpEntity; + + return d.value === "any" ? 
{ match: "entity", - entity: d.entity.name, + entity: entity.name, } : { match: "value", - entity: d.entity.name, + entity: entity.name, value: d.value, - }, - ), + }; + }), ); }} getOptionLabel={(option) => { - return `${option.entity.name}=${option.value}`; + const entity = getNlpEntityFromCache(option.entity) as INlpEntity; + + return `${entity.name}=${option.value}`; }} groupBy={(option) => { - return option.entity.name; + const entity = getNlpEntityFromCache(option.entity) as INlpEntity; + + return entity.name; }} renderGroup={(params) => (
  • @@ -188,23 +197,25 @@ const PatternInput: FC = ({ value, onChange, idx }) => { )} preprocess={(options) => { return options.reduce((acc, curr) => { - if (curr.entity.lookups.includes("keywords")) { + const entity = getNlpEntityFromCache(curr.entity) as INlpEntity; + + if (entity.lookups.includes("keywords")) { const exists = acc.find( - ({ value, id }) => value === "any" && id === curr.entity.id, + ({ value, id }) => value === "any" && id === entity.id, ); if (!exists) { acc.push({ - entity: curr.entity, - id: curr.entity.id, + entity: entity.id, + id: entity.id, value: "any", - } as INlpValueFull); + } as INlpValue); } } acc.push(curr); return acc; - }, [] as INlpValueFull[]); + }, [] as INlpValue[]); }} /> ) : null} diff --git a/frontend/src/components/visual-editor/v2/Diagrams.tsx b/frontend/src/components/visual-editor/v2/Diagrams.tsx index 37f29fc1..431a4085 100644 --- a/frontend/src/components/visual-editor/v2/Diagrams.tsx +++ b/frontend/src/components/visual-editor/v2/Diagrams.tsx @@ -22,7 +22,6 @@ import { Tab, Tabs, Tooltip, - debounce, tabsClasses, } from "@mui/material"; import { @@ -32,7 +31,13 @@ import { DiagramModel, DiagramModelGenerics, } from "@projectstorm/react-diagrams"; -import { SyntheticEvent, useEffect, useRef, useState } from "react"; +import { + SyntheticEvent, + useCallback, + useEffect, + useRef, + useState, +} from "react"; import { useTranslation } from "react-i18next"; import { DeleteDialog } from "@/app-components/dialogs"; @@ -41,6 +46,7 @@ import { useDelete, useDeleteFromCache } from "@/hooks/crud/useDelete"; import { useFind } from "@/hooks/crud/useFind"; import { useGetFromCache } from "@/hooks/crud/useGet"; import { useUpdate, useUpdateCache } from "@/hooks/crud/useUpdate"; +import useDebouncedUpdate from "@/hooks/useDebouncedUpdate"; import { getDisplayDialogs, useDialog } from "@/hooks/useDialog"; import { useSearch } from "@/hooks/useSearch"; import { EntityType, Format } from "@/services/types"; @@ -108,29 +114,36 @@ 
const Diagrams = () => { const { mutateAsync: updateBlock } = useUpdate(EntityType.BLOCK, { invalidate: false, }); - const debouncedZoomEvent = debounce((event) => { - if (selectedCategoryId) { - engine?.repaintCanvas(); - updateCategory({ - id: selectedCategoryId, - params: { - zoom: event.zoom, - }, - }); - } - event.stopPropagation(); - }, 200); - const debouncedOffsetEvent = debounce((event) => { - if (selectedCategoryId) { - updateCategory({ - id: selectedCategoryId, - params: { - offset: [event.offsetX, event.offsetY], - }, - }); - } - event.stopPropagation(); - }, 200); + const debouncedUpdateCategory = useDebouncedUpdate(updateCategory, 300); + const debouncedZoomEvent = useCallback( + (event: any) => { + if (selectedCategoryId) { + engine?.repaintCanvas(); + debouncedUpdateCategory({ + id: selectedCategoryId, + params: { + zoom: event.zoom, + }, + }); + } + event.stopPropagation(); + }, + [selectedCategoryId, engine, debouncedUpdateCategory], + ); + const debouncedOffsetEvent = useCallback( + (event: any) => { + if (selectedCategoryId) { + debouncedUpdateCategory({ + id: selectedCategoryId, + params: { + offset: [event.offsetX, event.offsetY], + }, + }); + } + event.stopPropagation(); + }, + [selectedCategoryId, debouncedUpdateCategory], + ); const getBlockFromCache = useGetFromCache(EntityType.BLOCK); const updateCachedBlock = useUpdateCache(EntityType.BLOCK); const deleteCachedBlock = useDeleteFromCache(EntityType.BLOCK); diff --git a/frontend/src/hooks/useDebouncedUpdate.tsx b/frontend/src/hooks/useDebouncedUpdate.tsx new file mode 100644 index 00000000..cb36ed9d --- /dev/null +++ b/frontend/src/hooks/useDebouncedUpdate.tsx @@ -0,0 +1,54 @@ +/* + * Copyright © 2024 Hexastack. All rights reserved. + * + * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: + * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. + * 2. 
All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). + * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. + */ + +import { debounce } from "@mui/material"; +import { useCallback, useEffect, useRef } from "react"; + +type DebouncedUpdateParams = { + id: string; + params: Record; +}; + +function useDebouncedUpdate( + apiUpdate: (params: DebouncedUpdateParams) => void, + delay: number = 300, +) { + const accumulatedUpdates = useRef(null); + const processUpdates = useRef( + debounce(() => { + if (accumulatedUpdates.current) { + apiUpdate(accumulatedUpdates.current); + accumulatedUpdates.current = null; + } + }, delay), + ).current; + const handleUpdate = useCallback( + (params: DebouncedUpdateParams) => { + accumulatedUpdates.current = { + id: params.id, + params: { + ...(accumulatedUpdates.current?.params || {}), + ...params.params, + }, + }; + processUpdates(); + }, + [processUpdates], + ); + + useEffect(() => { + return () => { + processUpdates.clear(); + }; + }, [processUpdates]); + + return handleUpdate; +} + +export default useDebouncedUpdate; diff --git a/frontend/src/hooks/usePagination.ts b/frontend/src/hooks/usePagination.ts index d1795ad0..93e5cbfb 100644 --- a/frontend/src/hooks/usePagination.ts +++ b/frontend/src/hooks/usePagination.ts @@ -34,7 +34,7 @@ export const usePagination = ( rowCount: number = -1, initialPaginationState: GridPaginationModel = { page: 0, - pageSize: 5, + pageSize: 10, }, initialSortState?: GridSortModel, hasCount: boolean = true, diff --git a/frontend/src/i18n/en/translation.json b/frontend/src/i18n/en/translation.json index 
e7d7855e..a3b4a5ca 100644 --- a/frontend/src/i18n/en/translation.json +++ b/frontend/src/i18n/en/translation.json @@ -100,7 +100,8 @@ "regex_is_invalid": "Regex is invalid", "attachment_not_found": "Attachment is not found", "title_length_exceeded": "You have reached the maximum length", - "no_label_found": "No label found" + "no_label_found": "No label found", + "code_is_required": "Language code is required" }, "menu": { "terms": "Terms of Use", @@ -122,6 +123,8 @@ "cms": "CMS", "nodes": "Content", "entities": "Content types", + "languages": "Languages", + "manage_localization": "Manage Localization", "translations": "Translations", "import": "Bulk Import", "media_library": "Media Library", @@ -183,6 +186,7 @@ "edit_node": "Edit Content", "import": "Bulk Import", "media_library": "Media Library", + "languages": "Languages", "translations": "Translations", "update_translation": "Update Translation", "broadcast": "Broadcast", @@ -545,7 +549,11 @@ "total": "Total", "general": "General", "other": "Other", - "no_data": "No data" + "no_data": "No data", + "code": "Code", + "is_default": "Default", + "is_rtl": "RTL", + "original_text": "Original Text" }, "placeholder": { "your_username": "Your username", @@ -643,7 +651,8 @@ "media_library": "Media Library", "manage_roles": "Manage Roles", "connect_with_sso": "Connect with SSO", - "add_pattern": "Add pattern" + "add_pattern": "Add pattern", + "mark_as_default": "Mark as Default" }, "input": { "search": "Search" diff --git a/frontend/src/i18n/fr/translation.json b/frontend/src/i18n/fr/translation.json index 2b012a34..89c47a9e 100644 --- a/frontend/src/i18n/fr/translation.json +++ b/frontend/src/i18n/fr/translation.json @@ -101,7 +101,8 @@ "regex_is_invalid": "Le regex est invalide", "attachment_not_found": "La pièce jointe est introuvable", "title_length_exceeded": "Vous avez atteint la longueur maximale", - "no_label_found": "Aucune étiquette trouvée" + "no_label_found": "Aucune étiquette trouvée", + "code_is_required": 
"Le code est requis" }, "menu": { "terms": "Conditions d'utilisation", @@ -123,6 +124,8 @@ "cms": "CMS", "nodes": "Contenu", "entities": "Types de contenu", + "manage_localization": "Internationalisation", + "languages": "Langues", "translations": "Traductions", "import": "Importation en masse", "media_library": "Bibliothéque Media", @@ -184,6 +187,7 @@ "edit_node": "Modifier le contenu", "import": "Importation en masse", "media_library": "Bibliothéque Media", + "languages": "Langues", "translations": "Traductions", "update_translation": "Mettre à jour la traduction", "broadcast": "Diffusion", @@ -545,7 +549,11 @@ "total": "Totale", "general": "Général", "other": "Autre", - "no_data": "Pas de données" + "no_data": "Pas de données", + "code": "Code", + "is_default": "Par Défaut", + "is_rtl": "RTL", + "original_text": "Texte par défaut" }, "placeholder": { "your_username": "Votre nom d'utilisateur", @@ -579,7 +587,8 @@ "start_date": "Date de début", "end_date": "Date de fin", "nlp_value": "Valeur", - "type_message_here": "Ecrivez quelque chose ici ...." 
+ "type_message_here": "Ecrivez quelque chose ici ....", + "mark_as_default": "Par Défaut" }, "button": { "login": "Se connecter", diff --git a/frontend/src/layout/VerticalMenu.tsx b/frontend/src/layout/VerticalMenu.tsx index fe699d0a..73442329 100644 --- a/frontend/src/layout/VerticalMenu.tsx +++ b/frontend/src/layout/VerticalMenu.tsx @@ -21,6 +21,7 @@ import { faUsers, IconDefinition, } from "@fortawesome/free-solid-svg-icons"; +import { Flag, Language } from "@mui/icons-material"; import AppsIcon from "@mui/icons-material/Apps"; import ChevronLeftIcon from "@mui/icons-material/ChevronLeft"; import DriveFolderUploadIcon from "@mui/icons-material/DriveFolderUpload"; @@ -175,14 +176,6 @@ const getMenuItems = (ssoEnabled: boolean): MenuItem[] => [ [EntityType.CONTENT_TYPE]: [PermissionAction.READ], }, }, - { - text: "menu.translations", - href: "/translations", - Icon: faLanguage, - requires: { - [EntityType.TRANSLATION]: [PermissionAction.READ], - }, - }, { text: "menu.media_library", href: "/content/media-library", @@ -249,6 +242,28 @@ const getMenuItems = (ssoEnabled: boolean): MenuItem[] => [ : []), ], }, + { + text: "menu.manage_localization", + Icon: Language, + submenuItems: [ + { + text: "menu.languages", + href: "/localization/languages", + Icon: Flag, + requires: { + [EntityType.LANGUAGE]: [PermissionAction.READ], + }, + }, + { + text: "menu.translations", + href: "/localization/translations", + Icon: faLanguage, + requires: { + [EntityType.TRANSLATION]: [PermissionAction.READ], + }, + }, + ], + }, { text: "menu.settings", href: "/settings", diff --git a/frontend/src/pages/localization/languages.tsx b/frontend/src/pages/localization/languages.tsx new file mode 100644 index 00000000..d75c138a --- /dev/null +++ b/frontend/src/pages/localization/languages.tsx @@ -0,0 +1,23 @@ +/* + * Copyright © 2024 Hexastack. All rights reserved. + * + * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: + * 1. 
The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. + * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). + * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. + */ + +import { ReactElement } from "react"; + +import { Languages } from "@/components/languages"; +import { Layout } from "@/layout"; + +const LanguagesPage = () => { + return ; +}; + +LanguagesPage.getLayout = function getLayout(page: ReactElement) { + return {page}; +}; + +export default LanguagesPage; diff --git a/frontend/src/pages/translations.tsx b/frontend/src/pages/localization/translations.tsx similarity index 100% rename from frontend/src/pages/translations.tsx rename to frontend/src/pages/localization/translations.tsx diff --git a/frontend/src/services/api.class.ts b/frontend/src/services/api.class.ts index c5bdd76e..905d71bc 100644 --- a/frontend/src/services/api.class.ts +++ b/frontend/src/services/api.class.ts @@ -59,6 +59,7 @@ export const ROUTES = { [EntityType.NLP_VALUE]: "/nlpvalue", [EntityType.NLP_SAMPLE_ENTITY]: "", [EntityType.MESSAGE]: "/message", + [EntityType.LANGUAGE]: "/language", [EntityType.TRANSLATION]: "/translation", [EntityType.ATTACHMENT]: "/attachment", [EntityType.CHANNEL]: "/channel", diff --git a/frontend/src/services/entities.ts b/frontend/src/services/entities.ts index 59bd6926..2e9794a1 100644 --- a/frontend/src/services/entities.ts +++ b/frontend/src/services/entities.ts @@ -175,14 +175,24 @@ export const SettingEntity = new schema.Entity(EntityType.SETTING, { processStrategy: 
processCommonStrategy, }); -export const NlpSampleEntity = new schema.Entity( - EntityType.NLP_SAMPLE, +export const LanguageEntity = new schema.Entity( + EntityType.LANGUAGE, undefined, { idAttribute: ({ id }) => id, processStrategy: processCommonStrategy, }, ); + +export const TranslationEntity = new schema.Entity( + EntityType.TRANSLATION, + undefined, + { + idAttribute: ({ id }) => id, + processStrategy: processCommonStrategy, + }, +); + export const NlpValueEntity = new schema.Entity( EntityType.NLP_VALUE, undefined, @@ -201,18 +211,28 @@ export const NlpEntityEntity = new schema.Entity( }, ); +NlpValueEntity.define({ + entity: NlpEntityEntity, +}); + export const NlpSampleEntityEntity = new schema.Entity( EntityType.NLP_SAMPLE_ENTITY, - undefined, + { + entity: NlpEntityEntity, + value: NlpValueEntity, + }, { idAttribute: ({ id }) => id, processStrategy: processCommonStrategy, }, ); -export const TranslationEntity = new schema.Entity( - EntityType.TRANSLATION, - undefined, +export const NlpSampleEntity = new schema.Entity( + EntityType.NLP_SAMPLE, + { + entities: [NlpSampleEntityEntity], + language: LanguageEntity, + }, { idAttribute: ({ id }) => id, processStrategy: processCommonStrategy, @@ -280,6 +300,7 @@ export const ENTITY_MAP = { [EntityType.NLP_ENTITY]: NlpEntityEntity, [EntityType.NLP_SAMPLE_ENTITY]: NlpSampleEntityEntity, [EntityType.NLP_VALUE]: NlpValueEntity, + [EntityType.LANGUAGE]: LanguageEntity, [EntityType.TRANSLATION]: TranslationEntity, [EntityType.ATTACHMENT]: AttachmentEntity, [EntityType.BLOCK]: BlockEntity, diff --git a/frontend/src/services/types.ts b/frontend/src/services/types.ts index 62f37768..49e8d475 100644 --- a/frontend/src/services/types.ts +++ b/frontend/src/services/types.ts @@ -32,6 +32,7 @@ export enum EntityType { NLP_VALUE = "NlpValue", MESSAGE = "Message", MENU = "Menu", + LANGUAGE = "Language", TRANSLATION = "Translation", ATTACHMENT = "Attachment", CHANNEL = "Channel", diff --git a/frontend/src/types/base.types.ts 
b/frontend/src/types/base.types.ts index 678898f2..e063f9fe 100644 --- a/frontend/src/types/base.types.ts +++ b/frontend/src/types/base.types.ts @@ -24,6 +24,7 @@ import { IContentType, IContentTypeAttributes } from "./content-type.types"; import { IContent, IContentAttributes, IContentFull } from "./content.types"; import { IContextVar, IContextVarAttributes } from "./context-var.types"; import { ILabel, ILabelAttributes, ILabelFull } from "./label.types"; +import { ILanguage, ILanguageAttributes } from "./language.types"; import { IMenuNode, IMenuNodeAttributes, @@ -99,13 +100,14 @@ export const POPULATE_BY_TYPE = { "trigger_labels", "assignTo", ], - [EntityType.NLP_SAMPLE]: ["entities"], + [EntityType.NLP_SAMPLE]: ["language", "entities"], [EntityType.NLP_SAMPLE_ENTITY]: ["sample", "entity", "value"], [EntityType.NLP_ENTITY]: ["values"], [EntityType.NLP_VALUE]: ["entity"], [EntityType.MESSAGE]: ["sender", "recipient", "sentBy"], [EntityType.MENU]: ["parent"], [EntityType.MENUTREE]: [], + [EntityType.LANGUAGE]: [], [EntityType.TRANSLATION]: [], [EntityType.ATTACHMENT]: [], [EntityType.CUSTOM_BLOCK]: [], @@ -189,6 +191,7 @@ export interface IEntityMapTypes { ISubscriber, ISubscriberFull >; + [EntityType.LANGUAGE]: IEntityTypes; [EntityType.TRANSLATION]: IEntityTypes; [EntityType.USER]: IEntityTypes; [EntityType.ATTACHMENT]: IEntityTypes; diff --git a/frontend/src/types/language.types.ts b/frontend/src/types/language.types.ts new file mode 100644 index 00000000..4621bfb3 --- /dev/null +++ b/frontend/src/types/language.types.ts @@ -0,0 +1,27 @@ +/* + * Copyright © 2024 Hexastack. All rights reserved. + * + * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms: + * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission. + * 2. 
All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file). + * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. + */ + +import { EntityType, Format } from "@/services/types"; + +import { IBaseSchema, IFormat, OmitPopulate } from "./base.types"; + +export type ILanguages = Record; + +export interface ILanguageAttributes { + title: string; + code: string; + isDefault: boolean; + isRTL: boolean; +} + +export interface ILanguageStub + extends IBaseSchema, + OmitPopulate {} + +export interface ILanguage extends ILanguageStub, IFormat {} diff --git a/frontend/src/types/nlp-sample.types.ts b/frontend/src/types/nlp-sample.types.ts index ebb4e541..e0b924e6 100644 --- a/frontend/src/types/nlp-sample.types.ts +++ b/frontend/src/types/nlp-sample.types.ts @@ -10,6 +10,7 @@ import { EntityType, Format } from "@/services/types"; import { IBaseSchema, IFormat, OmitPopulate } from "./base.types"; +import { ILanguage } from "./language.types"; import { INlpSampleEntity } from "./nlp-sample_entity.types"; export enum NlpSampleType { @@ -23,6 +24,7 @@ export interface INlpSampleAttributes { trained?: boolean; type?: NlpSampleType; entities: string[]; + language: string | null; } export interface INlpSampleStub @@ -31,14 +33,15 @@ export interface INlpSampleStub export interface INlpSample extends INlpSampleStub, IFormat { entities: string[]; + language: string | null; } export interface INlpSampleFull extends INlpSampleStub, IFormat { entities: INlpSampleEntity[]; + language: ILanguage | null; } // Dataset Trainer - export interface INlpDatasetTraitEntity { entity: string; // entity 
name value: string; // value name @@ -60,3 +63,7 @@ export interface INlpDatasetSampleAttributes extends Omit { entities: (INlpDatasetTraitEntity | INlpDatasetKeywordEntity)[]; } + +export interface INlpDatasetSample + extends IBaseSchema, + INlpDatasetSampleAttributes {} \ No newline at end of file diff --git a/frontend/src/types/translation.types.ts b/frontend/src/types/translation.types.ts index ae9dcc3a..a99d245f 100644 --- a/frontend/src/types/translation.types.ts +++ b/frontend/src/types/translation.types.ts @@ -7,9 +7,9 @@ * 3. SaaS Restriction: This software, or any derivative of it, may not be used to offer a competing product or service (SaaS) without prior written consent from Hexastack. Offering the software as a service or using it in a commercial cloud environment without express permission is strictly prohibited. */ -import { Format } from "@/services/types"; +import { EntityType, Format } from "@/services/types"; -import { IBaseSchema, IFormat } from "./base.types"; +import { IBaseSchema, IFormat, OmitPopulate } from "./base.types"; export type ITranslations = Record; @@ -19,11 +19,8 @@ export interface ITranslationAttributes { translated: number; } -export interface ITranslationStub extends IBaseSchema { - str: string; - translations: ITranslations; - translated: number; -} +export interface ITranslationStub + extends IBaseSchema, + OmitPopulate {} export interface ITranslation extends ITranslationStub, IFormat {} - diff --git a/frontend/src/utils/URL.ts b/frontend/src/utils/URL.ts index 086e4d7a..8db093d2 100644 --- a/frontend/src/utils/URL.ts +++ b/frontend/src/utils/URL.ts @@ -29,7 +29,6 @@ export const getFromQuery = ({ export const buildURL = (baseUrl: string, relativePath: string): string => { try { - const url = new URL(relativePath, baseUrl); return url.toString(); @@ -37,3 +36,18 @@ export const buildURL = (baseUrl: string, relativePath: string): string => { throw new Error(`Invalid base URL: ${baseUrl}`); } }; + +export const 
isAbsoluteUrl = (value: string = ""): boolean => { + try { + const url = new URL(value); + const hostnameParts = url.hostname.split("."); + + return ( + (url.protocol === "http:" || url.protocol === "https:") && + hostnameParts.length > 1 && + hostnameParts[hostnameParts.length - 1].length > 1 + ); + } catch (error) { + return false; + } +}; diff --git a/nlu/.env.dev b/nlu/.env.dev index d5023df1..6fac306e 100644 --- a/nlu/.env.dev +++ b/nlu/.env.dev @@ -2,4 +2,5 @@ AUTH_TOKEN=123 LANGUAGE_CLASSIFIER=language-classifier INTENT_CLASSIFIERS=ar,fr,tn TFLC_REPO_ID=Hexastack/tflc -JISF_REPO_ID=Hexastack/jisf +INTENT_CLASSIFIER_REPO_ID=Hexastack/intent-classifier +SLOT_FILLER_REPO_ID=Hexastack/slot-filler diff --git a/nlu/.env.example b/nlu/.env.example index 52370c7b..a863e43a 100644 --- a/nlu/.env.example +++ b/nlu/.env.example @@ -1,5 +1,5 @@ AUTH_TOKEN= LANGUAGE_CLASSIFIER= INTENT_CLASSIFIERS= -TFLC_REPO_ID= -JISF_REPO_ID= \ No newline at end of file +INTENT_CLASSIFIER_REPO_ID= +SLOT_FILLER_REPO_ID= \ No newline at end of file diff --git a/nlu/README.md b/nlu/README.md index ff5ba606..dd8a00b7 100644 --- a/nlu/README.md +++ b/nlu/README.md @@ -40,7 +40,7 @@ pip install -r requirements.txt You should run `source env.sh` on each new shell session. 
This activates the virtualenv and creates a nice alias for `run.py`: ```bash $ cat env.sh -source env/bin/activate +source venv/bin/activate alias run='python run.py' ``` @@ -53,7 +53,7 @@ run fit myexperiment1 mlp mnist --batch_size=32 --learning_rate=0.1 Examples : ```bash # Intent classification -run fit intent-classifier-en-30072024 jisf --intent_num_labels=88 --slot_num_labels=17 --language=en +run fit intent-classifier-en-30072024 intent_classifier --intent_num_labels=88 --slot_num_labels=17 --language=en run predict intent-classifier-fr-30072024 --intent_num_labels=7 --slot_num_labels=2 --language=fr # Language classification diff --git a/nlu/data_loaders/jisfdl.py b/nlu/data_loaders/jisfdl.py index 75ae9499..18f8a89a 100644 --- a/nlu/data_loaders/jisfdl.py +++ b/nlu/data_loaders/jisfdl.py @@ -4,8 +4,8 @@ import json import numpy as np from transformers import PreTrainedTokenizerFast, PreTrainedTokenizer + import boilerplate as tfbp -from utils.jisf_data_mapper import JisfDataMapper from utils.json_helper import JsonHelper @@ -101,8 +101,11 @@ class JISFDL(tfbp.DataLoader): # Filter examples by language lang = self.hparams.language all_examples = data["common_examples"] - examples = filter(lambda exp: any( - e['entity'] == 'language' and e['value'] == lang for e in exp['entities']), all_examples) + + if not bool(lang): + examples = all_examples + else: + examples = filter(lambda exp: any(e['entity'] == 'language' and e['value'] == lang for e in exp['entities']), all_examples) # Parse raw data for exp in examples: @@ -145,7 +148,6 @@ class JISFDL(tfbp.DataLoader): # the classifier. texts = [d.text for d in dataset] encoded_texts = self.encode_texts(texts, tokenizer) - # Map intents, load from the model (evaluate), recompute from dataset otherwise (train) intents = [d.intent for d in dataset] if not model_params: @@ -161,19 +163,35 @@ class JISFDL(tfbp.DataLoader): # To handle those we need to add to slots_names. It can be some other symbol as well. 
slot_names.insert(0, "") else: - intent_names = model_params.intent_names - slot_names = model_params.slot_names + if "intent_names" in model_params: + intent_names = model_params["intent_names"] + else: + intent_names = None + + if "slot_names" in model_params: + slot_names = model_params["slot_names"] + else: + slot_names = None - intent_map = dict() # Dict : intent -> index - for idx, ui in enumerate(intent_names): - intent_map[ui] = idx + if intent_names: + intent_map = dict() # Dict : intent -> index + for idx, ui in enumerate(intent_names): + intent_map[ui] = idx + else: + intent_map = None # Encode intents - encoded_intents = self.encode_intents(intents, intent_map) + if intent_map: + encoded_intents = self.encode_intents(intents, intent_map) + else: + encoded_intents = None - slot_map: Dict[str, int] = dict() # slot -> index - for idx, us in enumerate(slot_names): - slot_map[us] = idx + if slot_names: + slot_map: Dict[str, int] = dict() # slot -> index + for idx, us in enumerate(slot_names): + slot_map[us] = idx + else: + slot_map = None # Encode slots # Text : Add a tune to my elrow Guest List @@ -183,8 +201,12 @@ class JISFDL(tfbp.DataLoader): max_len = len(encoded_texts["input_ids"][0]) # type: ignore all_slots = [td.slots for td in dataset] all_texts = [td.text for td in dataset] - encoded_slots = self.encode_slots(tokenizer, + + if slot_map: + encoded_slots = self.encode_slots(tokenizer, all_slots, all_texts, slot_map, max_len) + else: + encoded_slots = None return encoded_texts, encoded_intents, encoded_slots, intent_names, slot_names diff --git a/nlu/data_loaders/tflcdl.py b/nlu/data_loaders/tflcdl.py index 09a23c0c..b765f788 100644 --- a/nlu/data_loaders/tflcdl.py +++ b/nlu/data_loaders/tflcdl.py @@ -29,7 +29,7 @@ class TFLCDL(tfbp.DataLoader): self.json_helper = JsonHelper("tflc") self._save_dir = save_dir - print(hparams) + # We will opt for a TF-IDF representation of the data as the frequency of word # roots should give us a good idea about which 
language we're dealing with. if method == "fit": diff --git a/nlu/main.py b/nlu/main.py index b85ce114..6e0ac9d8 100644 --- a/nlu/main.py +++ b/nlu/main.py @@ -15,8 +15,8 @@ AUTH_TOKEN = os.getenv("AUTH_TOKEN", "TOKEN_MUST_BE_DEFINED") AVAILABLE_LANGUAGES = os.getenv("AVAILABLE_LANGUAGES", "en,fr").split(',') TFLC_REPO_ID = os.getenv("TFLC_REPO_ID") -JISF_REPO_ID = os.getenv("JISF_REPO_ID") - +INTENT_CLASSIFIER_REPO_ID = os.getenv("INTENT_CLASSIFIER_REPO_ID") +SLOT_FILLER_REPO_ID = os.getenv("SLOT_FILLER_REPO_ID") def load_language_classifier(): # Init language classifier model @@ -27,21 +27,31 @@ def load_language_classifier(): logging.info(f'Successfully loaded the language classifier model') return model - def load_intent_classifiers(): - Model = tfbp.get_model("jisf") - models = {} + Model = tfbp.get_model("intent_classifier") + intent_classifiers = {} for language in AVAILABLE_LANGUAGES: kwargs = {} - models[language] = Model(save_dir=language, method="predict", repo_id=JISF_REPO_ID, **kwargs) - models[language].load_model() + intent_classifiers[language] = Model(save_dir=language, method="predict", repo_id=INTENT_CLASSIFIER_REPO_ID, **kwargs) + intent_classifiers[language].load_model() logging.info(f'Successfully loaded the intent classifier {language} model') - return models + return intent_classifiers + +def load_slot_classifiers(): + Model = tfbp.get_model("slot_classifier") + slot_fillers = {} + for language in AVAILABLE_LANGUAGES: + kwargs = {} + slot_fillers[language] = Model(save_dir=language, method="predict", repo_id=SLOT_FILLER_REPO_ID, **kwargs) + slot_fillers[language].load_model() + logging.info(f'Successfully loaded the slot filler {language} model') + return slot_fillers def load_models(): app.language_classifier = load_language_classifier() # type: ignore app.intent_classifiers = load_intent_classifiers() # type: ignore + app.slot_fillers = load_intent_classifiers() # type: ignore app = FastAPI() @@ -74,13 +84,26 @@ async def check_health(): 
@app.post("/parse") def parse(input: ParseInput, is_authenticated: Annotated[str, Depends(authenticate)]): - if not hasattr(app, 'language_classifier') or not hasattr(app, 'intent_classifiers'): + if not hasattr(app, 'language_classifier') or not hasattr(app, 'intent_classifiers') or not hasattr(app, 'slot_fillers'): headers = {"Retry-After": "120"} # Suggest retrying after 2 minutes - return JSONResponse(status_code=status.HTTP_503_SERVICE_UNAVAILABLE, content={"message": "Models are loading, please retry later."}, headers=headers) + return JSONResponse(status_code=status.HTTP_503_SERVICE_UNAVAILABLE, content={"message": "Models are still loading, please retry later."}, headers=headers) - language = app.language_classifier.get_prediction(input.q) # type: ignore - lang = language.get("value") - prediction = app.intent_classifiers[lang].get_prediction( + language_prediction = app.language_classifier.get_prediction(input.q) # type: ignore + language = language_prediction.get("value") + intent_prediction = app.intent_classifiers[language].get_prediction( input.q) # type: ignore - prediction.get("entities").append(language) - return prediction + slot_prediction = app.slot_fillers[language].get_prediction( + input.q) # type: ignore + + if slot_prediction.get("entities"): + entities = slot_prediction.get("entities") + else: + entities = [] + + entities.append(language_prediction) + + return { + "text": input.q, + "intent": intent_prediction.get("intent"), + "entities": entities, + } diff --git a/nlu/models/jisf.py b/nlu/models/intent_classifier.py similarity index 59% rename from nlu/models/jisf.py rename to nlu/models/intent_classifier.py index 71c14ef6..5491cb85 100644 --- a/nlu/models/jisf.py +++ b/nlu/models/intent_classifier.py @@ -1,4 +1,3 @@ -import functools import json import math from typing import Tuple, Dict, List @@ -22,8 +21,8 @@ from data_loaders.jisfdl import JISFDL import boilerplate as tfbp ## -# JISF : Joint Intent Classification and Slot filling with 
BERT -# This notebook is based on the paper BERT for Joint Intent Classification and Slot Filling by Chen et al. (2019), +# Intent Classification with BERT +# This code is based on the paper BERT for Joint Intent Classification and Slot Filling by Chen et al. (2019), # https://arxiv.org/abs/1902.10909 but on a different dataset made for a class project. # # Ideas were also taken from https://github.com/monologg/JointBERT, which is a PyTorch implementation of @@ -33,19 +32,16 @@ import boilerplate as tfbp BERT_MODEL_BY_LANGUAGE = { 'en': "bert-base-cased", 'fr': "dbmdz/bert-base-french-europeana-cased", - 'ar': 'asafaya/bert-base-arabic', - 'tn': 'dbmdz/bert-base-french-europeana-cased' } @tfbp.default_export -class JISF(tfbp.Model): +class IntentClassifier(tfbp.Model): default_hparams = { - "language": "fr", + "language": "", "num_epochs": 2, "dropout_prob": 0.1, "intent_num_labels": 7, - "slot_num_labels": 40 } data_loader: JISFDL @@ -57,8 +53,8 @@ class JISF(tfbp.Model): # Load Tokenizer from transformers # We will use a pretrained bert model bert-base-cased for both Tokenizer and our classifier. 
- bert_model_name = BERT_MODEL_BY_LANGUAGE[self.hparams.language] - # bert_model_name = typing.cast(str, self.hparams.bert_model_name) + bert_model_name = BERT_MODEL_BY_LANGUAGE[self.hparams.language or "en"] + self.tokenizer = AutoTokenizer.from_pretrained( bert_model_name, use_fast=False) self.bert = TFBertModel.from_pretrained(bert_model_name) @@ -66,27 +62,18 @@ class JISF(tfbp.Model): self.dropout = Dropout(self.hparams.dropout_prob) self.intent_classifier = Dense(self.hparams.intent_num_labels, name="intent_classifier", activation="softmax") - self.slot_classifier = Dense(self.hparams.slot_num_labels, - name="slot_classifier", activation="softmax") def call(self, inputs, **kwargs): - # two outputs from BERT trained_bert = self.bert(inputs, **kwargs) pooled_output = trained_bert.pooler_output - sequence_output = trained_bert.last_hidden_state - - # sequence_output will be used for slot_filling / classification - sequence_output = self.dropout(sequence_output, - training=kwargs.get("training", False)) - slot_probas = self.slot_classifier(sequence_output) - + # pooled_output for intent classification pooled_output = self.dropout(pooled_output, training=kwargs.get("training", False)) intent_probas = self.intent_classifier(pooled_output) - return slot_probas, intent_probas + return intent_probas def load_data(self, data_loader) -> Tuple[BatchEncoding, tf.Tensor, ndarray, int, int]: return data_loader(self.tokenizer) @@ -137,18 +124,11 @@ class JISF(tfbp.Model): raise ValueError( f"Hyperparam intent_num_labels mismatch, should be : {len(intent_names)}" ) - if self.hparams.slot_num_labels != len(slot_names): - raise ValueError( - f"Hyperparam slot_num_labels mismatch, should be : {len(slot_names)}" - ) # Hyperparams, Optimizer and Loss function opt = Adam(learning_rate=3e-5, epsilon=1e-08) - # two outputs, one for slots, another for intents - # we have to fine tune for both - losses = [SparseCategoricalCrossentropy(), - SparseCategoricalCrossentropy()] + losses = 
SparseCategoricalCrossentropy() metrics = [SparseCategoricalAccuracy("accuracy")] @@ -159,11 +139,10 @@ class JISF(tfbp.Model): "attention_mask": encoded_texts["attention_mask"]} super().fit( - x, (encoded_slots, encoded_intents), epochs=self.hparams.num_epochs, batch_size=32, shuffle=True) + x, encoded_intents, epochs=self.hparams.num_epochs, batch_size=32, shuffle=True) # Persist the model self.extra_params["intent_names"] = intent_names - self.extra_params["slot_names"] = slot_names self.save() @@ -175,8 +154,8 @@ class JISF(tfbp.Model): metrics = [SparseCategoricalAccuracy("accuracy")] self.compile(metrics=metrics) - _, intent_probas = self(encoded_texts) # type: ignore - + intent_probas = self(encoded_texts) # type: ignore + scores = self.get_metrics_by_intent(intent_probas, encoded_intents) overall_score = {} @@ -204,85 +183,10 @@ class JISF(tfbp.Model): print(json.dumps(info, indent=2)) return json.dumps(info, indent=2) - - def get_slots_prediction(self, text: str, inputs, slot_probas): - slot_probas_np = slot_probas.numpy() - # Get the indices of the maximum values - slot_ids = slot_probas_np.argmax(axis=-1)[0, :] - - # get all slot names and add to out_dict as keys - out_dict = {} - predicted_slots = set([self.extra_params["slot_names"][s] - for s in slot_ids if s != 0]) - for ps in predicted_slots: - out_dict[ps] = [] - - # retrieving the tokenization that was used in the predictions - tokens = self.tokenizer.convert_ids_to_tokens(inputs["input_ids"][0]) - - # We'd like to eliminate all special tokens from our output - special_tokens = self.tokenizer.special_tokens_map.values() - - for token, slot_id in zip(tokens, slot_ids): - if token in special_tokens: - continue - # add all to out_dict - slot_name = self.extra_params["slot_names"][slot_id] - - if slot_name == "": - continue - - # collect tokens - collected_tokens = [token] - idx = tokens.index(token) - - # see if it starts with ## - # then it belongs to the previous token - if token.startswith("##"): 
- # check if the token already exists or not - if tokens[idx - 1] not in out_dict[slot_name]: - collected_tokens.insert(0, tokens[idx - 1]) - - # add collected tokens to slots - out_dict[slot_name].extend(collected_tokens) - - slot_names_to_ids = {value: key for key, value in enumerate( - self.extra_params["slot_names"])} - - entities = [] - # process out_dict - for slot_name in out_dict: - slot_id = slot_names_to_ids[slot_name] - slot_tokens = out_dict[slot_name] - - slot_value = self.tokenizer.convert_tokens_to_string( - slot_tokens).strip() - - entity = { - "entity": slot_name, - "value": slot_value, - "start": text.find(slot_value), - "end": text.find(slot_value) + len(slot_value), - "confidence": 0, - } - - # The confidence of a slot is the average confidence of tokens in that slot. - indices = [tokens.index(token) for token in slot_tokens] - if len(slot_tokens) > 0: - total = functools.reduce( - lambda proba1, proba2: proba1+proba2, slot_probas_np[0, indices, slot_id], 0) - entity["confidence"] = total / len(slot_tokens) - else: - entity["confidence"] = 0 - - entities.append(entity) - - return entities - def get_prediction(self, text: str): inputs = self.data_loader.encode_text(text, self.tokenizer) - slot_probas, intent_probas = self(inputs) # type: ignore + intent_probas = self(inputs) # type: ignore intent_probas_np = intent_probas.numpy() @@ -292,15 +196,8 @@ class JISF(tfbp.Model): # get the confidences for each intent intent_confidences = intent_probas_np[0] - - entities = [] - if slot_probas is not None: - entities = self.get_slots_prediction(text, inputs, slot_probas) - return { "text": text, "intent": {"name": self.extra_params["intent_names"][intent_id], "confidence": float(intent_confidences[intent_id])}, - "entities": entities, } - diff --git a/nlu/models/slot_filler.py b/nlu/models/slot_filler.py new file mode 100644 index 00000000..0393fb3d --- /dev/null +++ b/nlu/models/slot_filler.py @@ -0,0 +1,250 @@ +import functools +import json +from 
transformers import TFBertModel, AutoTokenizer +from keras.layers import Dropout, Dense +from sys import platform + +if platform == "darwin": + from keras.optimizers.legacy import Adam +else: + from keras.optimizers import Adam + +from keras.losses import SparseCategoricalCrossentropy +from keras.metrics import SparseCategoricalAccuracy +import numpy as np + +from data_loaders.jisfdl import JISFDL + +from sklearn.metrics import classification_report + + +import boilerplate as tfbp + +## +# Slot filling with BERT +# This notebook is based on the paper BERT for Joint Intent Classification and Slot Filling by Chen et al. (2019), +# https://arxiv.org/abs/1902.10909 but on a different dataset made for a class project. +# +# Ideas were also taken from https://github.com/monologg/JointBERT, which is a PyTorch implementation of +# the paper with the original dataset. +## + +BERT_MODEL_BY_LANGUAGE = { + 'en': "bert-base-cased", + 'fr': "dbmdz/bert-base-french-europeana-cased", +} + + +@tfbp.default_export +class SlotFiller(tfbp.Model): + default_hparams = { + "language": "", + "num_epochs": 2, + "dropout_prob": 0.1, + "slot_num_labels": 40 + } + data_loader: JISFDL + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + # Init data loader + self.data_loader = JISFDL(**kwargs) + + # Load Tokenizer from transformers + # We will use a pretrained bert model bert-base-cased for both Tokenizer and our classifier. 
+ bert_model_name = BERT_MODEL_BY_LANGUAGE[self.hparams.language or "en"] + + self.tokenizer = AutoTokenizer.from_pretrained( + bert_model_name, use_fast=False) + self.bert = TFBertModel.from_pretrained(bert_model_name) + + self.dropout = Dropout(self.hparams.dropout_prob) + self.slot_classifier = Dense(self.hparams.slot_num_labels, + name="slot_classifier", activation="softmax") + + + def call(self, inputs, **kwargs): + trained_bert = self.bert(inputs, **kwargs) + sequence_output = trained_bert.last_hidden_state + + # sequence_output will be used for slot_filling + sequence_output = self.dropout(sequence_output, + training=kwargs.get("training", False)) + slot_probas = self.slot_classifier(sequence_output) + + return slot_probas + + @tfbp.runnable + def fit(self): + """Training""" + encoded_texts, encoded_intents, encoded_slots, intent_names, slot_names = self.data_loader( + self.tokenizer) + + if self.hparams.slot_num_labels != len(slot_names): + raise ValueError( + f"Hyperparam slot_num_labels mismatch, should be : {len(slot_names)}" + ) + + # Hyperparams, Optimizer and Loss function + opt = Adam(learning_rate=3e-5, epsilon=1e-08) + + # two outputs, one for slots, another for intents + # we have to fine tune for both + losses = SparseCategoricalCrossentropy() + + metrics = [SparseCategoricalAccuracy("accuracy")] + + # Compile model + self.compile(optimizer=opt, loss=losses, metrics=metrics) + + x = {"input_ids": encoded_texts["input_ids"], "token_type_ids": encoded_texts["token_type_ids"], + "attention_mask": encoded_texts["attention_mask"]} + + super().fit( + x, encoded_slots, epochs=self.hparams.num_epochs, batch_size=32, shuffle=True) + + # Persist the model + self.extra_params["slot_names"] = slot_names + + self.save() + + @tfbp.runnable + def evaluate(self): + """Evaluation""" + # Load test data + # Assuming your data loader can return test data when mode='test' is specified + encoded_texts, _, encoded_slots, _, slot_names = self.data_loader( + 
self.tokenizer, self.extra_params) + + # Get predictions + predictions = self(encoded_texts) + predicted_slot_ids = np.argmax(predictions, axis=-1) # Shape: (batch_size, sequence_length) + + true_labels = encoded_slots.flatten() + pred_labels = predicted_slot_ids.flatten() + + # Filter out padding tokens (assuming padding label id is 0) + mask = true_labels != 0 + filtered_true_labels = true_labels[mask] + filtered_pred_labels = pred_labels[mask] + + # Adjust labels to start from 0 (since padding label 0 is removed) + filtered_true_labels -= 1 + filtered_pred_labels -= 1 + + # Get slot names excluding padding + slot_names_no_pad = self.extra_params["slot_names"][1:] # Exclude padding label + + + report = classification_report( + filtered_true_labels, + filtered_pred_labels, + target_names=slot_names_no_pad, + zero_division=0 + ) + + print(report) + + # Optionally, you can return the report as a string or dictionary + return report + + @tfbp.runnable + def predict(self): + text = self.data_loader.get_prediction_data() + + info = self.get_prediction(text) + + print(self.summary()) + print("Text : " + text) + print(json.dumps(info, indent=2)) + + return json.dumps(info, indent=2) + + def get_slots_prediction(self, text: str, inputs, slot_probas): + slot_probas_np = slot_probas.numpy() + # Get the indices of the maximum values + slot_ids = slot_probas_np.argmax(axis=-1)[0, :] + + # get all slot names and add to out_dict as keys + out_dict = {} + predicted_slots = set([self.extra_params["slot_names"][s] + for s in slot_ids if s != 0]) + for ps in predicted_slots: + out_dict[ps] = [] + + # retrieving the tokenization that was used in the predictions + tokens = self.tokenizer.convert_ids_to_tokens(inputs["input_ids"][0]) + + # We'd like to eliminate all special tokens from our output + special_tokens = self.tokenizer.special_tokens_map.values() + + for token, slot_id in zip(tokens, slot_ids): + if token in special_tokens: + continue + # add all to out_dict + slot_name = 
self.extra_params["slot_names"][slot_id] + + if slot_name == "": + continue + + # collect tokens + collected_tokens = [token] + idx = tokens.index(token) + + # see if it starts with ## + # then it belongs to the previous token + if token.startswith("##"): + # check if the token already exists or not + if tokens[idx - 1] not in out_dict[slot_name]: + collected_tokens.insert(0, tokens[idx - 1]) + + # add collected tokens to slots + out_dict[slot_name].extend(collected_tokens) + + slot_names_to_ids = {value: key for key, value in enumerate( + self.extra_params["slot_names"])} + + entities = [] + # process out_dict + for slot_name in out_dict: + slot_id = slot_names_to_ids[slot_name] + slot_tokens = out_dict[slot_name] + + slot_value = self.tokenizer.convert_tokens_to_string( + slot_tokens).strip() + + entity = { + "entity": slot_name, + "value": slot_value, + "start": text.find(slot_value), + "end": text.find(slot_value) + len(slot_value), + "confidence": 0, + } + + # The confidence of a slot is the average confidence of tokens in that slot. 
+ indices = [tokens.index(token) for token in slot_tokens] + if len(slot_tokens) > 0: + total = functools.reduce( + lambda proba1, proba2: proba1+proba2, slot_probas_np[0, indices, slot_id], 0) + entity["confidence"] = total / len(slot_tokens) + else: + entity["confidence"] = 0 + + entities.append(entity) + + return entities + + + def get_prediction(self, text: str): + inputs = self.data_loader.encode_text(text, self.tokenizer) + slot_probas = self(inputs) # type: ignore + + entities = [] + if slot_probas is not None: + entities = self.get_slots_prediction(text, inputs, slot_probas) + + return { + "text": text, + "entities": entities, + } diff --git a/nlu/requirements.txt b/nlu/requirements.txt index 54891740..de0fb47e 100644 --- a/nlu/requirements.txt +++ b/nlu/requirements.txt @@ -6,3 +6,4 @@ scikit_learn==1.2.2 fastapi==0.100.0 uvicorn[standard]==0.23.1 autopep8==2.0.2 +h5py --only-binary=h5py diff --git a/nlu/utils/json_helper.py b/nlu/utils/json_helper.py index 7292e72b..c22a6e04 100644 --- a/nlu/utils/json_helper.py +++ b/nlu/utils/json_helper.py @@ -4,7 +4,7 @@ import json class JsonHelper: data_folder: str - def __init__(self, model:str="jisf"): + def __init__(self, model:str = "intent_classifier"): self.data_folder=os.path.join("data",model) def read_dataset_json_file(self, filename): diff --git a/package-lock.json b/package-lock.json index 5ac837dd..00fa48e8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -7,6 +7,7 @@ "": { "name": "hexabot", "version": "2.0.0", + "license": "AGPL-3.0-only", "workspaces": [ "frontend", "widget" @@ -19,7 +20,8 @@ }, "frontend": { "name": "hexabot-ui", - "version": "0.1.0", + "version": "2.0.0", + "license": "AGPL-3.0-only", "dependencies": { "@chatscope/chat-ui-kit-react": "^2.0.3", "@chatscope/chat-ui-kit-styles": "^1.4.0", @@ -9802,7 +9804,8 @@ }, "widget": { "name": "hexabot-widget", - "version": "0.0.0", + "version": "2.0.0", + "license": "AGPL-3.0-only", "dependencies": { "@types/emoji-js": "^3.5.2", 
"autolinker": "^4.0.0",