feat(api): add plugin to automate flow generation

yassinedorbozgithub 2024-10-18 17:23:26 +01:00
parent 57a04da0e9
commit 48d795df26
7 changed files with 408 additions and 1 deletion

View File

@@ -95,6 +95,12 @@ import { SubscriberService } from './services/subscriber.service';
ChatService,
BotService,
],
exports: [SubscriberService, MessageService, LabelService, BlockService],
exports: [
SubscriberService,
MessageService,
LabelService,
BlockService,
CategoryService,
],
})
export class ChatModule {}

View File

@@ -91,6 +91,11 @@ export class ChatService {
}
}
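/**
* Handles the flow generated hook and broadcasts the resulting actions to the websocket clients
*
* @param actions - The action identifiers emitted by the Gemini flow plugin
*/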
@OnEvent('hook:gemini:flowGenerated')
async handleReceivedTextToActions(actions: string[]) {
this.websocketGateway.broadcastTextToActions(actions);
}
/**
* Creates the received message and broadcast it to the websocket "Message" room
*

View File

@@ -0,0 +1,82 @@
# Google Gemini Flow Plugin for Hexabot Chatbot Builder
The Gemini Flow Plugin allows you to integrate Google Generative AI into your Hexabot chatbot workflows, enabling the creation of AI-generated conversation flows. This plugin provides settings for customizing responses, including the maximum length of the generated content and the number of recent conversation messages used as context for generating more relevant replies.
[Hexabot](https://hexabot.ai/) is an open-source chatbot / agent solution that allows users to create and manage AI-powered, multi-channel, and multilingual chatbots with ease. If you would like to learn more, please visit the [official GitHub repository](https://github.com/Hexastack/Hexabot/).
## Prerequisites
Before setting up the Gemini Flow Plugin, you will need to generate an API token from Google's Generative AI platform.
1. Go to the [Google Generative AI API page](https://ai.google.dev/gemini-api).
2. Select **"Develop in your own environment"** to generate your API token.
3. Once you have your API token, you can proceed to configure the plugin within Hexabot.
## Installation
First, navigate to your Hexabot project directory and make sure the dependencies are installed:
```sh
cd ~/projects/Hexabot
npm install
```
## Configuration
The Gemini Flow Plugin provides several customizable settings that can be configured through the Hexabot admin interface. The following settings are available (an illustrative configuration sketch follows the list):
- **Token**: Your Google API token for authentication. This is a required field.
- **Model**: The model to be used for generating responses (e.g., `gemini-1.5-flash`).
- **Temperature**: Controls the creativity of the response. Higher values lead to more creative outputs.
- **Max Length**: Specifies the maximum length of the response generated by the AI.
- **Messages to Retrieve**: The number of recent messages to include as context when sending a request to the AI.
- **Context**: A description or context about the chatbot that is passed along with each request to the API.
- **Instructions**: Custom instructions for how the AI should handle each request, based on your use case.
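As a quick reference, here is an illustrative sketch of these settings using the plugin's default values; the token is a placeholder and the `context` / `instructions` values are abbreviated. Configure the real values in the Hexabot admin interface.
```ts
// Illustrative only — these mirror the plugin's default settings.
const geminiFlowSettings = {
  token: 'YOUR_GOOGLE_API_TOKEN', // required
  model: 'gemini-1.5-flash',
  temperature: 0.8, // higher = more creative output
  maxLength: 4048, // maximum output tokens
  messagesToRetrieve: 5, // recent messages used as context
  context: 'Help the client to generate a JSON respecting the TypeScript type below ...',
  instructions: 'STEPS are ordered from the most specific topic to the most general topic. ...',
};
```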
## How to Use
1. Go to the Hexabot Visual Editor.
2. Drag and drop the **Google Gemini** block from the "Custom Blocks" section onto the canvas.
3. Double-click the block to edit its settings.
## Example
Here's an example of a typical prompt that the plugin generates when sending a request to the API:
```
CONTEXT: You are an AI Chatbot that works for Hexastack. This is their description: ...
DOCUMENTS:
DOCUMENT 1
Title: Example Title
Data: Example Data...
RECENT MESSAGES:
- Message 1: Hello, how can I help you?
- Message 2: Can you provide more information about the company?
INSTRUCTIONS:
Answer the user QUESTION using the DOCUMENTS text above...
QUESTION: What services does Hexastack offer?
```
The plugin will then use this prompt to generate a response via the Google Generative AI API.
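When the user's message contains both the keywords `generate` and `flow`, the plugin also expects the model's reply to include a JSON array of flow steps that matches the TypeScript type defined in the **Context** setting. Below is a hypothetical, abbreviated example of such an array (a real flow has 5 to 7 steps):
```ts
// Hypothetical model output — the field names follow the type declared in the "Context" setting.
const generatedFlow = [
  {
    shortTitle: 'Budget',
    question: 'What is your budget range?',
    respond: '',
    step: 1,
    action: 'collect_budget',
    type: 'multiple-chooses',
    options: ['Under 1k', '1k to 5k', 'Over 5k'], // at most 3 options, each under 20 characters
  },
  {
    shortTitle: 'Confirmation',
    question: '',
    respond: 'Thank you, your request has been recorded.',
    step: 2,
    action: 'confirm',
    type: 'text', // only the last step uses the 'text' type
  },
];
```
Each step is turned into a block inside an `AI Flow` category in the Visual Editor, and consecutive blocks are linked together so the generated flow can be reviewed and edited right away.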
## Contributing
We welcome contributions from the community! Whether you want to report a bug, suggest new features, or submit a pull request, your input is valuable to us.
Please refer to our contribution policy first: [How to contribute to Hexabot](./CONTRIBUTING.md)
[![Contributor Covenant](https://img.shields.io/badge/Contributor%20Covenant-2.1-4baaaa.svg)](./CODE_OF_CONDUCT.md)
Feel free to join us on [Discord](https://discord.gg/rNb9t2MFkG).
## License
This software is licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
---
_Happy Chatbot Building!_

View File

@@ -0,0 +1,287 @@
import { GoogleGenerativeAI } from '@google/generative-ai'; // Importing Google Generative AI
import { Injectable } from '@nestjs/common';
import { EventEmitter2 } from '@nestjs/event-emitter';
import { Block } from '@/chat/schemas/block.schema';
import { Context } from '@/chat/schemas/types/context';
import {
OutgoingMessageFormat,
StdOutgoingTextEnvelope,
} from '@/chat/schemas/types/message';
import { BlockService } from '@/chat/services/block.service';
import { CategoryService } from '@/chat/services/category.service';
import { MessageService } from '@/chat/services/message.service';
import { ContentService } from '@/cms/services/content.service';
import { LoggerService } from '@/logger/logger.service';
import { BaseBlockPlugin } from '@/plugins/base-block-plugin';
import { PluginService } from '@/plugins/plugins.service';
import { SettingType } from '@/setting/schemas/types';
@Injectable()
export class GeminiFlowPlugin extends BaseBlockPlugin {
private generativeAI: GoogleGenerativeAI;
constructor(
pluginService: PluginService,
private logger: LoggerService,
private contentService: ContentService,
private messageService: MessageService,
private blockService: BlockService,
private categoryService: CategoryService,
private eventEmitter: EventEmitter2,
) {
super('gemini-flow-plugin', pluginService);
this.settings = [
{
id: 'token',
label: 'Token',
group: 'default',
type: SettingType.secret,
value: '',
},
{
id: 'model',
label: 'Model',
group: 'default',
type: SettingType.text,
value: 'gemini-1.5-flash',
},
{
id: 'temperature',
label: 'Temperature',
group: 'default',
type: SettingType.number,
value: 0.8,
},
{
id: 'maxLength',
label: 'Max Length',
group: 'default',
type: SettingType.number,
value: 4048, // Default value for max length
},
{
id: 'messagesToRetrieve',
label: 'Messages to Retrieve',
group: 'default',
type: SettingType.number,
value: 5, // Default number of messages to retrieve for context
},
{
id: 'context',
label: 'Context',
group: 'default',
type: SettingType.textarea,
value: `Help the client to generate a JSON respecting the TypeScript type below
type TBlocks =
| {
shortTitle: string;
question: string;
respond: string;
step: number;
action: string;
type: 'text';
}
| {
shortTitle: string;
question: string;
respond: string;
step: number;
action: string;
type: 'multiple-chooses';
options: string[];
};`,
},
{
id: 'instructions',
label: 'Instructions',
group: 'default',
type: SettingType.textarea,
value: `STEPS are ordered from the most specific topic to the most general topic.
NO welcome STEP.
Only the last STEP must be of type 'text' and contain a confirmation of the operation, without questions.
The flow must have between 5 and 7 STEPS.
A 'multiple-chooses' STEP has at most 3 OPTIONS.
An OPTION must not exceed 20 characters.
OPTIONS must be informative; no actions are allowed.`,
},
];
this.title = 'Gemini Flow Plugin';
this.template = { name: 'Gemini Flow Plugin' };
this.effects = {
onStoreContextData: () => {},
};
}
private async getMessagesContext(context: Context, messagesToRetrieve = 5) {
// Retrieve the last few messages for context
const recentMessages = await this.messageService.findPage(
{
$or: [{ sender: context.user.id }, { recipient: context.user.id }],
},
{ sort: ['createdAt', 'desc'], skip: 0, limit: messagesToRetrieve },
);
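// Reorder chronologically (oldest first) and format each entry as a "user:" or "bot:" line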
const messagesContext = recentMessages
.reverse()
.map((m) => {
const text =
'text' in m.message && m.message.text
? m.message.text
: JSON.stringify(m.message);
return 'sender' in m && m.sender ? `user: ${text}` : `bot: ${text}`;
})
.join('\n');
return messagesContext;
}
async process(block: Block, context: Context, _convId: string) {
const ragContent = await this.contentService.textSearch(context.text);
const args = block.message['args'];
const client = this.getInstance(args.token);
const model = client.getGenerativeModel({
model: args['model'],
generationConfig: {
/*
=====================================================================
Check the documentation for more details on the generation config
https://ai.google.dev/api/generate-content#v1beta.GenerationConfig
=====================================================================
*/
// controls the randomness of the output. Use higher values for more creative responses,
// and lower values for more deterministic responses. Values can range from [0.0, 2.0].
temperature: args['temperature'],
maxOutputTokens: args['maxLength'] || 256, // Use maxLength setting for the response length
},
});
const messagesContext = await this.getMessagesContext(
context,
args['messagesToRetrieve'],
);
const prompt = [
`CONTEXT: ${args.context}`,
`DOCUMENTS:`,
...ragContent.map(
(curr, index) =>
`\tDOCUMENT ${index + 1} \n\t\tTitle: ${curr.title} \n\t\tData: ${curr.rag}`,
),
`RECENT MESSAGES:`,
messagesContext,
`INSTRUCTIONS:`,
args.instructions,
`QUESTION:`,
context.text,
].join('\n');
this.logger.debug('Gemini: Prompt', prompt);
const result = await model.generateContent(prompt);
const textResponse = result.response.text();
let envelope: StdOutgoingTextEnvelope = {
format: OutgoingMessageFormat.text,
message: {
text: 'This case is not supported!',
},
};
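// Only generate a flow when the model returned a JSON array and the user message contains both "generate" and "flow"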
const hasJSONResponse = /\[[^]+\]/.test(textResponse);
const hasHotKeys = ['generate', 'flow'].every((key) =>
context.text.includes(key),
);
if (hasJSONResponse && hasHotKeys) {
// hasJSONResponse guarantees a match here
const [generatedStepsText] = textResponse.match(/\[[^]+\]/) as RegExpMatchArray;
//TODO cover exception (malformed JSON from the model)
const generatedStepsTextParsed: any[] = JSON.parse(generatedStepsText);
const { id: aiGeneratedFlowId } =
await this.categoryService.findOneOrCreate(
{ label: 'AI Flow' },
{ label: 'AI Flow', zoom: 40, offset: [90, 140] },
);
await this.blockService.deleteMany({ category: aiGeneratedFlowId });
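// Create one block per generated step; only the first block starts the conversation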
const blocks = (await Promise.all(
generatedStepsTextParsed.map(async (generatedStepTextParsed, index) => {
return await this.blockService.create({
patterns: [index === 0 ? 'yes' : generatedStepTextParsed.question],
options: {
typing: 0,
fallback: { active: false, max_attempts: 1, message: [] },
effects: [],
},
message: (generatedStepTextParsed.type === 'multiple-chooses' &&
generatedStepTextParsed.options
? {
quickReplies: (generatedStepTextParsed.options as any[]).map(
(option) => ({
content_type: 'text',
title: String(option),
payload: String(option),
}),
),
text: generatedStepTextParsed.question,
}
: [generatedStepTextParsed.respond]) as any,
starts_conversation: index === 0,
category: aiGeneratedFlowId,
name: String(generatedStepTextParsed.shortTitle),
position: { x: 430 * index, y: 20 * index },
});
}),
)) as unknown as Block[];
// Link the blocks sequentially: each block inherits its patterns from the previous block's quick replies
await Promise.all(
blocks.map((block, index) =>
this.blockService.updateOne(
{ _id: block.id },
{
patterns:
index === 0
? block.patterns
: blocks[index - 1].message['quickReplies'].map(
(quickReply) => quickReply.payload,
),
nextBlocks: blocks[index + 1] ? [blocks[index + 1].id] : [],
},
),
),
);
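// Notify ChatService via the 'hook:gemini:flowGenerated' event so it can broadcast the navigation action to the UI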
this.eventEmitter.emit('hook:gemini:flowGenerated', ['navigateToAiFlow']);
envelope = {
...envelope,
message: {
...envelope.message,
text: 'Do you want my assistance?',
},
};
}
return envelope;
}
private getInstance(token: string) {
if (this.generativeAI) {
return this.generativeAI;
}
try {
this.generativeAI = new GoogleGenerativeAI(token);
return this.generativeAI;
} catch (err) {
this.logger.warn('Gemini: Unable to instantiate GoogleGenerativeAI', err);
// Re-throw so callers never receive an undefined client
throw err;
}
}
}

View File

@@ -0,0 +1,10 @@
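// Extends the event-emitter hook typings so the 'hook:gemini:flowGenerated' event carries a string[] payload of UI actions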
declare module '@nestjs/event-emitter' {
interface IHookSettingsGroupLabelOperationMap {
gemini: TDefinition<
object,
{
flowGenerated: string[];
}
>;
}
}

View File

@@ -0,0 +1,10 @@
{
"name": "hexabot-channel-messenger",
"version": "2.0.0",
"description": "The Google Gemini Plugin for Hexabot Chatbot / Agent Builder to enable the LLM RAG Capability",
"dependencies": {
"@google/generative-ai": "^0.19.0"
},
"author": "Hexastack",
"license": "AGPL-3.0-only"
}

View File

@@ -62,6 +62,13 @@ export class WebsocketGateway
});
}
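/**
* Broadcasts a list of action identifiers (e.g. those emitted by the Gemini flow plugin) to the subscribers room
*
* @param actions - The action identifiers to broadcast
*/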
broadcastTextToActions(actions: string[]): void {
this.io.to(Room.SUBSCRIBER).emit('actions', {
op: 'textToActions',
msg: actions,
});
}
broadcastMessageReceived(
message: MessageFull,
subscriber: Subscriber | SubscriberFull,