mirror of
https://github.com/hexastack/hexabot
synced 2025-06-26 18:27:28 +00:00
feat: add support to filter samples by entities/values
This commit is contained in:
parent
e8bf38440a
commit
ad684676a7
@ -29,17 +29,21 @@ import {
|
||||
import { FileInterceptor } from '@nestjs/platform-express';
|
||||
import { CsrfCheck } from '@tekuconcept/nestjs-csrf';
|
||||
import { Response } from 'express';
|
||||
import { z } from 'zod';
|
||||
|
||||
import { NlpPattern, nlpPatternSchema } from '@/chat/schemas/types/pattern';
|
||||
import { HelperService } from '@/helper/helper.service';
|
||||
import { HelperType } from '@/helper/types';
|
||||
import { LanguageService } from '@/i18n/services/language.service';
|
||||
import { CsrfInterceptor } from '@/interceptors/csrf.interceptor';
|
||||
import { Roles } from '@/utils/decorators/roles.decorator';
|
||||
import { BaseController } from '@/utils/generics/base-controller';
|
||||
import { DeleteResult } from '@/utils/generics/base-repository';
|
||||
import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
|
||||
import { PageQueryPipe } from '@/utils/pagination/pagination-query.pipe';
|
||||
import { PopulatePipe } from '@/utils/pipes/populate.pipe';
|
||||
import { SearchFilterPipe } from '@/utils/pipes/search-filter.pipe';
|
||||
import { ZodQueryParamPipe } from '@/utils/pipes/zod.pipe';
|
||||
import { TFilterQuery } from '@/utils/types/filter.types';
|
||||
|
||||
import { NlpSampleDto, TNlpSampleDto } from '../dto/nlp-sample.dto';
|
||||
@ -177,6 +181,7 @@ export class NlpSampleController extends BaseController<
|
||||
*
|
||||
* @returns The count of samples that match the filters.
|
||||
*/
|
||||
@Roles('public')
|
||||
@Get('count')
|
||||
async filterCount(
|
||||
@Query(
|
||||
@ -184,8 +189,18 @@ export class NlpSampleController extends BaseController<
|
||||
allowedFields: ['text', 'type', 'language'],
|
||||
}),
|
||||
)
|
||||
filters?: TFilterQuery<NlpSample>,
|
||||
filters: TFilterQuery<NlpSample> = {},
|
||||
@Query(
|
||||
new ZodQueryParamPipe(
|
||||
z.array(nlpPatternSchema),
|
||||
(q) => q?.where?.patterns,
|
||||
),
|
||||
)
|
||||
patterns: NlpPattern[] = [],
|
||||
) {
|
||||
if (patterns.length) {
|
||||
return await this.nlpSampleService.countByPatterns({ filters, patterns });
|
||||
}
|
||||
return await this.count(filters);
|
||||
}
|
||||
|
||||
@ -276,6 +291,7 @@ export class NlpSampleController extends BaseController<
|
||||
* @returns A paginated list of NLP samples.
|
||||
*/
|
||||
@Get()
|
||||
@Roles('public')
|
||||
async findPage(
|
||||
@Query(PageQueryPipe) pageQuery: PageQueryDto<NlpSample>,
|
||||
@Query(PopulatePipe) populate: string[],
|
||||
@ -285,7 +301,25 @@ export class NlpSampleController extends BaseController<
|
||||
}),
|
||||
)
|
||||
filters: TFilterQuery<NlpSample>,
|
||||
@Query(
|
||||
new ZodQueryParamPipe(
|
||||
z.array(nlpPatternSchema),
|
||||
(q) => q?.where?.patterns,
|
||||
),
|
||||
)
|
||||
patterns: NlpPattern[] = [],
|
||||
) {
|
||||
if (patterns.length) {
|
||||
return this.canPopulate(populate)
|
||||
? await this.nlpSampleService.findByPatternsAndPopulate(
|
||||
{ filters, patterns },
|
||||
pageQuery,
|
||||
)
|
||||
: await this.nlpSampleService.findByPatterns(
|
||||
{ filters, patterns },
|
||||
pageQuery,
|
||||
);
|
||||
}
|
||||
return this.canPopulate(populate)
|
||||
? await this.nlpSampleService.findAndPopulate(filters, pageQuery)
|
||||
: await this.nlpSampleService.find(filters, pageQuery);
|
||||
|
@ -8,15 +8,27 @@
|
||||
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { InjectModel } from '@nestjs/mongoose';
|
||||
import { Document, Model, Query } from 'mongoose';
|
||||
import { plainToClass } from 'class-transformer';
|
||||
import {
|
||||
Aggregate,
|
||||
Document,
|
||||
Model,
|
||||
PipelineStage,
|
||||
ProjectionType,
|
||||
Query,
|
||||
Types,
|
||||
} from 'mongoose';
|
||||
|
||||
import { BaseRepository, DeleteResult } from '@/utils/generics/base-repository';
|
||||
import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
|
||||
import { TFilterQuery } from '@/utils/types/filter.types';
|
||||
|
||||
import { TNlpSampleDto } from '../dto/nlp-sample.dto';
|
||||
import { NlpSampleEntity } from '../schemas/nlp-sample-entity.schema';
|
||||
import {
|
||||
NLP_SAMPLE_POPULATE,
|
||||
NlpSample,
|
||||
NlpSampleDocument,
|
||||
NlpSampleFull,
|
||||
NlpSamplePopulate,
|
||||
} from '../schemas/nlp-sample.schema';
|
||||
@ -32,11 +44,167 @@ export class NlpSampleRepository extends BaseRepository<
|
||||
> {
|
||||
constructor(
  @InjectModel(NlpSample.name) readonly model: Model<NlpSample>,
  // Direct access to the link collection (sample <-> entity/value) is needed
  // by the aggregation helpers below, which start their pipelines from it.
  @InjectModel(NlpSampleEntity.name)
  readonly sampleEntityModel: Model<NlpSampleEntity>,
) {
  super(model, NlpSample, NLP_SAMPLE_POPULATE, NlpSampleFull);
}
|
||||
|
||||
/**
 * Builds the shared aggregation stages used by the find/count helpers:
 * select sample-entity link documents matching the given entity/value ids,
 * then join each link to its parent sample while applying sample-side
 * filters inside the $lookup sub-pipeline.
 *
 * @param criterias.filters - Sample-side filters (applied early, inside the join).
 * @param criterias.entityIds - Entity ObjectIds resolved from patterns.
 * @param criterias.valueIds - Value ObjectIds resolved from patterns.
 * @returns Pipeline stages ending with the joined sample unwound into `sample`.
 */
buildFindByEntitiesStages({
  filters,
  entityIds,
  valueIds,
}: {
  filters: TFilterQuery<NlpSample>;
  entityIds: Types.ObjectId[];
  valueIds: Types.ObjectId[];
}): PipelineStage[] {
  // NOTE(review): when both id arrays are empty this match is `{}` and keeps
  // every link document — confirm callers always resolve at least one id.
  const linkMatch = {
    ...(entityIds.length && { entity: { $in: entityIds } }),
    ...(valueIds.length && { value: { $in: valueIds } }),
  };

  // Sample-side conditions; `language` values are coerced to ObjectIds so
  // they compare correctly against the stored reference field.
  const sampleConditions = filters?.$and
    ? {
        $and: filters.$and?.map((condition) =>
          'language' in condition && condition.language
            ? { language: new Types.ObjectId(condition.language) }
            : condition,
        ),
      }
    : {};

  return [
    // Pick link docs whose entity / value matches a pattern.
    { $match: linkMatch },
    // Join to the real sample *and* apply sample-side filters early.
    {
      $lookup: {
        from: 'nlpsamples',
        let: { sampleId: '$sample' },
        pipeline: [
          {
            $match: {
              $expr: { $eq: ['$_id', '$$sampleId'] },
              ...sampleConditions,
            },
          },
        ],
        as: 'sample',
      },
    },
    { $unwind: '$sample' },
  ];
}
|
||||
|
||||
findByEntitiesAggregation(
|
||||
criterias: {
|
||||
filters: TFilterQuery<NlpSample>;
|
||||
entityIds: Types.ObjectId[];
|
||||
valueIds: Types.ObjectId[];
|
||||
},
|
||||
page?: PageQueryDto<NlpSample>,
|
||||
projection?: ProjectionType<NlpSample>,
|
||||
): Aggregate<NlpSampleDocument[]> {
|
||||
return this.sampleEntityModel.aggregate<NlpSampleDocument>([
|
||||
...this.buildFindByEntitiesStages(criterias),
|
||||
|
||||
// promote the sample document
|
||||
{ $replaceRoot: { newRoot: '$sample' } },
|
||||
|
||||
// sort / skip / limit
|
||||
...this.buildPaginationPipelineStages(page),
|
||||
|
||||
// projection
|
||||
...(projection
|
||||
? [
|
||||
{
|
||||
$project:
|
||||
typeof projection === 'string'
|
||||
? { [projection]: 1 }
|
||||
: projection,
|
||||
},
|
||||
]
|
||||
: []),
|
||||
]);
|
||||
}
|
||||
|
||||
async findByEntities(
|
||||
criterias: {
|
||||
filters: TFilterQuery<NlpSample>;
|
||||
entityIds: Types.ObjectId[];
|
||||
valueIds: Types.ObjectId[];
|
||||
},
|
||||
page?: PageQueryDto<NlpSample>,
|
||||
projection?: ProjectionType<NlpSample>,
|
||||
): Promise<NlpSample[]> {
|
||||
const aggregation = this.findByEntitiesAggregation(
|
||||
criterias,
|
||||
page,
|
||||
projection,
|
||||
);
|
||||
|
||||
const resultSet = await aggregation.exec();
|
||||
return resultSet.map((doc) =>
|
||||
plainToClass(NlpSample, doc, this.transformOpts),
|
||||
);
|
||||
}
|
||||
|
||||
async findByEntitiesAndPopulate(
|
||||
criterias: {
|
||||
filters: TFilterQuery<NlpSample>;
|
||||
entityIds: Types.ObjectId[];
|
||||
valueIds: Types.ObjectId[];
|
||||
},
|
||||
page?: PageQueryDto<NlpSample>,
|
||||
projection?: ProjectionType<NlpSample>,
|
||||
): Promise<NlpSampleFull[]> {
|
||||
const aggregation = this.findByEntitiesAggregation(
|
||||
criterias,
|
||||
page,
|
||||
projection,
|
||||
);
|
||||
|
||||
const docs = await aggregation.exec();
|
||||
|
||||
const populatedResultSet = await this.populate(docs);
|
||||
|
||||
return populatedResultSet.map((doc) =>
|
||||
plainToClass(NlpSampleFull, doc, this.transformOpts),
|
||||
);
|
||||
}
|
||||
|
||||
countByEntitiesAggregation(criterias: {
|
||||
filters: TFilterQuery<NlpSample>;
|
||||
entityIds: Types.ObjectId[];
|
||||
valueIds: Types.ObjectId[];
|
||||
}): Aggregate<{ count: number }[]> {
|
||||
return this.sampleEntityModel.aggregate<{ count: number }>([
|
||||
...this.buildFindByEntitiesStages(criterias),
|
||||
|
||||
// Collapse duplicates: one bucket per unique sample
|
||||
{ $group: { _id: '$sample._id' } },
|
||||
|
||||
// Final count
|
||||
{ $count: 'count' },
|
||||
]);
|
||||
}
|
||||
|
||||
async countByEntities(criterias: {
|
||||
filters: TFilterQuery<NlpSample>;
|
||||
entityIds: Types.ObjectId[];
|
||||
valueIds: Types.ObjectId[];
|
||||
}): Promise<{ count: number }> {
|
||||
const aggregation = this.countByEntitiesAggregation(criterias);
|
||||
|
||||
const [result] = await aggregation.exec();
|
||||
|
||||
return { count: result?.count || 0 };
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes NLP sample entities associated with the provided criteria before deleting the sample itself.
|
||||
*
|
||||
|
@ -10,7 +10,9 @@ import { CACHE_MANAGER } from '@nestjs/cache-manager';
|
||||
import { BadRequestException, Inject, Injectable } from '@nestjs/common';
|
||||
import { OnEvent } from '@nestjs/event-emitter';
|
||||
import { Cache } from 'cache-manager';
|
||||
import { Types } from 'mongoose';
|
||||
|
||||
import { NlpPattern } from '@/chat/schemas/types/pattern';
|
||||
import { NLP_MAP_CACHE_KEY } from '@/utils/constants/cache';
|
||||
import { Cacheable } from '@/utils/decorators/cacheable.decorator';
|
||||
import { BaseService } from '@/utils/generics/base-service';
|
||||
@ -72,6 +74,19 @@ export class NlpEntityService extends BaseService<
|
||||
return await this.repository.updateOne(id, { weight: updatedWeight });
|
||||
}
|
||||
|
||||
/**
 * Resolves entity-match patterns to the ObjectIds of the referenced entities.
 * Kept in this service because it relies on its entity lookup.
 *
 * @param patterns - NLP patterns; only `match === 'entity'` ones are used.
 * @returns ObjectIds of the entities whose name appears in the patterns.
 */
async findObjectIdsByPatterns(patterns: NlpPattern[]) {
  const entityNames = patterns
    .filter((p) => p.match === 'entity')
    .map((p) => p.entity);

  const entities = await this.find({ name: { $in: entityNames } });

  return entities.map((e) => new Types.ObjectId(e.id));
}
|
||||
|
||||
/**
|
||||
* Stores new entities based on the sample text and sample entities.
|
||||
* Deletes all values relative to this entity before deleting the entity itself.
|
||||
|
@ -12,14 +12,16 @@ import {
|
||||
NotFoundException,
|
||||
} from '@nestjs/common';
|
||||
import { OnEvent } from '@nestjs/event-emitter';
|
||||
import { Document, Query } from 'mongoose';
|
||||
import { Document, ProjectionType, Query } from 'mongoose';
|
||||
import Papa from 'papaparse';
|
||||
|
||||
import { Message } from '@/chat/schemas/message.schema';
|
||||
import { NlpPattern } from '@/chat/schemas/types/pattern';
|
||||
import { Language } from '@/i18n/schemas/language.schema';
|
||||
import { LanguageService } from '@/i18n/services/language.service';
|
||||
import { DeleteResult } from '@/utils/generics/base-repository';
|
||||
import { BaseService } from '@/utils/generics/base-service';
|
||||
import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
|
||||
import { TFilterQuery, THydratedDocument } from '@/utils/types/filter.types';
|
||||
|
||||
import { NlpSampleEntityCreateDto } from '../dto/nlp-sample-entity.dto';
|
||||
@ -35,6 +37,7 @@ import { NlpSampleEntityValue, NlpSampleState } from '../schemas/types';
|
||||
|
||||
import { NlpEntityService } from './nlp-entity.service';
|
||||
import { NlpSampleEntityService } from './nlp-sample-entity.service';
|
||||
import { NlpValueService } from './nlp-value.service';
|
||||
|
||||
@Injectable()
|
||||
export class NlpSampleService extends BaseService<
|
||||
@ -47,6 +50,7 @@ export class NlpSampleService extends BaseService<
|
||||
readonly repository: NlpSampleRepository,
|
||||
private readonly nlpSampleEntityService: NlpSampleEntityService,
|
||||
private readonly nlpEntityService: NlpEntityService,
|
||||
private readonly nlpValueService: NlpValueService,
|
||||
private readonly languageService: LanguageService,
|
||||
) {
|
||||
super(repository);
|
||||
@ -279,6 +283,66 @@ export class NlpSampleService extends BaseService<
|
||||
}
|
||||
}
|
||||
|
||||
async findByPatterns(
|
||||
{
|
||||
filters,
|
||||
patterns,
|
||||
}: {
|
||||
filters: TFilterQuery<NlpSample>;
|
||||
patterns: NlpPattern[];
|
||||
},
|
||||
page?: PageQueryDto<NlpSample>,
|
||||
projection?: ProjectionType<NlpSample>,
|
||||
): Promise<NlpSample[]> {
|
||||
return await this.repository.findByEntities(
|
||||
{
|
||||
filters,
|
||||
entityIds:
|
||||
await this.nlpEntityService.findObjectIdsByPatterns(patterns),
|
||||
valueIds: await this.nlpValueService.findObjectIdsByPatterns(patterns),
|
||||
},
|
||||
page,
|
||||
projection,
|
||||
);
|
||||
}
|
||||
|
||||
async findByPatternsAndPopulate(
|
||||
{
|
||||
filters,
|
||||
patterns,
|
||||
}: {
|
||||
filters: TFilterQuery<NlpSample>;
|
||||
patterns: NlpPattern[];
|
||||
},
|
||||
page?: PageQueryDto<NlpSample>,
|
||||
projection?: ProjectionType<NlpSample>,
|
||||
): Promise<NlpSampleFull[]> {
|
||||
return await this.repository.findByEntitiesAndPopulate(
|
||||
{
|
||||
filters,
|
||||
entityIds:
|
||||
await this.nlpEntityService.findObjectIdsByPatterns(patterns),
|
||||
valueIds: await this.nlpValueService.findObjectIdsByPatterns(patterns),
|
||||
},
|
||||
page,
|
||||
projection,
|
||||
);
|
||||
}
|
||||
|
||||
async countByPatterns({
|
||||
filters,
|
||||
patterns,
|
||||
}: {
|
||||
filters: TFilterQuery<NlpSample>;
|
||||
patterns: NlpPattern[];
|
||||
}): Promise<{ count: number }> {
|
||||
return await this.repository.countByEntities({
|
||||
filters,
|
||||
entityIds: await this.nlpEntityService.findObjectIdsByPatterns(patterns),
|
||||
valueIds: await this.nlpValueService.findObjectIdsByPatterns(patterns),
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent('hook:message:preCreate')
|
||||
async handleNewMessage(doc: THydratedDocument<Message>) {
|
||||
// If message is sent by the user then add it as an inbox sample
|
||||
|
@ -7,7 +7,9 @@
|
||||
*/
|
||||
|
||||
import { forwardRef, Inject, Injectable } from '@nestjs/common';
|
||||
import { Types } from 'mongoose';
|
||||
|
||||
import { NlpPattern } from '@/chat/schemas/types/pattern';
|
||||
import { DeleteResult } from '@/utils/generics/base-repository';
|
||||
import { BaseService } from '@/utils/generics/base-service';
|
||||
import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
|
||||
@ -42,6 +44,19 @@ export class NlpValueService extends BaseService<
|
||||
super(repository);
|
||||
}
|
||||
|
||||
/**
 * Resolves value-match patterns to the ObjectIds of the referenced values.
 * Kept in this service because it relies on its value lookup.
 *
 * Aligned with NlpEntityService.findObjectIdsByPatterns: filter first, then
 * map, instead of mapping to null placeholders and filtering with Boolean.
 * (The Boolean filter also dropped empty-string values; an empty value name
 * cannot match a stored value, so the query result is unaffected.)
 *
 * @param patterns - NLP patterns; only `match === 'value'` ones are used.
 * @returns ObjectIds of the values referenced by the patterns.
 */
async findObjectIdsByPatterns(patterns: NlpPattern[]) {
  const valueNames = patterns
    .filter((p) => p.match === 'value')
    .map((p) => p.value);

  const values = await this.find({ value: { $in: valueNames } });

  return values.map((v) => new Types.ObjectId(v.id));
}
|
||||
|
||||
/**
|
||||
* Deletes an NLP value by its ID, cascading any dependent data.
|
||||
*
|
||||
|
@ -19,6 +19,7 @@ import {
|
||||
FlattenMaps,
|
||||
HydratedDocument,
|
||||
Model,
|
||||
PipelineStage,
|
||||
ProjectionType,
|
||||
Query,
|
||||
SortOrder,
|
||||
@ -31,6 +32,7 @@ import { LoggerService } from '@/logger/logger.service';
|
||||
import {
|
||||
TFilterQuery,
|
||||
TFlattenOption,
|
||||
THydratedDocument,
|
||||
TQueryOptions,
|
||||
} from '@/utils/types/filter.types';
|
||||
|
||||
@ -81,9 +83,13 @@ export abstract class BaseRepository<
|
||||
U extends Omit<T, keyof BaseSchema> = Omit<T, keyof BaseSchema>,
|
||||
D = Document<T>,
|
||||
> {
|
||||
private readonly transformOpts = { excludePrefixes: ['_', 'password'] };
|
||||
protected readonly transformOpts = { excludePrefixes: ['_', 'password'] };
|
||||
|
||||
private readonly leanOpts = { virtuals: true, defaults: true, getters: true };
|
||||
protected readonly leanOpts = {
|
||||
virtuals: true,
|
||||
defaults: true,
|
||||
getters: true,
|
||||
};
|
||||
|
||||
@Inject(EventEmitter2)
|
||||
readonly eventEmitter: EventEmitter2;
|
||||
@ -643,4 +649,39 @@ export abstract class BaseRepository<
|
||||
): Promise<void> {
|
||||
// Nothing ...
|
||||
}
|
||||
|
||||
/**
 * Translates a page query into aggregation stages ($sort, $skip, $limit).
 *
 * @param page - Optional pagination/sorting description.
 * @returns Pipeline stages; empty when no page query is given.
 */
buildPaginationPipelineStages<T>(page?: PageQueryDto<T>): PipelineStage[] {
  if (!page) {
    return [];
  }

  const stages: PipelineStage[] = [];

  if (page.sort) {
    const [field, dir] = page.sort;
    // Numeric directions pass through; string directions map to 1 / -1.
    const order =
      typeof dir === 'number'
        ? dir
        : ['asc', 'ascending'].includes(dir as string)
          ? 1
          : -1;

    stages.push({
      $sort: { [field]: order } as Record<string, 1 | -1>,
    });
  }

  if (page.skip) {
    stages.push({ $skip: page.skip });
  }

  if (page.limit) {
    stages.push({ $limit: page.limit });
  }

  return stages;
}
|
||||
|
||||
/**
 * Populates the repository's configured relation paths on the given
 * documents (lean sub-documents, no hydration of the relations).
 *
 * @param docs - Hydrated documents to populate in place.
 * @returns The populated documents.
 */
async populate(docs: THydratedDocument<T>[]) {
  const populateOptions = this.populatePaths.map((path) => ({
    path,
    options: { lean: true },
  }));

  return await this.model.populate(docs, populateOptions);
}
|
||||
}
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright © 2024 Hexastack. All rights reserved.
|
||||
* Copyright © 2025 Hexastack. All rights reserved.
|
||||
*
|
||||
* Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
|
||||
* 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
|
||||
@ -17,7 +17,7 @@ import {
|
||||
Typography,
|
||||
useTheme,
|
||||
} from "@mui/material";
|
||||
import Autocomplete from "@mui/material/Autocomplete";
|
||||
import Autocomplete, { AutocompleteProps } from "@mui/material/Autocomplete";
|
||||
import { forwardRef, SyntheticEvent, useRef } from "react";
|
||||
|
||||
import { Input } from "@/app-components/inputs/Input";
|
||||
@ -30,13 +30,24 @@ import { NlpPattern } from "@/types/block.types";
|
||||
import { INlpEntity } from "@/types/nlp-entity.types";
|
||||
import { INlpValue } from "@/types/nlp-value.types";
|
||||
|
||||
type NlpPatternSelectProps = {
|
||||
interface NlpPatternSelectProps
|
||||
extends Omit<
|
||||
AutocompleteProps<INlpEntity, true, true, false>,
|
||||
| "onChange"
|
||||
| "value"
|
||||
| "options"
|
||||
| "multiple"
|
||||
| "disabled"
|
||||
| "renderTags"
|
||||
| "renderOptions"
|
||||
| "renderInput"
|
||||
> {
|
||||
patterns: NlpPattern[];
|
||||
onChange: (patterns: NlpPattern[]) => void;
|
||||
};
|
||||
}
|
||||
|
||||
const NlpPatternSelect = (
|
||||
{ patterns, onChange }: NlpPatternSelectProps,
|
||||
{ patterns, onChange, ...props }: NlpPatternSelectProps,
|
||||
ref,
|
||||
) => {
|
||||
const inputRef = useRef(null);
|
||||
@ -116,8 +127,8 @@ const NlpPatternSelect = (
|
||||
return (
|
||||
<Autocomplete
|
||||
ref={ref}
|
||||
{...props}
|
||||
size="medium"
|
||||
fullWidth={true}
|
||||
disabled={options.length === 0}
|
||||
value={defaultValue}
|
||||
multiple={true}
|
||||
|
@ -32,6 +32,7 @@ import AutoCompleteEntitySelect from "@/app-components/inputs/AutoCompleteEntity
|
||||
import FileUploadButton from "@/app-components/inputs/FileInput";
|
||||
import { FilterTextfield } from "@/app-components/inputs/FilterTextfield";
|
||||
import { Input } from "@/app-components/inputs/Input";
|
||||
import NlpPatternSelect from "@/app-components/inputs/NlpPatternSelect";
|
||||
import {
|
||||
ActionColumnLabel,
|
||||
getActionsColumn,
|
||||
@ -51,6 +52,7 @@ import { useSearch } from "@/hooks/useSearch";
|
||||
import { useToast } from "@/hooks/useToast";
|
||||
import { useTranslate } from "@/hooks/useTranslate";
|
||||
import { EntityType, Format } from "@/services/types";
|
||||
import { NlpPattern } from "@/types/block.types";
|
||||
import { ILanguage } from "@/types/language.types";
|
||||
import {
|
||||
INlpDatasetSample,
|
||||
@ -79,6 +81,7 @@ export default function NlpSample() {
|
||||
const queryClient = useQueryClient();
|
||||
const [type, setType] = useState<NlpSampleType | "all">("all");
|
||||
const [language, setLanguage] = useState<string | undefined>(undefined);
|
||||
const [patterns, setPatterns] = useState<NlpPattern[]>([]);
|
||||
const hasPermission = useHasPermission();
|
||||
const getNlpEntityFromCache = useGetFromCache(EntityType.NLP_ENTITY);
|
||||
const getNlpValueFromCache = useGetFromCache(EntityType.NLP_VALUE);
|
||||
@ -86,11 +89,14 @@ export default function NlpSample() {
|
||||
EntityType.NLP_SAMPLE_ENTITY,
|
||||
);
|
||||
const getLanguageFromCache = useGetFromCache(EntityType.LANGUAGE);
|
||||
const { onSearch, searchPayload, searchText } = useSearch<INlpSample>(
|
||||
const { onSearch, searchPayload, searchText } = useSearch<
|
||||
INlpSample & { patterns: NlpPattern[] }
|
||||
>(
|
||||
{
|
||||
$eq: [
|
||||
...(type !== "all" ? [{ type }] : []),
|
||||
...(language ? [{ language }] : []),
|
||||
...(patterns ? [{ patterns }] : []),
|
||||
],
|
||||
$iLike: ["text"],
|
||||
},
|
||||
@ -425,6 +431,22 @@ export default function NlpSample() {
|
||||
</Button>
|
||||
</ButtonGroup>
|
||||
</Grid>
|
||||
<Grid
|
||||
container
|
||||
display="flex"
|
||||
flexDirection="row"
|
||||
gap={2}
|
||||
direction="row"
|
||||
mt={2}
|
||||
>
|
||||
<NlpPatternSelect
|
||||
patterns={patterns}
|
||||
onChange={(patterns: NlpPattern[]) => {
|
||||
setPatterns(patterns);
|
||||
}}
|
||||
fullWidth={true}
|
||||
/>
|
||||
</Grid>
|
||||
</Grid>
|
||||
|
||||
<Grid mt={3}>
|
||||
|
@ -103,6 +103,7 @@ const PatternInput: FC<PatternInputProps> = ({
|
||||
<NlpPatternSelect
|
||||
patterns={pattern as NlpPattern[]}
|
||||
onChange={setPattern}
|
||||
fullWidth={true}
|
||||
/>
|
||||
)}
|
||||
{["payload", "content", "menu"].includes(patternType) ? (
|
||||
|
Loading…
Reference in New Issue
Block a user