Mirror of https://github.com/hexastack/hexabot (synced 2025-06-26 18:27:28 +00:00)
Merge pull request #842 from Hexastack/839-nlu-values---display-samples-count-per-value
feat: support the display samples count per value
This commit is contained in: commit aed32e8a21
@@ -10,17 +10,14 @@ import { BadRequestException, NotFoundException } from '@nestjs/common';
 import { MongooseModule } from '@nestjs/mongoose';
 
 import { getUpdateOneError } from '@/utils/test/errors/messages';
-import { nlpEntityFixtures } from '@/utils/test/fixtures/nlpentity';
 import {
   installNlpValueFixtures,
   nlpValueFixtures,
 } from '@/utils/test/fixtures/nlpvalue';
-import { getPageQuery } from '@/utils/test/pagination';
 import {
   closeInMongodConnection,
   rootMongooseTestModule,
 } from '@/utils/test/test';
-import { TFixtures } from '@/utils/test/types';
 import { buildTestingMocks } from '@/utils/test/utils';
 
 import { NlpValueCreateDto } from '../dto/nlp-value.dto';
@@ -29,11 +26,7 @@ import { NlpSampleEntityRepository } from '../repositories/nlp-sample-entity.rep
 import { NlpValueRepository } from '../repositories/nlp-value.repository';
 import { NlpEntityModel } from '../schemas/nlp-entity.schema';
 import { NlpSampleEntityModel } from '../schemas/nlp-sample-entity.schema';
-import {
-  NlpValue,
-  NlpValueFull,
-  NlpValueModel,
-} from '../schemas/nlp-value.schema';
+import { NlpValue, NlpValueModel } from '../schemas/nlp-value.schema';
 import { NlpEntityService } from '../services/nlp-entity.service';
 import { NlpValueService } from '../services/nlp-value.service';
 
@@ -80,63 +73,6 @@ describe('NlpValueController', () => {
 
   afterEach(jest.clearAllMocks);
 
-  describe('findPage', () => {
-    it('should find nlp Values, and foreach nlp value populate the corresponding entity', async () => {
-      const pageQuery = getPageQuery<NlpValue>({
-        sort: ['value', 'desc'],
-      });
-      const result = await nlpValueController.findPage(
-        pageQuery,
-        ['entity'],
-        {},
-      );
-
-      const nlpValueFixturesWithEntities = nlpValueFixtures.reduce(
-        (acc, curr) => {
-          acc.push({
-            ...curr,
-            entity: nlpEntityFixtures[
-              parseInt(curr.entity!)
-            ] as NlpValueFull['entity'],
-            builtin: curr.builtin!,
-            expressions: curr.expressions!,
-            metadata: curr.metadata!,
-          });
-          return acc;
-        },
-        [] as TFixtures<NlpValueFull>[],
-      );
-      expect(result).toEqualPayload(nlpValueFixturesWithEntities);
-    });
-
-    it('should find nlp Values', async () => {
-      const pageQuery = getPageQuery<NlpValue>({
-        sort: ['value', 'desc'],
-      });
-      const result = await nlpValueController.findPage(
-        pageQuery,
-        ['invalidCriteria'],
-        {},
-      );
-      const nlpEntities = await nlpEntityService.findAll();
-      const nlpValueFixturesWithEntities = nlpValueFixtures.reduce(
-        (acc, curr) => {
-          const ValueWithEntities = {
-            ...curr,
-            entity: curr.entity ? nlpEntities[parseInt(curr.entity!)].id : null,
-            expressions: curr.expressions!,
-            metadata: curr.metadata!,
-            builtin: curr.builtin!,
-          };
-          acc.push(ValueWithEntities);
-          return acc;
-        },
-        [] as TFixtures<NlpValueCreateDto>[],
-      );
-      expect(result).toEqualPayload(nlpValueFixturesWithEntities);
-    });
-  });
-
   describe('count', () => {
     it('should count the nlp Values', async () => {
       const result = await nlpValueController.filterCount();
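Note that the removed findPage tests have no replacement in this spec. A rough sketch of what a test for the renamed findWithCount controller method could look like is shown below; it is not part of the commit, it assumes the (pageQuery, populate, filters) signature from the controller diff further down, and it would need the getPageQuery helper that this commit removes from the imports (plus the project's custom toEqualPayload/Jest setup already used in this file).

  describe('findWithCount', () => {
    it('should return nlp values along with their samples count', async () => {
      // Hypothetical test, not part of this change.
      const pageQuery = getPageQuery<NlpValue>({ sort: ['value', 'desc'] });

      const result = await nlpValueController.findWithCount(
        pageQuery,
        ['entity'],
        {},
      );

      // Every returned row is expected to expose a numeric nlpSamplesCount.
      result.forEach((row) => {
        expect(typeof row.nlpSamplesCount).toBe('number');
      });
    });
  });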
@@ -30,6 +30,7 @@ import { PageQueryPipe } from '@/utils/pagination/pagination-query.pipe';
 import { PopulatePipe } from '@/utils/pipes/populate.pipe';
 import { SearchFilterPipe } from '@/utils/pipes/search-filter.pipe';
 import { TFilterQuery } from '@/utils/types/filter.types';
+import { Format } from '@/utils/types/format.types';
 
 import { NlpValueCreateDto, NlpValueUpdateDto } from '../dto/nlp-value.dto';
 import {
@@ -126,7 +127,7 @@ export class NlpValueController extends BaseController<
   }
 
   /**
-   * Retrieves a paginated list of NLP values.
+   * Retrieves a paginated list of NLP values with NLP Samples count.
    *
    * Supports filtering, pagination, and optional population of related entities.
    *
@@ -134,10 +135,10 @@ export class NlpValueController extends BaseController<
    * @param populate - An array of related entities to populate.
    * @param filters - Filters to apply when retrieving the NLP values.
    *
-   * @returns A promise resolving to a paginated list of NLP values.
+   * @returns A promise resolving to a paginated list of NLP values with NLP Samples count.
    */
   @Get()
-  async findPage(
+  async findWithCount(
     @Query(PageQueryPipe) pageQuery: PageQueryDto<NlpValue>,
     @Query(PopulatePipe) populate: string[],
     @Query(
|
|||||||
)
|
)
|
||||||
filters: TFilterQuery<NlpValue>,
|
filters: TFilterQuery<NlpValue>,
|
||||||
) {
|
) {
|
||||||
return this.canPopulate(populate)
|
return await this.nlpValueService.findWithCount(
|
||||||
? await this.nlpValueService.findAndPopulate(filters, pageQuery)
|
this.canPopulate(populate) ? Format.FULL : Format.STUB,
|
||||||
: await this.nlpValueService.find(filters, pageQuery);
|
pageQuery,
|
||||||
|
filters,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
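For orientation, here is a minimal client-side sketch of how the two branches of the ternary are reached. The /nlpvalue route prefix, the exact query-parameter names, and the response typing are assumptions for illustration, not part of this diff; the behavior it describes (populate decides Format.FULL vs Format.STUB, every row carries nlpSamplesCount) follows from the controller and repository changes.

// Sketch only: assumes this controller is mounted under `/nlpvalue` and that
// the query parameters mirror the pipes used above (PopulatePipe, PageQueryPipe).
async function fetchNlpValuesWithCount(baseUrl: string, populateEntity: boolean) {
  const params = new URLSearchParams({ skip: '0', limit: '10' });
  if (populateEntity) {
    // With a valid populate field, canPopulate() is truthy and the controller
    // resolves Format.FULL, so each value comes back with a populated entity.
    params.append('populate', 'entity');
  }
  // Either way, every item is expected to include `nlpSamplesCount`.
  const res = await fetch(`${baseUrl}/nlpvalue?${params.toString()}`);
  if (!res.ok) {
    throw new Error(`Request failed: ${res.status}`);
  }
  return (await res.json()) as Array<{ value: string; nlpSamplesCount: number }>;
}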
@@ -8,10 +8,20 @@
 
 import { Injectable } from '@nestjs/common';
 import { InjectModel } from '@nestjs/mongoose';
-import { Document, Model, Query } from 'mongoose';
+import { plainToInstance } from 'class-transformer';
+import {
+  Document,
+  Model,
+  PipelineStage,
+  Query,
+  SortOrder,
+  Types,
+} from 'mongoose';
 
 import { BaseRepository, DeleteResult } from '@/utils/generics/base-repository';
+import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
 import { TFilterQuery } from '@/utils/types/filter.types';
+import { Format } from '@/utils/types/format.types';
 
 import { NlpValueDto } from '../dto/nlp-value.dto';
 import {
@@ -19,7 +29,10 @@ import {
   NlpValue,
   NlpValueDocument,
   NlpValueFull,
+  NlpValueFullWithCount,
   NlpValuePopulate,
+  NlpValueWithCount,
+  TNlpValueCount,
 } from '../schemas/nlp-value.schema';
 
 import { NlpSampleEntityRepository } from './nlp-sample-entity.repository';
@@ -106,4 +119,139 @@ export class NlpValueRepository extends BaseRepository<
       throw new Error('Attempted to delete a NLP value using unknown criteria');
     }
   }
+
+  private getSortDirection(sortOrder: SortOrder) {
+    return typeof sortOrder === 'number'
+      ? sortOrder
+      : sortOrder.toString().toLowerCase() === 'desc'
+        ? -1
+        : 1;
+  }
+
+  /**
+   * Performs an aggregation to retrieve NLP values with their sample counts.
+   *
+   * @param format - The format can be full or stub
+   * @param pageQuery - The pagination parameters
+   * @param filterQuery - The filter criteria
+   * @returns Aggregated Nlp Value results with sample counts
+   */
+  private async aggregateWithCount<F extends Format>(
+    format: F,
+    {
+      limit = 10,
+      skip = 0,
+      sort = ['createdAt', 'desc'],
+    }: PageQueryDto<NlpValue>,
+    { $and = [], ...rest }: TFilterQuery<NlpValue>,
+  ): Promise<TNlpValueCount<F>[]> {
+    const pipeline: PipelineStage[] = [
+      {
+        $match: {
+          ...rest,
+          ...($and.length
+            ? {
+                $and: $and.map(({ entity, ...rest }) => ({
+                  ...rest,
+                  ...(entity
+                    ? { entity: new Types.ObjectId(String(entity)) }
+                    : {}),
+                })),
+              }
+            : {}),
+        },
+      },
+      {
+        $skip: skip,
+      },
+      {
+        $limit: limit,
+      },
+      {
+        $lookup: {
+          from: 'nlpsampleentities',
+          localField: '_id',
+          foreignField: 'value',
+          as: '_sampleEntities',
+        },
+      },
+      {
+        $unwind: {
+          path: '$_sampleEntities',
+          preserveNullAndEmptyArrays: true,
+        },
+      },
+      {
+        $group: {
+          _id: '$_id',
+          _originalDoc: {
+            $first: {
+              $unsetField: { input: '$$ROOT', field: 'nlpSamplesCount' },
+            },
+          },
+          nlpSamplesCount: {
+            $sum: { $cond: [{ $ifNull: ['$_sampleEntities', false] }, 1, 0] },
+          },
+        },
+      },
+      {
+        $replaceWith: {
+          $mergeObjects: [
+            '$_originalDoc',
+            { nlpSamplesCount: '$nlpSamplesCount' },
+          ],
+        },
+      },
+      ...(format === Format.FULL
+        ? [
+            {
+              $lookup: {
+                from: 'nlpentities',
+                localField: 'entity',
+                foreignField: '_id',
+                as: 'entity',
+              },
+            },
+            {
+              $unwind: '$entity',
+            },
+          ]
+        : []),
+      {
+        $sort: {
+          [sort[0]]: this.getSortDirection(sort[1]),
+          _id: this.getSortDirection(sort[1]),
+        },
+      },
+    ];
+
+    return await this.model.aggregate<TNlpValueCount<F>>(pipeline).exec();
+  }
+
+  async findWithCount<F extends Format>(
+    format: F,
+    pageQuery: PageQueryDto<NlpValue>,
+    filterQuery: TFilterQuery<NlpValue>,
+  ): Promise<TNlpValueCount<F>[]> {
+    try {
+      const aggregatedResults = await this.aggregateWithCount(
+        format,
+        pageQuery,
+        filterQuery,
+      );
+
+      if (format === Format.FULL) {
+        return plainToInstance(NlpValueFullWithCount, aggregatedResults, {
+          excludePrefixes: ['_'],
+        }) as TNlpValueCount<F>[];
+      }
+
+      return plainToInstance(NlpValueWithCount, aggregatedResults, {
+        excludePrefixes: ['_'],
+      }) as TNlpValueCount<F>[];
+    } catch (error) {
+      this.logger.error(`Error in findWithCount: ${error.message}`, error);
+      throw error;
+    }
+  }
 }
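To make the intent of the $lookup/$unwind/$group stages concrete, the following plain-TypeScript sketch performs the same computation over in-memory arrays: for every NLP value, count the nlp-sample-entity documents whose value field references it and attach the result as nlpSamplesCount. The collection and field names follow the pipeline above; the ids and data are invented, and pagination/sorting are left out.

// In-memory equivalent of the counting part of the aggregation above.
type ValueDoc = { _id: string; value: string; entity: string };
type SampleEntityDoc = { _id: string; value: string; sample: string };

function withSampleCounts(values: ValueDoc[], sampleEntities: SampleEntityDoc[]) {
  return values.map((v) => ({
    ...v,
    // $lookup (localField: _id, foreignField: value) followed by the
    // $group/$sum stage boils down to counting the matching join rows.
    nlpSamplesCount: sampleEntities.filter((se) => se.value === v._id).length,
  }));
}

const values: ValueDoc[] = [
  { _id: 'v1', value: 'greeting', entity: 'e1' },
  { _id: 'v2', value: 'goodbye', entity: 'e1' },
];
const sampleEntities: SampleEntityDoc[] = [
  { _id: 's1', value: 'v1', sample: 'sample-1' },
  { _id: 's2', value: 'v1', sample: 'sample-2' },
];

// [{ ..., nlpSamplesCount: 2 }, { ..., nlpSamplesCount: 0 }]
console.log(withSampleCounts(values, sampleEntities));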
@@ -16,6 +16,7 @@ import {
   TFilterPopulateFields,
   THydratedDocument,
 } from '@/utils/types/filter.types';
+import { TStubOrFull } from '@/utils/types/format.types';
 
 import { NlpEntity, NlpEntityFull } from './nlp-entity.schema';
 import { NlpValueMap } from './types';
@@ -106,6 +107,14 @@ export class NlpValueFull extends NlpValueStub {
   entity: NlpEntity;
 }
 
+export class NlpValueWithCount extends NlpValue {
+  nlpSamplesCount: number;
+}
+
+export class NlpValueFullWithCount extends NlpValueFull {
+  nlpSamplesCount: number;
+}
+
 export type NlpValueDocument = THydratedDocument<NlpValue>;
 
 export const NlpValueModel: ModelDefinition = LifecycleHookManager.attach({
@@ -121,3 +130,9 @@ export type NlpValuePopulate = keyof TFilterPopulateFields<
 >;
 
 export const NLP_VALUE_POPULATE: NlpValuePopulate[] = ['entity'];
+
+export type TNlpValueCount<T> = TStubOrFull<
+  T,
+  NlpValueWithCount,
+  NlpValueFullWithCount
+>;
@@ -1,5 +1,5 @@
 /*
- * Copyright © 2024 Hexastack. All rights reserved.
+ * Copyright © 2025 Hexastack. All rights reserved.
  *
  * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
  * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
@@ -10,6 +10,9 @@ import { forwardRef, Inject, Injectable } from '@nestjs/common';
 
 import { DeleteResult } from '@/utils/generics/base-repository';
 import { BaseService } from '@/utils/generics/base-service';
+import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
+import { TFilterQuery } from '@/utils/types/filter.types';
+import { Format } from '@/utils/types/format.types';
 
 import { NlpValueCreateDto, NlpValueDto } from '../dto/nlp-value.dto';
 import { NlpValueRepository } from '../repositories/nlp-value.repository';
@@ -18,6 +21,7 @@ import {
   NlpValue,
   NlpValueFull,
   NlpValuePopulate,
+  TNlpValueCount,
 } from '../schemas/nlp-value.schema';
 import { NlpSampleEntityValue } from '../schemas/types';
 
@@ -218,4 +222,12 @@ export class NlpValueService extends BaseService<
     });
     return Promise.all(promises);
   }
+
+  async findWithCount<F extends Format>(
+    format: F,
+    pageQuery: PageQueryDto<NlpValue>,
+    filters: TFilterQuery<NlpValue>,
+  ): Promise<TNlpValueCount<F>[]> {
+    return await this.repository.findWithCount(format, pageQuery, filters);
+  }
 }
api/src/utils/types/format.types.ts (new file, 18 lines)
@@ -0,0 +1,18 @@
+/*
+ * Copyright © 2025 Hexastack. All rights reserved.
+ *
+ * Licensed under the GNU Affero General Public License v3.0 (AGPLv3) with the following additional terms:
+ * 1. The name "Hexabot" is a trademark of Hexastack. You may not use this name in derivative works without express written permission.
+ * 2. All derivative works must include clear attribution to the original creator and software, Hexastack and Hexabot, in a prominent location (e.g., in the software's "About" section, documentation, and README file).
+ */
+
+export enum Format {
+  NONE = 0,
+  STUB = 1,
+  BASIC = 2,
+  FULL = 3,
+}
+
+export type TStubOrFull<TF, TStub, TFull> = TF extends Format.STUB
+  ? TStub
+  : TFull;
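A standalone sketch of how this conditional type drives the repository's return type: Format and TStubOrFull are restated from the new file above so the snippet compiles on its own, while the two result interfaces are simplified stand-ins for NlpValueWithCount and NlpValueFullWithCount.

// Restated for illustration only; the real definitions live in the diff above.
enum Format { NONE = 0, STUB = 1, BASIC = 2, FULL = 3 }
type TStubOrFull<TF, TStub, TFull> = TF extends Format.STUB ? TStub : TFull;

interface NlpValueWithCount { value: string; nlpSamplesCount: number }
interface NlpValueFullWithCount extends NlpValueWithCount { entity: { name: string } }

type TNlpValueCount<T> = TStubOrFull<T, NlpValueWithCount, NlpValueFullWithCount>;

// Resolution at the two call sites used by the controller:
export type StubRows = TNlpValueCount<Format.STUB>; // -> NlpValueWithCount
export type FullRows = TNlpValueCount<Format.FULL>; // -> NlpValueFullWithCount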
@@ -334,6 +334,7 @@
   "nlp": "NLU",
   "nlp_entity": "Entity",
   "nlp_entity_value": "Value",
+  "nlp_samples_count": "Samples count",
   "value": "Value",
   "synonyms": "Synonyms",
   "lookups": "Lookups",
@@ -334,6 +334,7 @@
   "nlp": "NLU",
   "nlp_entity": "Entité NLU",
   "nlp_entity_value": "Valeur NLU",
+  "nlp_samples_count": "Nombre des échantillons",
   "value": "Valeur",
   "lookups": "Stratégies",
   "lookup_strategies": "Stratégie de recherche",
@@ -55,10 +55,10 @@ export const NlpValues = ({ entityId }: { entityId: string }) => {
   const canHaveSynonyms = nlpEntity?.lookups?.[0] === NlpLookups.keywords;
   const { onSearch, searchPayload } = useSearch<INlpValue>({
     $eq: [{ entity: entityId }],
-    $or: ["doc", "value"]
+    $or: ["doc", "value"],
   });
   const { dataGridProps } = useFind(
-    { entity: EntityType.NLP_VALUE },
+    { entity: EntityType.NLP_VALUE, format: Format.FULL },
     {
       params: searchPayload,
     },
@@ -103,7 +103,7 @@ export const NlpValues = ({ entityId }: { entityId: string }) => {
     ],
     t("label.operations"),
   );
   const synonymsColumn = {
     flex: 3,
     field: "synonyms",
     headerName: t("label.synonyms"),
@@ -125,6 +125,24 @@ export const NlpValues = ({ entityId }: { entityId: string }) => {
       disableColumnMenu: true,
       renderHeader,
     },
+    {
+      flex: 2,
+      field: "nlpSamplesCount",
+      align: "center",
+      headerName: t("label.nlp_samples_count"),
+      sortable: true,
+      disableColumnMenu: true,
+      headerAlign: "center",
+      renderHeader,
+      renderCell: ({ row }) => (
+        <Chip
+          sx={{ alignContent: "center" }}
+          id={row.id}
+          label={row.nlpSamplesCount}
+          variant="inbox"
+        />
+      ),
+    },
     {
       flex: 3,
       field: "doc",
@@ -19,6 +19,7 @@ export interface INlpValueAttributes {
   expressions?: string[];
   metadata?: Record<string, any>;
   builtin?: boolean;
+  nlpSamplesCount?: number;
 }
 
 export interface INlpValueStub extends IBaseSchema, INlpValueAttributes {}
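Putting the pieces together, the grid row that the new column renders is an INlpValue whose optional nlpSamplesCount now arrives from the findWithCount endpoint. A small sketch of consuming that field follows; the helper name and sample data are hypothetical, and the fallback to 0 mirrors the field being optional in the interface above.

// Hypothetical helper, not part of the diff: formats the per-value samples
// count the way the new Chip column displays it, defaulting to 0 when absent.
type NlpValueRow = { id: string; value: string; nlpSamplesCount?: number };

const formatSamplesCount = (row: NlpValueRow): string =>
  `${row.value}: ${row.nlpSamplesCount ?? 0} sample(s)`;

// e.g. "greeting: 3 sample(s)"
console.log(formatSamplesCount({ id: '1', value: 'greeting', nlpSamplesCount: 3 }));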