mirror of https://github.com/hexastack/hexabot
synced 2025-06-26 18:27:28 +00:00

fix: update nlp value aggregate

This commit is contained in:
parent 486def7e75
commit 1eb09ab84e
@@ -10,18 +10,14 @@ import { BadRequestException, NotFoundException } from '@nestjs/common';
 import { MongooseModule } from '@nestjs/mongoose';
 
 import { getUpdateOneError } from '@/utils/test/errors/messages';
-import { nlpEntityFixtures } from '@/utils/test/fixtures/nlpentity';
 import {
   installNlpValueFixtures,
   nlpValueFixtures,
 } from '@/utils/test/fixtures/nlpvalue';
-import { getPageQuery } from '@/utils/test/pagination';
 import {
   closeInMongodConnection,
   rootMongooseTestModule,
 } from '@/utils/test/test';
-import { TFixtures } from '@/utils/test/types';
-import { buildTestingMocks } from '@/utils/test/utils';
 
 import { NlpValueCreateDto } from '../dto/nlp-value.dto';
 import { NlpEntityRepository } from '../repositories/nlp-entity.repository';
@@ -29,11 +25,7 @@ import { NlpSampleEntityRepository } from '../repositories/nlp-sample-entity.rep
 import { NlpValueRepository } from '../repositories/nlp-value.repository';
 import { NlpEntityModel } from '../schemas/nlp-entity.schema';
 import { NlpSampleEntityModel } from '../schemas/nlp-sample-entity.schema';
-import {
-  NlpValue,
-  NlpValueFull,
-  NlpValueModel,
-} from '../schemas/nlp-value.schema';
+import { NlpValue, NlpValueModel } from '../schemas/nlp-value.schema';
 import { NlpEntityService } from '../services/nlp-entity.service';
 import { NlpValueService } from '../services/nlp-value.service';
 
@@ -80,63 +72,6 @@ describe('NlpValueController', () => {
 
   afterEach(jest.clearAllMocks);
 
-  describe('findPage', () => {
-    it('should find nlp Values, and foreach nlp value populate the corresponding entity', async () => {
-      const pageQuery = getPageQuery<NlpValue>({
-        sort: ['value', 'desc'],
-      });
-      const result = await nlpValueController.findPage(
-        pageQuery,
-        ['entity'],
-        {},
-      );
-
-      const nlpValueFixturesWithEntities = nlpValueFixtures.reduce(
-        (acc, curr) => {
-          acc.push({
-            ...curr,
-            entity: nlpEntityFixtures[
-              parseInt(curr.entity!)
-            ] as NlpValueFull['entity'],
-            builtin: curr.builtin!,
-            expressions: curr.expressions!,
-            metadata: curr.metadata!,
-          });
-          return acc;
-        },
-        [] as TFixtures<NlpValueFull>[],
-      );
-      expect(result).toEqualPayload(nlpValueFixturesWithEntities);
-    });
-
-    it('should find nlp Values', async () => {
-      const pageQuery = getPageQuery<NlpValue>({
-        sort: ['value', 'desc'],
-      });
-      const result = await nlpValueController.findPage(
-        pageQuery,
-        ['invalidCriteria'],
-        {},
-      );
-      const nlpEntities = await nlpEntityService.findAll();
-      const nlpValueFixturesWithEntities = nlpValueFixtures.reduce(
-        (acc, curr) => {
-          const ValueWithEntities = {
-            ...curr,
-            entity: curr.entity ? nlpEntities[parseInt(curr.entity!)].id : null,
-            expressions: curr.expressions!,
-            metadata: curr.metadata!,
-            builtin: curr.builtin!,
-          };
-          acc.push(ValueWithEntities);
-          return acc;
-        },
-        [] as TFixtures<NlpValueCreateDto>[],
-      );
-      expect(result).toEqualPayload(nlpValueFixturesWithEntities);
-    });
-  });
-
   describe('count', () => {
     it('should count the nlp Values', async () => {
       const result = await nlpValueController.filterCount();
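Side note on the removed spec: the two findPage tests above are dropped without a direct replacement in this hunk. A follow-up test for the renamed endpoint could look roughly like the sketch below (hypothetical, reusing the spec's existing nlpValueController harness and the getPageQuery helper this commit stops importing; the findWithCount signature is taken from the controller hunk further down).

// Hypothetical spec sketch, not part of this commit.
describe('findWithCount', () => {
  it('should return nlp values together with their samples count', async () => {
    const pageQuery = getPageQuery<NlpValue>({ sort: ['value', 'desc'] });
    // An empty populate list falls back to the stub (non-populated) variant.
    const result = await nlpValueController.findWithCount(pageQuery, [], {});

    result.forEach((nlpValue) => {
      // Every item is expected to carry the aggregated counter.
      expect(typeof nlpValue.nlpSamplesCount).toBe('number');
    });
  });
});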
@@ -125,24 +125,8 @@ export class NlpValueController extends BaseController<
     return doc;
   }
 
-  @Get('')
-  async findAndPopulateWithCount(
-    @Query(PageQueryPipe) pageQuery: PageQueryDto<NlpValue>,
-    @Query(PopulatePipe) populate: string[],
-    @Query(
-      new SearchFilterPipe<NlpValue>({ allowedFields: ['entity', 'value'] }),
-    )
-    filters: TFilterQuery<NlpValue>,
-  ) {
-    return await this.nlpValueService.findAndPopulateWithCount(
-      pageQuery,
-      populate,
-      filters,
-    );
-  }
-
   /**
-   * Retrieves a paginated list of NLP values.
+   * Retrieves a paginated list of NLP values with NLP Samples count.
    *
    * Supports filtering, pagination, and optional population of related entities.
    *
@@ -150,10 +134,10 @@ export class NlpValueController extends BaseController<
    * @param populate - An array of related entities to populate.
    * @param filters - Filters to apply when retrieving the NLP values.
    *
-   * @returns A promise resolving to a paginated list of NLP values.
+   * @returns A promise resolving to a paginated list of NLP values with NLP Samples count.
    */
-  // @Get('') disabled
-  async findPage(
+  @Get()
+  async findWithCount(
    @Query(PageQueryPipe) pageQuery: PageQueryDto<NlpValue>,
    @Query(PopulatePipe) populate: string[],
    @Query(
@@ -164,8 +148,8 @@ export class NlpValueController extends BaseController<
     filters: TFilterQuery<NlpValue>,
   ) {
     return this.canPopulate(populate)
-      ? await this.nlpValueService.findAndPopulate(filters, pageQuery)
-      : await this.nlpValueService.find(filters, pageQuery);
+      ? await this.nlpValueService.findAndPopulateWithCount(pageQuery, filters)
+      : await this.nlpValueService.findWithCount(pageQuery, filters);
   }
 
   /**
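The controller now exposes a single @Get() route: the populate query parameter only decides whether the related entity is resolved, and both branches go through the count-aware service methods (the remaining hunks below rework NlpValueRepository, the nlp-value schema and NlpValueService accordingly). As a rough, hypothetical consumer sketch — the /nlpvalue route prefix and the exact response shape are assumptions inferred from this diff, not confirmed by it:

// Hypothetical consumer sketch: both requests hit the same @Get() handler.
async function fetchNlpValues(baseUrl: string, withEntity: boolean) {
  const url = new URL('/nlpvalue', baseUrl); // route prefix assumed
  if (withEntity) {
    // Resolves the related entity via findAndPopulateWithCount.
    url.searchParams.set('populate', 'entity');
  }
  const response = await fetch(url);

  // Each returned item now also carries nlpSamplesCount.
  return (await response.json()) as Array<{ value: string; nlpSamplesCount: number }>;
}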
@@ -8,21 +8,27 @@
 
 import { Injectable } from '@nestjs/common';
 import { InjectModel } from '@nestjs/mongoose';
-import { Document, Model, Query, Types } from 'mongoose';
+import { plainToClass } from 'class-transformer';
+import { Document, Model, PipelineStage, Query, Types } from 'mongoose';
 
 import { BaseRepository, DeleteResult } from '@/utils/generics/base-repository';
 import { PageQueryDto } from '@/utils/pagination/pagination-query.dto';
 import { TFilterQuery } from '@/utils/types/filter.types';
 
 import { NlpValueDto } from '../dto/nlp-value.dto';
+import { NlpEntity } from '../schemas/nlp-entity.schema';
 import {
   NLP_VALUE_POPULATE,
   NlpValue,
   NlpValueDocument,
   NlpValueFull,
+  NlpValueFullWithCount,
   NlpValuePopulate,
+  NlpValueWithCount,
+  TNlpValueCountFormat,
 } from '../schemas/nlp-value.schema';
 
+import { NlpEntityRepository } from './nlp-entity.repository';
 import { NlpSampleEntityRepository } from './nlp-sample-entity.repository';
 
 @Injectable()
@@ -35,6 +41,8 @@ export class NlpValueRepository extends BaseRepository<
   constructor(
     @InjectModel(NlpValue.name) readonly model: Model<NlpValue>,
     private readonly nlpSampleEntityRepository: NlpSampleEntityRepository,
+    @Inject(forwardRef(() => NlpEntityRepository))
+    private readonly nlpEntityRepository: NlpEntityRepository,
   ) {
     super(model, NlpValue, NLP_VALUE_POPULATE, NlpValueFull);
   }
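The constructor above injects NlpEntityRepository through forwardRef, the standard NestJS escape hatch when two providers depend on each other. A minimal, self-contained sketch of the pattern (illustrative class names, not the actual hexabot providers); Inject and forwardRef come from @nestjs/common:

import { forwardRef, Inject, Injectable } from '@nestjs/common';

// Illustrative only: two providers that need each other at runtime.
@Injectable()
export class AlphaRepository {
  constructor(
    // The arrow defers resolution until both classes are defined.
    @Inject(forwardRef(() => BetaRepository))
    private readonly betaRepository: BetaRepository,
  ) {}
}

@Injectable()
export class BetaRepository {
  constructor(
    @Inject(forwardRef(() => AlphaRepository))
    private readonly alphaRepository: AlphaRepository,
  ) {}
}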
@@ -108,97 +116,162 @@ export class NlpValueRepository extends BaseRepository<
     }
   }
 
-  async findAndPopulateWithCount(
+  private async aggregateWithCount<T extends 'full' | 'stub' = 'stub'>(
     { limit = 10, skip = 0, sort = ['createdAt', -1] }: PageQueryDto<NlpValue>,
-    populate: string[],
     { $and = [], ...rest }: TFilterQuery<NlpValue>,
+    populatePipelineStages: PipelineStage[] = [],
   ) {
-    return this.model
-      .aggregate([
-        {
-          // support filters
-          $match: {
-            ...rest,
-            ...($and.length && {
-              $and:
-                $and.map(({ entity, ...rest }) =>
-                  entity
-                    ? {
-                        ...rest,
-                        entity: new Types.ObjectId(String(entity)),
-                      }
-                    : rest,
-                ) || [],
-            }),
-          },
-        },
-        // support pageQuery
-        {
-          $limit: limit,
-        },
-        {
-          $skip: skip,
-        },
-        {
-          $sort: {
-            [sort[0]]: sort[1] === 'desc' ? -1 : 1,
-          },
-        },
-        {
-          $lookup: {
-            from: 'nlpsampleentities',
-            localField: '_id',
-            foreignField: 'value',
-            as: 'sampleEntities',
-          },
-        },
-        {
-          $unwind: {
-            path: '$sampleEntities',
-            preserveNullAndEmptyArrays: true,
-          },
-        },
-        {
-          $lookup: {
-            from: 'nlpentities',
-            localField: 'entity',
-            foreignField: '_id',
-            as: 'entities',
-          },
-        },
-        {
-          $group: {
-            _id: '$_id',
-            value: { $first: '$value' },
-            expressions: { $first: '$expressions' },
-            builtin: { $first: '$builtin' },
-            metadata: { $first: '$metadata' },
-            createdAt: { $first: '$createdAt' },
-            updatedAt: { $first: '$updatedAt' },
-            entity: {
-              // support populate
-              $first: this.canPopulate(populate) ? '$entities' : '$entity',
-            },
-            nlpSamplesCount: {
-              $sum: { $cond: [{ $ifNull: ['$sampleEntities', false] }, 1, 0] },
-            },
-          },
-        },
-        {
-          $project: {
-            id: '$_id',
-            _id: 0,
-            value: 1,
-            expressions: 1,
-            builtin: 1,
-            entity: 1,
-            metadata: 1,
-            createdAt: 1,
-            updatedAt: 1,
-            nlpSamplesCount: 1,
-          },
-        },
-      ])
-      .exec();
+    const pipeline: PipelineStage[] = [
+      // support pageQuery
+      {
+        $limit: limit,
+      },
+      {
+        $skip: skip,
+      },
+      {
+        $sort: {
+          [sort[0]]: sort[1] === 'desc' ? -1 : 1,
+          _id: sort[1] === 'desc' ? -1 : 1,
+        },
+      },
+      {
+        // support filters
+        $match: {
+          ...rest,
+          ...($and.length && {
+            $and:
+              $and.map(({ entity, ...rest }) =>
+                entity
+                  ? {
+                      ...rest,
+                      entity: new Types.ObjectId(String(entity)),
+                    }
+                  : rest,
+              ) || [],
+          }),
+        },
+      },
+      {
+        $lookup: {
+          from: 'nlpsampleentities',
+          localField: '_id',
+          foreignField: 'value',
+          as: 'sampleEntities',
+        },
+      },
+      {
+        $unwind: {
+          path: '$sampleEntities',
+          preserveNullAndEmptyArrays: true,
+        },
+      },
+      {
+        $group: {
+          _id: '$_id',
+          value: { $first: '$value' },
+          expressions: { $first: '$expressions' },
+          builtin: { $first: '$builtin' },
+          metadata: { $first: '$metadata' },
+          createdAt: { $first: '$createdAt' },
+          updatedAt: { $first: '$updatedAt' },
+          entity: { $first: '$entity' },
+          nlpSamplesCount: {
+            $sum: { $cond: [{ $ifNull: ['$sampleEntities', false] }, 1, 0] },
+          },
+        },
+      },
+      {
+        $project: {
+          id: '$_id',
+          _id: 0,
+          value: 1,
+          expressions: 1,
+          builtin: 1,
+          entity: 1,
+          metadata: 1,
+          createdAt: 1,
+          updatedAt: 1,
+          nlpSamplesCount: 1,
+        },
+      },
+      ...populatePipelineStages,
+    ];
+
+    return await this.model.aggregate<TNlpValueCountFormat<T>>(pipeline).exec();
+  }
+
+  private async plainToClass<T extends 'full' | 'stub'>(
+    format: 'full' | 'stub',
+    aggregatedResults: (NlpValueWithCount | NlpValueFullWithCount)[],
+  ): Promise<TNlpValueCountFormat<T>[]> {
+    if (format === 'full') {
+      const nestedNlpEntities: NlpValueFullWithCount[] = [];
+      for (const { entity, ...rest } of aggregatedResults) {
+        const plainNlpValue = {
+          ...rest,
+          entity: plainToClass(
+            NlpEntity,
+            await this.nlpEntityRepository.findOne(entity),
+            {
+              excludePrefixes: ['_'],
+            },
+          ),
+        };
+        nestedNlpEntities.push(
+          plainToClass(NlpValueFullWithCount, plainNlpValue, {
+            excludePrefixes: ['_'],
+          }),
+        );
+      }
+      return nestedNlpEntities as TNlpValueCountFormat<T>[];
+    } else {
+      const nestedNlpEntities: NlpValueWithCount[] = [];
+      for (const aggregatedResult of aggregatedResults) {
+        nestedNlpEntities.push(
+          plainToClass(NlpValueWithCount, aggregatedResult, {
+            excludePrefixes: ['_'],
+          }),
+        );
+      }
+      return nestedNlpEntities as TNlpValueCountFormat<T>[];
+    }
+  }
+
+  async findWithCount(
+    pageQuery: PageQueryDto<NlpValue>,
+    filterQuery: TFilterQuery<NlpValue>,
+  ): Promise<NlpValueWithCount[]> {
+    const aggregatedResults = await this.aggregateWithCount<'stub'>(
+      pageQuery,
+      filterQuery,
+    );
+
+    return await this.plainToClass<'stub'>('stub', aggregatedResults);
+  }
+
+  async findAndPopulateWithCount(
+    pageQuery: PageQueryDto<NlpValue>,
+    filterQuery: TFilterQuery<NlpValue>,
+  ): Promise<NlpValueFullWithCount[]> {
+    const aggregatedResults = await this.aggregateWithCount<'full'>(
+      pageQuery,
+      filterQuery,
+      [
+        {
+          $lookup: {
+            from: 'nlpentities',
+            localField: 'entity',
+            foreignField: '_id',
+            as: 'entity',
+          },
+        },
+        {
+          $unwind: '$entity',
+        },
+      ],
+    );
+
+    return await this.plainToClass<'full'>('full', aggregatedResults);
   }
 }
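The counting itself comes from the $lookup/$unwind/$group sequence: each value is joined to its rows in nlpsampleentities, and nlpSamplesCount sums 1 per joined row, with the $ifNull/$cond guard keeping values that have no samples at 0 (preserveNullAndEmptyArrays keeps them in the stream at all). A stripped-down sketch of just that idea, outside the repository (assumed minimal schema and model wiring, no paging, filters, or populate):

import { model, Schema } from 'mongoose';

// Minimal sketch of the counting stages only; the model/collection names here are assumed.
const NlpValueLite = model('NlpValueLite', new Schema({ value: String }), 'nlpvalues');

async function countSamplesPerValue() {
  return await NlpValueLite.aggregate([
    {
      $lookup: {
        from: 'nlpsampleentities', // same collection the repository joins on
        localField: '_id',
        foreignField: 'value',
        as: 'sampleEntities',
      },
    },
    // Keep values with zero samples in the stream (they simply get no joined row).
    { $unwind: { path: '$sampleEntities', preserveNullAndEmptyArrays: true } },
    {
      $group: {
        _id: '$_id',
        value: { $first: '$value' },
        // 1 per joined sample row, 0 for the preserved empty row.
        nlpSamplesCount: {
          $sum: { $cond: [{ $ifNull: ['$sampleEntities', false] }, 1, 0] },
        },
      },
    },
  ]);
}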
@@ -106,6 +106,18 @@ export class NlpValueFull extends NlpValueStub {
   entity: NlpEntity;
 }
 
+export class NlpValueWithCount extends NlpValue {
+  nlpSamplesCount: number;
+}
+
+export class NlpValueFullWithCount extends NlpValueFull {
+  nlpSamplesCount: number;
+}
+
+export class NlpValueFullWithCountDto {
+  nlpSamplesCount: number;
+}
+
 export type NlpValueDocument = THydratedDocument<NlpValue>;
 
 export const NlpValueModel: ModelDefinition = LifecycleHookManager.attach({
@@ -121,3 +133,7 @@ export type NlpValuePopulate = keyof TFilterPopulateFields<
 >;
 
 export const NLP_VALUE_POPULATE: NlpValuePopulate[] = ['entity'];
+
+export type TNlpValueCountFormat<T> = T extends 'stub'
+  ? NlpValueWithCount
+  : NlpValueFullWithCount;
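TNlpValueCountFormat is what lets aggregateWithCount<T> and plainToClass<T> in the repository return the right class for the requested format. A purely type-level illustration:

// Illustrative only: how the conditional type resolves.
type StubResult = TNlpValueCountFormat<'stub'>; // NlpValueWithCount
type FullResult = TNlpValueCountFormat<'full'>; // NlpValueFullWithCount

// e.g. in the repository, aggregateWithCount<'full'> is typed as
// Promise<TNlpValueCountFormat<'full'>[]>, i.e. Promise<NlpValueFullWithCount[]>.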
@@ -19,7 +19,9 @@ import { NlpEntity } from '../schemas/nlp-entity.schema';
 import {
   NlpValue,
   NlpValueFull,
+  NlpValueFullWithCount,
   NlpValuePopulate,
+  NlpValueWithCount,
 } from '../schemas/nlp-value.schema';
 import { NlpSampleEntityValue } from '../schemas/types';
 
@@ -221,15 +223,17 @@ export class NlpValueService extends BaseService<
     return Promise.all(promises);
   }
 
+  async findWithCount(
+    pageQuery: PageQueryDto<NlpValue>,
+    filters: TFilterQuery<NlpValue>,
+  ): Promise<NlpValueWithCount[]> {
+    return await this.repository.findWithCount(pageQuery, filters);
+  }
+
   async findAndPopulateWithCount(
     pageQuery: PageQueryDto<NlpValue>,
-    populate: string[],
     filters: TFilterQuery<NlpValue>,
-  ) {
-    return await this.repository.findAndPopulateWithCount(
-      pageQuery,
-      populate,
-      filters,
-    );
+  ): Promise<NlpValueFullWithCount[]> {
+    return await this.repository.findAndPopulateWithCount(pageQuery, filters);
   }
 }
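For orientation, one element of the stub-format result (service findWithCount, backed by the repository findWithCount) ends up shaped like the object below; the field list mirrors the $project stage of the aggregation, while the concrete values here are made up:

// Illustrative literal only; values are invented, field names come from the
// $project stage ($_id is remapped to id, entity stays a plain id in stub format).
const exampleNlpValueWithCount = {
  id: '66a0c0ffee00000000000001',
  value: 'great',
  expressions: ['awesome', 'amazing'],
  builtin: false,
  metadata: {},
  entity: '66a0c0ffee00000000000002',
  createdAt: new Date(),
  updatedAt: new Date(),
  nlpSamplesCount: 3, // number of linked nlpsampleentities rows
};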