Mirror of https://github.com/hexastack/hexabot, synced 2025-05-23 13:34:58 +00:00
feat(frontend): apply nlp language updates
parent 56c6a5306b
commit 0c02b51cf6
@@ -82,6 +82,7 @@ export class MessageRepository extends BaseRepository<
        text: _doc.message.text,
        type: NlpSampleState.inbox,
        trained: false,
        // @TODO : We need to define the language in the message entity
+       language: defaultLang.id,
      };
      try {
@@ -56,4 +56,14 @@ export class LanguageService extends BaseService<Language> {
  async getDefaultLanguage() {
    return await this.findOne({ default: true });
  }

+ /**
+  * Retrieves the language by code.
+  *
+  * @returns A promise that resolves to the `Language` object.
+  */
+ @Cacheable(DEFAULT_LANGUAGE_CACHE_KEY)
+ async getLanguageByCode(code: string) {
+   return await this.findOne({ code });
+ }
}
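The new getLanguageByCode lookup is what the NLP sample controller further down relies on to turn a request's language code into a stored Language id. As a rough illustration of the caching pattern only (this is not hexabot's actual @Cacheable implementation, whose internals are not part of this diff), a method decorator along these lines memoizes the resolved value:

// Hypothetical sketch: hexabot's real @Cacheable presumably delegates to a shared
// cache manager; this standalone version just memoizes results in a Map.
const cache = new Map<string, unknown>();

function Cacheable(key: string) {
  return (_target: object, _prop: string, descriptor: PropertyDescriptor) => {
    const original = descriptor.value;

    descriptor.value = async function (this: unknown, ...args: unknown[]) {
      // Include the arguments so getLanguageByCode('en') and ('fr') get separate entries.
      const cacheKey = `${key}:${JSON.stringify(args)}`;

      if (!cache.has(cacheKey)) {
        cache.set(cacheKey, await original.apply(this, args));
      }

      return cache.get(cacheKey);
    };
  };
}

Whatever the real decorator does, the point for this commit is that the repeated code-to-id lookups in create and updateOne stay cheap.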
@@ -199,7 +199,7 @@ describe('NlpSampleController', () => {
        trained: true,
        type: NlpSampleState.test,
        entities: [],
-       language: enLang.id,
+       language: 'en',
      };
      const result = await nlpSampleController.create(nlSample);
      expect(result).toEqualPayload({
@@ -279,7 +279,7 @@ describe('NlpSampleController', () => {
            value: 'update',
          },
        ],
-       language: frLang.id,
+       language: 'fr',
      });
      const updatedSample = {
        text: 'updated',
@@ -302,15 +302,12 @@ describe('NlpSampleController', () => {
    });

    it('should throw exception when nlp sample id not found', async () => {
-     const frLang = await languageService.findOne({
-       code: 'fr',
-     });
      await expect(
        nlpSampleController.updateOne(byeJhonSampleId, {
          text: 'updated',
          trained: true,
          type: NlpSampleState.test,
-         language: frLang.id,
+         language: 'fr',
        }),
      ).rejects.toThrow(NotFoundException);
    });
@@ -122,21 +122,24 @@ export class NlpSampleController extends BaseController<
  @CsrfCheck(true)
  @Post()
  async create(
-   @Body() { entities: nlpEntities, ...createNlpSampleDto }: NlpSampleDto,
+   @Body()
+   {
+     entities: nlpEntities,
+     language: languageCode,
+     ...createNlpSampleDto
+   }: NlpSampleDto,
  ): Promise<NlpSampleFull> {
-   const nlpSample = await this.nlpSampleService.create(
-     createNlpSampleDto as NlpSampleCreateDto,
-   );
+   const language = await this.languageService.getLanguageByCode(languageCode);
+   const nlpSample = await this.nlpSampleService.create({
+     ...createNlpSampleDto,
+     language: language.id,
+   });

    const entities = await this.nlpSampleEntityService.storeSampleEntities(
      nlpSample,
      nlpEntities,
    );

-   const language = await this.languageService.findOne(
-     createNlpSampleDto.language,
-   );
-
    return {
      ...nlpSample,
      entities,
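In other words, clients now send a language code and the backend resolves it to the Language document before persisting. An illustrative request against this endpoint (the field values are invented; only the shape follows NlpSampleDto as shown in this commit):

// Illustrative only -- values are made up, CSRF token omitted for brevity.
const payload = {
  text: "Hello Jhon",
  type: "test",
  entities: [],
  language: "en", // previously this had to be the Language document's ObjectId
};

await fetch("/nlpsample", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify(payload),
});

The controller then calls getLanguageByCode("en") and stores language: language.id, so the persisted sample still references the Language by id.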
@@ -250,7 +253,11 @@ export class NlpSampleController extends BaseController<
  async findPage(
    @Query(PageQueryPipe) pageQuery: PageQueryDto<NlpSample>,
    @Query(PopulatePipe) populate: string[],
-   @Query(new SearchFilterPipe<NlpSample>({ allowedFields: ['text', 'type'] }))
+   @Query(
+     new SearchFilterPipe<NlpSample>({
+       allowedFields: ['text', 'type', 'language'],
+     }),
+   )
    filters: TFilterQuery<NlpSample>,
  ) {
    return this.canPopulate(populate)
@@ -270,11 +277,12 @@ export class NlpSampleController extends BaseController<
  @Patch(':id')
  async updateOne(
    @Param('id') id: string,
-   @Body() updateNlpSampleDto: NlpSampleDto,
+   @Body() { entities, language: languageCode, ...sampleAttrs }: NlpSampleDto,
  ): Promise<NlpSampleFull> {
-   const { entities, ...sampleAttrs } = updateNlpSampleDto;
+   const language = await this.languageService.getLanguageByCode(languageCode);
    const sample = await this.nlpSampleService.updateOne(id, {
      ...sampleAttrs,
+     language: language.id,
      trained: false,
    });

@@ -288,8 +296,6 @@ export class NlpSampleController extends BaseController<
    const updatedSampleEntities =
      await this.nlpSampleEntityService.storeSampleEntities(sample, entities);

-   const language = await this.languageService.findOne(sampleAttrs.language);
-
    return {
      ...sample,
      language,
@@ -43,7 +43,7 @@ export class NlpSampleCreateDto {
  @IsOptional()
  type?: NlpSampleState;

- @ApiProperty({ description: 'NLP sample language', type: String })
+ @ApiProperty({ description: 'NLP sample language id', type: String })
  @IsString()
  @IsNotEmpty()
  @IsObjectId({ message: 'Language must be a valid ObjectId' })
@@ -56,6 +56,11 @@ export class NlpSampleDto extends NlpSampleCreateDto {
  })
  @IsOptional()
  entities?: NlpSampleEntityValue[];
+
+ @ApiProperty({ description: 'NLP sample language code', type: String })
+ @IsString()
+ @IsNotEmpty()
+ language: string;
}

export class NlpSampleUpdateDto extends PartialType(NlpSampleCreateDto) {}
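Worth spelling out: NlpSampleCreateDto.language stays an ObjectId (that is what gets persisted), while the public-facing NlpSampleDto.language is now a plain language code validated only as a non-empty string. A standalone class-validator sketch of that second rule, using an invented mini-DTO rather than the project's class:

import "reflect-metadata";
import { IsNotEmpty, IsString, validate } from "class-validator";
import { plainToInstance } from "class-transformer";

// Invented mini-DTO mirroring the NlpSampleDto pattern above.
class SampleDtoSketch {
  @IsString()
  @IsNotEmpty()
  text!: string;

  // A language code such as "en" -- no ObjectId constraint here.
  @IsString()
  @IsNotEmpty()
  language!: string;
}

const dto = plainToInstance(SampleDtoSketch, { text: "Hello", language: "en" });
validate(dto).then((errors) => console.log(errors.length)); // 0 -- "en" is accepted as-is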
@@ -17,14 +17,14 @@ import { DialogControlProps } from "@/hooks/useDialog";
import { useToast } from "@/hooks/useToast";
import { EntityType } from "@/services/types";
import {
+ INlpDatasetSample,
  INlpDatasetSampleAttributes,
  INlpSampleFormAttributes,
- INlpSampleFull,
} from "@/types/nlp-sample.types";

import NlpDatasetSample from "./components/NlpTrainForm";

- export type NlpSampleDialogProps = DialogControlProps<INlpSampleFull>;
+ export type NlpSampleDialogProps = DialogControlProps<INlpDatasetSample>;
export const NlpSampleDialog: FC<NlpSampleDialogProps> = ({
  open,
  data: sample,
@@ -44,15 +44,16 @@ export const NlpSampleDialog: FC<NlpSampleDialogProps> = ({
      toast.success(t("message.success_save"));
    },
  });
- const onSubmitForm = (params: INlpSampleFormAttributes) => {
+ const onSubmitForm = (form: INlpSampleFormAttributes) => {
    if (sample?.id) {
      updateSample(
        {
          id: sample.id,
          params: {
-           text: params.text,
-           type: params.type,
-           entities: [...params.keywordEntities, ...params.traitEntities],
+           text: form.text,
+           type: form.type,
+           entities: [...form.keywordEntities, ...form.traitEntities],
+           language: form.language,
          },
        },
        {
@@ -26,6 +26,7 @@ import { useTranslation } from "react-i18next";

import { DeleteDialog } from "@/app-components/dialogs";
import { ChipEntity } from "@/app-components/displays/ChipEntity";
+ import AutoCompleteEntitySelect from "@/app-components/inputs/AutoCompleteEntitySelect";
import { FilterTextfield } from "@/app-components/inputs/FilterTextfield";
import { Input } from "@/app-components/inputs/Input";
import {
@@ -43,9 +44,10 @@ import { useHasPermission } from "@/hooks/useHasPermission";
import { useSearch } from "@/hooks/useSearch";
import { useToast } from "@/hooks/useToast";
import { EntityType, Format } from "@/services/types";
+ import { ILanguage } from "@/types/language.types";
import {
+ INlpDatasetSample,
  INlpSample,
- INlpSampleFull,
  NlpSampleType,
} from "@/types/nlp-sample.types";
import { INlpSampleEntity } from "@/types/nlp-sample_entity.types";
@@ -66,12 +68,17 @@ export default function NlpSample() {
  const { apiUrl } = useConfig();
  const { toast } = useToast();
  const { t } = useTranslation();
- const [dataset, setDataSet] = useState("");
+ const [type, setType] = useState<NlpSampleType | undefined>(undefined);
+ const [language, setLanguage] = useState<string | undefined>(undefined);
  const hasPermission = useHasPermission();
  const getNlpEntityFromCache = useGetFromCache(EntityType.NLP_ENTITY);
  const getNlpValueFromCache = useGetFromCache(EntityType.NLP_VALUE);
+ const getSampleEntityFromCache = useGetFromCache(
+   EntityType.NLP_SAMPLE_ENTITY,
+ );
+ const getLanguageFromCache = useGetFromCache(EntityType.LANGUAGE);
  const { onSearch, searchPayload } = useSearch<INlpSample>({
-   $eq: dataset === "" ? [] : [{ type: dataset as NlpSampleType }],
+   $eq: [...(type ? [{ type }] : []), ...(language ? [{ language }] : [])],
    $iLike: ["text"],
  });
  const { mutateAsync: deleteNlpSample } = useDelete(EntityType.NLP_SAMPLE, {
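For clarity, the rewritten $eq clause simply accumulates whichever filters are active; a minimal re-evaluation with made-up values (the language value is the ILanguage id picked in the new select, per setLanguage(selected?.id) below):

// Illustrative only -- the id is invented.
const type: string | undefined = "train";
const language: string | undefined = "6541f9aa0c0fb70d70039f01";
const $eq = [...(type ? [{ type }] : []), ...(language ? [{ language }] : [])];
// -> [{ type: "train" }, { language: "6541f9aa0c0fb70d70039f01" }]
// If neither filter is set, it collapses to [] and only the $iLike text search applies.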
@@ -90,21 +97,29 @@ export default function NlpSample() {
    },
  );
  const deleteDialogCtl = useDialog<string>(false);
- const editDialogCtl = useDialog<INlpSampleFull>(false);
+ const editDialogCtl = useDialog<INlpDatasetSample>(false);
  const importDialogCtl = useDialog<never>(false);
- const actionColumns = getActionsColumn<INlpSampleFull>(
+ const actionColumns = getActionsColumn<INlpSample>(
    [
      {
        label: ActionColumnLabel.Edit,
-       action: ({ entities, ...rest }) => {
-         const data: INlpSampleFull = {
+       action: ({ entities, language, ...rest }) => {
+         const lang = getLanguageFromCache(language) as ILanguage;
+         const data: INlpDatasetSample = {
            ...rest,
-           entities: entities?.map(({ end, start, value, entity }) => ({
-             end,
-             start,
-             value: getNlpValueFromCache(value)?.value,
-             entity: getNlpEntityFromCache(entity)?.name,
-           })) as unknown as INlpSampleEntity[],
+           entities: entities?.map((e) => {
+             const sampleEntity = getSampleEntityFromCache(e);
+             const { end, start, value, entity } =
+               sampleEntity as INlpSampleEntity;

+             return {
+               end,
+               start,
+               value: getNlpValueFromCache(value)?.value || "",
+               entity: getNlpEntityFromCache(entity)?.name || "",
+             };
+           }),
+           language: lang.code,
          };

          editDialogCtl.openDialog(data);
@@ -119,7 +134,7 @@ export default function NlpSample() {
    ],
    t("label.operations"),
  );
- const columns: GridColDef<INlpSampleFull>[] = [
+ const columns: GridColDef<INlpSample>[] = [
    {
      flex: 1,
      field: "text",
@@ -132,38 +147,53 @@ export default function NlpSample() {
      flex: 1,
      field: "entities",
      renderCell: ({ row }) =>
-       row.entities.map((entity) => (
-         <ChipEntity
-           id={entity.entity}
-           key={entity.id}
-           variant="title"
-           field="name"
-           render={(value) => (
-             <Chip
-               variant="title"
-               label={
-                 <>
-                   {value}
-                   {` `}={` `}
-                   <ChipEntity
-                     id={entity.value}
-                     key={entity.value}
-                     variant="text"
-                     field="value"
-                     entity={EntityType.NLP_VALUE}
-                   />
-                 </>
-               }
-             />
-           )}
-           entity={EntityType.NLP_ENTITY}
-         />
-       )),
+       row.entities
+         .map((e) => getSampleEntityFromCache(e) as INlpSampleEntity)
+         .map((entity) => (
+           <ChipEntity
+             id={entity.entity}
+             key={entity.id}
+             variant="title"
+             field="name"
+             render={(value) => (
+               <Chip
+                 variant="title"
+                 label={
+                   <>
+                     {value}
+                     {` `}={` `}
+                     <ChipEntity
+                       id={entity.value}
+                       key={entity.value}
+                       variant="text"
+                       field="value"
+                       entity={EntityType.NLP_VALUE}
+                     />
+                   </>
+                 }
+               />
+             )}
+             entity={EntityType.NLP_ENTITY}
+           />
+         )),
      headerName: t("label.entities"),
      sortable: false,
      disableColumnMenu: true,
      renderHeader,
    },
    {
      maxWidth: 90,
+     field: "language",
+     renderCell: ({ row }) => {
+       const language = getLanguageFromCache(row.language);

+       return language?.title;
+     },
+     headerName: t("label.language"),
+     sortable: true,
+     disableColumnMenu: true,
+     renderHeader,
+   },
+   {
+     maxWidth: 90,
      field: "type",
@@ -232,18 +262,33 @@ export default function NlpSample() {
            fullWidth={false}
            sx={{ minWidth: "256px" }}
          />
+         <AutoCompleteEntitySelect<ILanguage, "title", false>
+           fullWidth={false}
+           sx={{
+             minWidth: "150px",
+           }}
+           autoFocus
+           searchFields={["title", "code"]}
+           entity={EntityType.LANGUAGE}
+           format={Format.BASIC}
+           labelKey="title"
+           label={t("label.language")}
+           multiple={false}
+           onChange={(_e, selected) => setLanguage(selected?.id)}
+         />
          <Input
            select
            fullWidth={false}
            sx={{
-             width: "150px",
+             minWidth: "150px",
            }}
            label={t("label.dataset")}
-           value={dataset}
-           onChange={(e) => setDataSet(e.target.value)}
+           value={type}
+           onChange={(e) => setType(e.target.value as NlpSampleType)}
            SelectProps={{
-             ...(dataset !== "" && {
+             ...(type && {
                IconComponent: () => (
-                 <IconButton size="small" onClick={() => setDataSet("")}>
+                 <IconButton size="small" onClick={() => setType(undefined)}>
                    <DeleteIcon />
                  </IconButton>
                ),
@@ -288,7 +333,7 @@ export default function NlpSample() {
            variant="contained"
            href={buildURL(
              apiUrl,
-             `nlpsample/export${dataset ? `?type=${dataset}` : ""}`,
+             `nlpsample/export${type ? `?type=${type}` : ""}`,
            )}
            startIcon={<DownloadIcon />}
          >
@@ -36,18 +36,19 @@ import { useFind } from "@/hooks/crud/useFind";
import { useGetFromCache } from "@/hooks/crud/useGet";
import { useApiClient } from "@/hooks/useApiClient";
import { EntityType, Format } from "@/services/types";
+ import { ILanguage } from "@/types/language.types";
import { INlpEntity } from "@/types/nlp-entity.types";
import {
  INlpDatasetKeywordEntity,
+ INlpDatasetSample,
  INlpDatasetTraitEntity,
  INlpSampleFormAttributes,
- INlpSampleFull,
  NlpSampleType,
} from "@/types/nlp-sample.types";
import { INlpValue } from "@/types/nlp-value.types";

type NlpDatasetSampleProps = {
- sample?: INlpSampleFull;
+ sample?: INlpDatasetSample;
  submitForm: (params: INlpSampleFormAttributes) => void;
};

@@ -90,7 +91,7 @@ const NlpDatasetSample: FC<NlpDatasetSampleProps> = ({
    lookups.includes("trait"),
  );
  const sampleTraitEntities = sample.entities.filter(
-   (e) => typeof e.start === "undefined",
+   (e) => "start" in e && typeof e.start === "undefined",
  );

  if (sampleTraitEntities.length === traitEntities.length) {
@@ -112,9 +113,12 @@ const NlpDatasetSample: FC<NlpDatasetSampleProps> = ({
    defaultValues: {
      type: sample?.type || NlpSampleType.train,
      text: sample?.text || "",
+     language: sample?.language,
      traitEntities: defaultTraitEntities,
      keywordEntities:
-       sample?.entities.filter((e) => typeof e.start === "number") || [],
+       sample?.entities.filter(
+         (e) => "start" in e && typeof e.start === "number",
+       ) || [],
    },
  });
  const currentText = watch("text");
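Since language now travels through the form alongside the entity lists, a filled-in INlpSampleFormAttributes value looks roughly like this (field names are taken from the defaultValues and reset calls in this file; the concrete values and the exact interface definition are not part of this diff):

// Illustrative only -- shape inferred from the form code above and below.
const form = {
  type: NlpSampleType.train,
  text: "Book a flight to Paris",
  language: "en", // a code, since the language field below uses idKey="code"
  traitEntities: [{ entity: "intent", value: "book_flight" }],
  keywordEntities: [{ entity: "location", value: "Paris", start: 17, end: 22 }],
};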
@@ -167,7 +171,7 @@ const NlpDatasetSample: FC<NlpDatasetSampleProps> = ({

  const findInsertIndex = (newItem: INlpDatasetKeywordEntity): number => {
    const index = keywordEntities.findIndex(
-     (entity) => entity.start > newItem.start,
+     (entity) => entity.start && newItem.start && entity.start > newItem.start,
    );

    return index === -1 ? keywordEntities.length : index;
@@ -177,11 +181,15 @@ const NlpDatasetSample: FC<NlpDatasetSampleProps> = ({
    start: number;
    end: number;
  } | null>(null);
- const onSubmitForm = (params: INlpSampleFormAttributes) => {
-   submitForm(params);
-   reset();
-   removeTraitEntity();
-   removeKeywordEntity();
+ const onSubmitForm = (form: INlpSampleFormAttributes) => {
+   submitForm(form);
+   reset({
+     type: form?.type || NlpSampleType.train,
+     text: "",
+     language: form?.language,
+     traitEntities: defaultTraitEntities,
+     keywordEntities: [],
+   });
    refetchEntities();
  };

@@ -247,6 +255,37 @@ const NlpDatasetSample: FC<NlpDatasetSampleProps> = ({
          />
        </ContentItem>
+       <Box display="flex" flexDirection="column">
+         <ContentItem
+           display="flex"
+           flexDirection="row"
+           maxWidth="50%"
+           gap={2}
+         >
+           <Controller
+             name="language"
+             control={control}
+             render={({ field }) => {
+               const { onChange, ...rest } = field;

+               return (
+                 <AutoCompleteEntitySelect<ILanguage, "title", false>
+                   fullWidth={true}
+                   autoFocus
+                   searchFields={["title", "code"]}
+                   entity={EntityType.LANGUAGE}
+                   format={Format.BASIC}
+                   labelKey="title"
+                   idKey="code"
+                   label={t("label.language")}
+                   multiple={false}
+                   {...field}
+                   onChange={(_e, selected) => onChange(selected?.code)}
+                   {...rest}
+                 />
+               );
+             }}
+           />
+         </ContentItem>
        {traitEntities.map((traitEntity, index) => (
          <ContentItem
            key={traitEntity.id}
@@ -81,6 +81,7 @@ export const Nlp = ({
      text: params.text,
      type: params.type,
      entities: [...params.traitEntities, ...params.keywordEntities],
+     language: params.language,
    });
  };

@@ -175,14 +175,24 @@ export const SettingEntity = new schema.Entity(EntityType.SETTING, {
  processStrategy: processCommonStrategy,
});

- export const NlpSampleEntity = new schema.Entity(
-   EntityType.NLP_SAMPLE,
+ export const LanguageEntity = new schema.Entity(
+   EntityType.LANGUAGE,
  undefined,
  {
    idAttribute: ({ id }) => id,
    processStrategy: processCommonStrategy,
  },
);

+ export const TranslationEntity = new schema.Entity(
+   EntityType.TRANSLATION,
+   undefined,
+   {
+     idAttribute: ({ id }) => id,
+     processStrategy: processCommonStrategy,
+   },
+ );
+
export const NlpValueEntity = new schema.Entity(
  EntityType.NLP_VALUE,
  undefined,
@@ -201,27 +211,28 @@ export const NlpEntityEntity = new schema.Entity(
  },
);

NlpValueEntity.define({
  entity: NlpEntityEntity,
});

export const NlpSampleEntityEntity = new schema.Entity(
  EntityType.NLP_SAMPLE_ENTITY,
  undefined,
  {
    entity: NlpEntityEntity,
    value: NlpValueEntity,
  },
  {
    idAttribute: ({ id }) => id,
    processStrategy: processCommonStrategy,
  },
);

- export const LanguageEntity = new schema.Entity(
-   EntityType.LANGUAGE,
-   undefined,
+ export const NlpSampleEntity = new schema.Entity(
+   EntityType.NLP_SAMPLE,
  {
    idAttribute: ({ id }) => id,
    processStrategy: processCommonStrategy,
+   entities: [NlpSampleEntityEntity],
+   language: LanguageEntity,
  },
);

- export const TranslationEntity = new schema.Entity(
-   EntityType.TRANSLATION,
-   undefined,
-   {
-     idAttribute: ({ id }) => id,
-     processStrategy: processCommonStrategy,
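With language wired into NlpSampleEntity, normalizing a populated sample splits the Language document into its own cache slot and leaves an id reference on the sample. A rough sketch (the response object and ids are invented, and any reshaping done by processCommonStrategy is ignored; NlpSampleEntity and EntityType are the ones used in this file):

import { normalize } from "normalizr";

// Invented API response for a single populated sample.
const apiSample = {
  id: "6541f9aa0c0fb70d70039f01",
  text: "Hello Jhon",
  trained: false,
  language: { id: "6541f9aa0c0fb70d70039e10", code: "en", title: "English" },
  entities: [],
};

const { result, entities } = normalize(apiSample, NlpSampleEntity);
// result                                  -> "6541f9aa0c0fb70d70039f01"
// entities[EntityType.LANGUAGE]           -> { "6541f9aa0c0fb70d70039e10": { id, code: "en", ... } }
// entities[EntityType.NLP_SAMPLE][result] -> { ..., language: "6541f9aa0c0fb70d70039e10" }

This is exactly what getLanguageFromCache(row.language) relies on in the sample list above.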
@@ -100,7 +100,7 @@ export const POPULATE_BY_TYPE = {
    "trigger_labels",
    "assignTo",
  ],
- [EntityType.NLP_SAMPLE]: ["entities"],
+ [EntityType.NLP_SAMPLE]: ["language", "entities"],
  [EntityType.NLP_SAMPLE_ENTITY]: ["sample", "entity", "value"],
  [EntityType.NLP_ENTITY]: ["values"],
  [EntityType.NLP_VALUE]: ["entity"],
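Adding "language" here is what lets the sample list ask the API to resolve that relation along with the entities. Assuming the usual populate query parameter handled by PopulatePipe (its exact syntax is not shown in this diff), the request would look roughly like:

// Query-string shape is an assumption, shown only to illustrate the intent.
await fetch("/nlpsample?populate=language,entities");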
@@ -10,6 +10,7 @@
import { EntityType, Format } from "@/services/types";

import { IBaseSchema, IFormat, OmitPopulate } from "./base.types";
+ import { ILanguage } from "./language.types";
import { INlpSampleEntity } from "./nlp-sample_entity.types";

export enum NlpSampleType {
@@ -23,6 +24,7 @@ export interface INlpSampleAttributes {
  trained?: boolean;
  type?: NlpSampleType;
  entities: string[];
+ language: string;
}

export interface INlpSampleStub
@@ -31,14 +33,15 @@ export interface INlpSampleStub

export interface INlpSample extends INlpSampleStub, IFormat<Format.BASIC> {
  entities: string[];
+ language: string;
}

export interface INlpSampleFull extends INlpSampleStub, IFormat<Format.FULL> {
  entities: INlpSampleEntity[];
+ language: ILanguage;
}

// Dataset Trainer

export interface INlpDatasetTraitEntity {
  entity: string; // entity name
  value: string; // value name
@@ -60,3 +63,7 @@ export interface INlpDatasetSampleAttributes
  extends Omit<INlpSampleAttributes, "entities"> {
  entities: (INlpDatasetTraitEntity | INlpDatasetKeywordEntity)[];
}
+
+ export interface INlpDatasetSample
+   extends IBaseSchema,
+     INlpDatasetSampleAttributes {}
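Putting the new types together, an INlpDatasetSample carries a language code plus a mixed entity list: trait entities have no offsets, keyword entities point into text. An invented example (the id and timestamp fields are assumed to come from IBaseSchema, which is not shown in this diff):

// Illustrative only -- every concrete value here is made up.
const sample: INlpDatasetSample = {
  id: "6541f9aa0c0fb70d70039f01",
  createdAt: "2024-11-01T10:00:00.000Z",
  updatedAt: "2024-11-01T10:00:00.000Z",
  text: "Book a flight to Paris",
  type: NlpSampleType.train,
  trained: false,
  language: "en",
  entities: [
    { entity: "intent", value: "book_flight" },                 // trait entity: no start/end
    { entity: "location", value: "Paris", start: 17, end: 22 }, // keyword entity: offsets into `text`
  ],
};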