Mirror of https://github.com/hexastack/hexabot (synced 2025-05-24 14:04:24 +00:00)

feat(frontend): apply nlp language updates

parent 56c6a5306b
commit 0c02b51cf6
@@ -82,6 +82,7 @@ export class MessageRepository extends BaseRepository<
         text: _doc.message.text,
         type: NlpSampleState.inbox,
         trained: false,
+        // @TODO : We need to define the language in the message entity
         language: defaultLang.id,
       };
       try {
@@ -56,4 +56,14 @@ export class LanguageService extends BaseService<Language> {
   async getDefaultLanguage() {
     return await this.findOne({ default: true });
   }
+
+  /**
+   * Retrieves the language by code.
+   *
+   * @returns A promise that resolves to the `Language` object.
+   */
+  @Cacheable(DEFAULT_LANGUAGE_CACHE_KEY)
+  async getLanguageByCode(code: string) {
+    return await this.findOne({ code });
+  }
 }
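A hedged note on the new LanguageService method above: getLanguageByCode() lets callers resolve a language code (e.g. "en") into the stored Language document, and the @Cacheable decorator is applied so the lookup can be served from cache (the exact semantics of @Cacheable and DEFAULT_LANGUAGE_CACHE_KEY are project-specific and assumed here). A minimal sketch of how a consumer might turn a code into an id before persisting a sample, with the injected service typed structurally:

  // Illustrative sketch only; `languageService` stands in for the injected LanguageService.
  async function resolveLanguageId(
    languageService: { getLanguageByCode(code: string): Promise<{ id: string }> },
    code: string,
  ): Promise<string> {
    // Resolve the code to the Language document and keep only its id,
    // which is what the persistence layer stores on the sample.
    const language = await languageService.getLanguageByCode(code);

    return language.id;
  }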
@@ -199,7 +199,7 @@ describe('NlpSampleController', () => {
       trained: true,
       type: NlpSampleState.test,
       entities: [],
-      language: enLang.id,
+      language: 'en',
     };
     const result = await nlpSampleController.create(nlSample);
     expect(result).toEqualPayload({
@@ -279,7 +279,7 @@ describe('NlpSampleController', () => {
           value: 'update',
         },
       ],
-      language: frLang.id,
+      language: 'fr',
     });
     const updatedSample = {
       text: 'updated',
@@ -302,15 +302,12 @@ describe('NlpSampleController', () => {
     });

     it('should throw exception when nlp sample id not found', async () => {
-      const frLang = await languageService.findOne({
-        code: 'fr',
-      });
       await expect(
         nlpSampleController.updateOne(byeJhonSampleId, {
           text: 'updated',
           trained: true,
           type: NlpSampleState.test,
-          language: frLang.id,
+          language: 'fr',
         }),
       ).rejects.toThrow(NotFoundException);
     });
@@ -122,21 +122,24 @@ export class NlpSampleController extends BaseController<
   @CsrfCheck(true)
   @Post()
   async create(
-    @Body() { entities: nlpEntities, ...createNlpSampleDto }: NlpSampleDto,
+    @Body()
+    {
+      entities: nlpEntities,
+      language: languageCode,
+      ...createNlpSampleDto
+    }: NlpSampleDto,
   ): Promise<NlpSampleFull> {
-    const nlpSample = await this.nlpSampleService.create(
-      createNlpSampleDto as NlpSampleCreateDto,
-    );
+    const language = await this.languageService.getLanguageByCode(languageCode);
+    const nlpSample = await this.nlpSampleService.create({
+      ...createNlpSampleDto,
+      language: language.id,
+    });

     const entities = await this.nlpSampleEntityService.storeSampleEntities(
       nlpSample,
       nlpEntities,
     );

-    const language = await this.languageService.findOne(
-      createNlpSampleDto.language,
-    );
-
     return {
       ...nlpSample,
       entities,
@@ -250,7 +253,11 @@ export class NlpSampleController extends BaseController<
   async findPage(
     @Query(PageQueryPipe) pageQuery: PageQueryDto<NlpSample>,
     @Query(PopulatePipe) populate: string[],
-    @Query(new SearchFilterPipe<NlpSample>({ allowedFields: ['text', 'type'] }))
+    @Query(
+      new SearchFilterPipe<NlpSample>({
+        allowedFields: ['text', 'type', 'language'],
+      }),
+    )
     filters: TFilterQuery<NlpSample>,
   ) {
     return this.canPopulate(populate)
@@ -270,11 +277,12 @@ export class NlpSampleController extends BaseController<
   @Patch(':id')
   async updateOne(
     @Param('id') id: string,
-    @Body() updateNlpSampleDto: NlpSampleDto,
+    @Body() { entities, language: languageCode, ...sampleAttrs }: NlpSampleDto,
   ): Promise<NlpSampleFull> {
-    const { entities, ...sampleAttrs } = updateNlpSampleDto;
+    const language = await this.languageService.getLanguageByCode(languageCode);
     const sample = await this.nlpSampleService.updateOne(id, {
       ...sampleAttrs,
+      language: language.id,
       trained: false,
     });

@@ -288,8 +296,6 @@ export class NlpSampleController extends BaseController<
     const updatedSampleEntities =
       await this.nlpSampleEntityService.storeSampleEntities(sample, entities);

-    const language = await this.languageService.findOne(sampleAttrs.language);
-
     return {
       ...sample,
       language,
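A hedged note on the controller changes above: the create and updateOne handlers now accept a language code in the request body and resolve it to the stored language id via languageService.getLanguageByCode() before persisting, instead of expecting the client to send an ObjectId. A sketch of the payload a client might now send; the "nlpsample" route is inferred from the export URL used in the frontend, and the field values are illustrative:

  // Illustrative request sketch only; the route path and entity fields are assumptions.
  const body = {
    text: "I want to book a flight",
    type: "train",
    entities: [{ entity: "intent", value: "book_flight" }],
    language: "fr", // language code; the controller maps it to the language id
  };

  await fetch("/nlpsample", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
  });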
@@ -43,7 +43,7 @@ export class NlpSampleCreateDto {
   @IsOptional()
   type?: NlpSampleState;

-  @ApiProperty({ description: 'NLP sample language', type: String })
+  @ApiProperty({ description: 'NLP sample language id', type: String })
   @IsString()
   @IsNotEmpty()
   @IsObjectId({ message: 'Language must be a valid ObjectId' })
@@ -56,6 +56,11 @@ export class NlpSampleDto extends NlpSampleCreateDto {
   })
   @IsOptional()
   entities?: NlpSampleEntityValue[];
+
+  @ApiProperty({ description: 'NLP sample language code', type: String })
+  @IsString()
+  @IsNotEmpty()
+  language: string;
 }

 export class NlpSampleUpdateDto extends PartialType(NlpSampleCreateDto) {}
@@ -17,14 +17,14 @@ import { DialogControlProps } from "@/hooks/useDialog";
 import { useToast } from "@/hooks/useToast";
 import { EntityType } from "@/services/types";
 import {
+  INlpDatasetSample,
   INlpDatasetSampleAttributes,
   INlpSampleFormAttributes,
-  INlpSampleFull,
 } from "@/types/nlp-sample.types";

 import NlpDatasetSample from "./components/NlpTrainForm";

-export type NlpSampleDialogProps = DialogControlProps<INlpSampleFull>;
+export type NlpSampleDialogProps = DialogControlProps<INlpDatasetSample>;
 export const NlpSampleDialog: FC<NlpSampleDialogProps> = ({
   open,
   data: sample,
@@ -44,15 +44,16 @@ export const NlpSampleDialog: FC<NlpSampleDialogProps> = ({
       toast.success(t("message.success_save"));
     },
   });
-  const onSubmitForm = (params: INlpSampleFormAttributes) => {
+  const onSubmitForm = (form: INlpSampleFormAttributes) => {
     if (sample?.id) {
       updateSample(
         {
           id: sample.id,
           params: {
-            text: params.text,
-            type: params.type,
-            entities: [...params.keywordEntities, ...params.traitEntities],
+            text: form.text,
+            type: form.type,
+            entities: [...form.keywordEntities, ...form.traitEntities],
+            language: form.language,
           },
         },
         {
@@ -26,6 +26,7 @@ import { useTranslation } from "react-i18next";

 import { DeleteDialog } from "@/app-components/dialogs";
 import { ChipEntity } from "@/app-components/displays/ChipEntity";
+import AutoCompleteEntitySelect from "@/app-components/inputs/AutoCompleteEntitySelect";
 import { FilterTextfield } from "@/app-components/inputs/FilterTextfield";
 import { Input } from "@/app-components/inputs/Input";
 import {
@@ -43,9 +44,10 @@ import { useHasPermission } from "@/hooks/useHasPermission";
 import { useSearch } from "@/hooks/useSearch";
 import { useToast } from "@/hooks/useToast";
 import { EntityType, Format } from "@/services/types";
+import { ILanguage } from "@/types/language.types";
 import {
+  INlpDatasetSample,
   INlpSample,
-  INlpSampleFull,
   NlpSampleType,
 } from "@/types/nlp-sample.types";
 import { INlpSampleEntity } from "@/types/nlp-sample_entity.types";
@@ -66,12 +68,17 @@ export default function NlpSample() {
   const { apiUrl } = useConfig();
   const { toast } = useToast();
   const { t } = useTranslation();
-  const [dataset, setDataSet] = useState("");
+  const [type, setType] = useState<NlpSampleType | undefined>(undefined);
+  const [language, setLanguage] = useState<string | undefined>(undefined);
   const hasPermission = useHasPermission();
   const getNlpEntityFromCache = useGetFromCache(EntityType.NLP_ENTITY);
   const getNlpValueFromCache = useGetFromCache(EntityType.NLP_VALUE);
+  const getSampleEntityFromCache = useGetFromCache(
+    EntityType.NLP_SAMPLE_ENTITY,
+  );
+  const getLanguageFromCache = useGetFromCache(EntityType.LANGUAGE);
   const { onSearch, searchPayload } = useSearch<INlpSample>({
-    $eq: dataset === "" ? [] : [{ type: dataset as NlpSampleType }],
+    $eq: [...(type ? [{ type }] : []), ...(language ? [{ language }] : [])],
     $iLike: ["text"],
   });
   const { mutateAsync: deleteNlpSample } = useDelete(EntityType.NLP_SAMPLE, {
@@ -90,21 +97,29 @@ export default function NlpSample() {
     },
   );
   const deleteDialogCtl = useDialog<string>(false);
-  const editDialogCtl = useDialog<INlpSampleFull>(false);
+  const editDialogCtl = useDialog<INlpDatasetSample>(false);
   const importDialogCtl = useDialog<never>(false);
-  const actionColumns = getActionsColumn<INlpSampleFull>(
+  const actionColumns = getActionsColumn<INlpSample>(
     [
       {
         label: ActionColumnLabel.Edit,
-        action: ({ entities, ...rest }) => {
-          const data: INlpSampleFull = {
+        action: ({ entities, language, ...rest }) => {
+          const lang = getLanguageFromCache(language) as ILanguage;
+          const data: INlpDatasetSample = {
             ...rest,
-            entities: entities?.map(({ end, start, value, entity }) => ({
+            entities: entities?.map((e) => {
+              const sampleEntity = getSampleEntityFromCache(e);
+              const { end, start, value, entity } =
+                sampleEntity as INlpSampleEntity;
+
+              return {
                 end,
                 start,
-              value: getNlpValueFromCache(value)?.value,
-              entity: getNlpEntityFromCache(entity)?.name,
-            })) as unknown as INlpSampleEntity[],
+                value: getNlpValueFromCache(value)?.value || "",
+                entity: getNlpEntityFromCache(entity)?.name || "",
+              };
+            }),
+            language: lang.code,
           };

           editDialogCtl.openDialog(data);
@@ -119,7 +134,7 @@ export default function NlpSample() {
     ],
     t("label.operations"),
   );
-  const columns: GridColDef<INlpSampleFull>[] = [
+  const columns: GridColDef<INlpSample>[] = [
     {
       flex: 1,
       field: "text",
@@ -132,7 +147,9 @@ export default function NlpSample() {
       flex: 1,
       field: "entities",
       renderCell: ({ row }) =>
-        row.entities.map((entity) => (
+        row.entities
+          .map((e) => getSampleEntityFromCache(e) as INlpSampleEntity)
+          .map((entity) => (
           <ChipEntity
             id={entity.entity}
             key={entity.id}
@@ -164,6 +181,19 @@ export default function NlpSample() {
       disableColumnMenu: true,
       renderHeader,
     },
+    {
+      maxWidth: 90,
+      field: "language",
+      renderCell: ({ row }) => {
+        const language = getLanguageFromCache(row.language);
+
+        return language?.title;
+      },
+      headerName: t("label.language"),
+      sortable: true,
+      disableColumnMenu: true,
+      renderHeader,
+    },
     {
       maxWidth: 90,
       field: "type",
@@ -232,18 +262,33 @@ export default function NlpSample() {
          fullWidth={false}
          sx={{ minWidth: "256px" }}
        />
+        <AutoCompleteEntitySelect<ILanguage, "title", false>
+          fullWidth={false}
+          sx={{
+            minWidth: "150px",
+          }}
+          autoFocus
+          searchFields={["title", "code"]}
+          entity={EntityType.LANGUAGE}
+          format={Format.BASIC}
+          labelKey="title"
+          label={t("label.language")}
+          multiple={false}
+          onChange={(_e, selected) => setLanguage(selected?.id)}
+        />
        <Input
          select
+          fullWidth={false}
          sx={{
-            width: "150px",
+            minWidth: "150px",
          }}
          label={t("label.dataset")}
-          value={dataset}
-          onChange={(e) => setDataSet(e.target.value)}
+          value={type}
+          onChange={(e) => setType(e.target.value as NlpSampleType)}
          SelectProps={{
-            ...(dataset !== "" && {
+            ...(type && {
              IconComponent: () => (
-                <IconButton size="small" onClick={() => setDataSet("")}>
+                <IconButton size="small" onClick={() => setType(undefined)}>
                  <DeleteIcon />
                </IconButton>
              ),
@@ -288,7 +333,7 @@ export default function NlpSample() {
            variant="contained"
            href={buildURL(
              apiUrl,
-              `nlpsample/export${dataset ? `?type=${dataset}` : ""}`,
+              `nlpsample/export${type ? `?type=${type}` : ""}`,
            )}
            startIcon={<DownloadIcon />}
          >
@@ -36,18 +36,19 @@ import { useFind } from "@/hooks/crud/useFind";
 import { useGetFromCache } from "@/hooks/crud/useGet";
 import { useApiClient } from "@/hooks/useApiClient";
 import { EntityType, Format } from "@/services/types";
+import { ILanguage } from "@/types/language.types";
 import { INlpEntity } from "@/types/nlp-entity.types";
 import {
   INlpDatasetKeywordEntity,
+  INlpDatasetSample,
   INlpDatasetTraitEntity,
   INlpSampleFormAttributes,
-  INlpSampleFull,
   NlpSampleType,
 } from "@/types/nlp-sample.types";
 import { INlpValue } from "@/types/nlp-value.types";

 type NlpDatasetSampleProps = {
-  sample?: INlpSampleFull;
+  sample?: INlpDatasetSample;
   submitForm: (params: INlpSampleFormAttributes) => void;
 };

@@ -90,7 +91,7 @@ const NlpDatasetSample: FC<NlpDatasetSampleProps> = ({
       lookups.includes("trait"),
   );
   const sampleTraitEntities = sample.entities.filter(
-    (e) => typeof e.start === "undefined",
+    (e) => "start" in e && typeof e.start === "undefined",
   );

   if (sampleTraitEntities.length === traitEntities.length) {
@@ -112,9 +113,12 @@ const NlpDatasetSample: FC<NlpDatasetSampleProps> = ({
     defaultValues: {
       type: sample?.type || NlpSampleType.train,
       text: sample?.text || "",
+      language: sample?.language,
       traitEntities: defaultTraitEntities,
       keywordEntities:
-        sample?.entities.filter((e) => typeof e.start === "number") || [],
+        sample?.entities.filter(
+          (e) => "start" in e && typeof e.start === "number",
+        ) || [],
     },
   });
   const currentText = watch("text");
@@ -167,7 +171,7 @@ const NlpDatasetSample: FC<NlpDatasetSampleProps> = ({

   const findInsertIndex = (newItem: INlpDatasetKeywordEntity): number => {
     const index = keywordEntities.findIndex(
-      (entity) => entity.start > newItem.start,
+      (entity) => entity.start && newItem.start && entity.start > newItem.start,
     );

     return index === -1 ? keywordEntities.length : index;
@@ -177,11 +181,15 @@ const NlpDatasetSample: FC<NlpDatasetSampleProps> = ({
     start: number;
     end: number;
   } | null>(null);
-  const onSubmitForm = (params: INlpSampleFormAttributes) => {
-    submitForm(params);
-    reset();
-    removeTraitEntity();
-    removeKeywordEntity();
+  const onSubmitForm = (form: INlpSampleFormAttributes) => {
+    submitForm(form);
+    reset({
+      type: form?.type || NlpSampleType.train,
+      text: "",
+      language: form?.language,
+      traitEntities: defaultTraitEntities,
+      keywordEntities: [],
+    });
     refetchEntities();
   };

@@ -247,6 +255,37 @@ const NlpDatasetSample: FC<NlpDatasetSampleProps> = ({
           />
         </ContentItem>
         <Box display="flex" flexDirection="column">
+          <ContentItem
+            display="flex"
+            flexDirection="row"
+            maxWidth="50%"
+            gap={2}
+          >
+            <Controller
+              name="language"
+              control={control}
+              render={({ field }) => {
+                const { onChange, ...rest } = field;
+
+                return (
+                  <AutoCompleteEntitySelect<ILanguage, "title", false>
+                    fullWidth={true}
+                    autoFocus
+                    searchFields={["title", "code"]}
+                    entity={EntityType.LANGUAGE}
+                    format={Format.BASIC}
+                    labelKey="title"
+                    idKey="code"
+                    label={t("label.language")}
+                    multiple={false}
+                    {...field}
+                    onChange={(_e, selected) => onChange(selected?.code)}
+                    {...rest}
+                  />
+                );
+              }}
+            />
+          </ContentItem>
          {traitEntities.map((traitEntity, index) => (
            <ContentItem
              key={traitEntity.id}
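A hedged note on the train form changes above: the language field is registered with react-hook-form through <Controller>, and its onChange stores the selected language code rather than the language id, which is the value the dialog and the Nlp screen then forward on submit. A minimal standalone sketch of the same Controller pattern using a plain select and assumed option data (the real form uses AutoCompleteEntitySelect bound to the LANGUAGE entity):

  // Illustrative sketch only; option data and the FormValues shape are assumptions.
  import { Controller, useForm } from "react-hook-form";

  type FormValues = { language?: string };

  function LanguagePickerSketch() {
    const { control } = useForm<FormValues>({ defaultValues: { language: "en" } });

    return (
      <Controller
        name="language"
        control={control}
        render={({ field: { onChange, value } }) => (
          // The form stores the language *code* ("en", "fr", ...) as the field value.
          <select value={value} onChange={(e) => onChange(e.target.value)}>
            <option value="en">English</option>
            <option value="fr">Français</option>
          </select>
        )}
      />
    );
  }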
@@ -81,6 +81,7 @@ export const Nlp = ({
       text: params.text,
       type: params.type,
       entities: [...params.traitEntities, ...params.keywordEntities],
+      language: params.language,
     });
   };

@@ -175,14 +175,24 @@ export const SettingEntity = new schema.Entity(EntityType.SETTING, {
   processStrategy: processCommonStrategy,
 });

-export const NlpSampleEntity = new schema.Entity(
-  EntityType.NLP_SAMPLE,
+export const LanguageEntity = new schema.Entity(
+  EntityType.LANGUAGE,
   undefined,
   {
     idAttribute: ({ id }) => id,
     processStrategy: processCommonStrategy,
   },
 );

+export const TranslationEntity = new schema.Entity(
+  EntityType.TRANSLATION,
+  undefined,
+  {
+    idAttribute: ({ id }) => id,
+    processStrategy: processCommonStrategy,
+  },
+);
+
 export const NlpValueEntity = new schema.Entity(
   EntityType.NLP_VALUE,
   undefined,
@@ -201,27 +211,28 @@ export const NlpEntityEntity = new schema.Entity(
   },
 );

+NlpValueEntity.define({
+  entity: NlpEntityEntity,
+});
+
 export const NlpSampleEntityEntity = new schema.Entity(
   EntityType.NLP_SAMPLE_ENTITY,
-  undefined,
+  {
+    entity: NlpEntityEntity,
+    value: NlpValueEntity,
+  },
   {
     idAttribute: ({ id }) => id,
     processStrategy: processCommonStrategy,
   },
 );

-export const LanguageEntity = new schema.Entity(
-  EntityType.LANGUAGE,
-  undefined,
+export const NlpSampleEntity = new schema.Entity(
+  EntityType.NLP_SAMPLE,
   {
-    idAttribute: ({ id }) => id,
-    processStrategy: processCommonStrategy,
+    entities: [NlpSampleEntityEntity],
+    language: LanguageEntity,
   },
-);
-
-export const TranslationEntity = new schema.Entity(
-  EntityType.TRANSLATION,
-  undefined,
   {
     idAttribute: ({ id }) => id,
     processStrategy: processCommonStrategy,
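A hedged note on the schema reordering above: LanguageEntity and TranslationEntity are now declared before NlpSampleEntity so that a populated sample can reference them, and NlpSampleEntity/NlpSampleEntityEntity now declare their nested relations. In practice a populated API response gets flattened into per-entity caches, which is why the frontend now reads sample entities and languages back through useGetFromCache. A sketch of what normalization might produce (payload and cache keys are illustrative, and processCommonStrategy is assumed to keep these fields):

  import { normalize } from "normalizr";

  // Illustrative populated sample as the API might return it.
  const payload = {
    id: "s1",
    text: "hello",
    language: { id: "l1", code: "en", title: "English" },
    entities: [
      { id: "se1", entity: { id: "e1", name: "intent" }, value: { id: "v1", value: "greet" } },
    ],
  };

  const { entities, result } = normalize(payload, NlpSampleEntity);
  // result === "s1"
  // entities[EntityType.NLP_SAMPLE]["s1"].language -> "l1" (id into the LANGUAGE cache)
  // entities[EntityType.NLP_SAMPLE]["s1"].entities -> ["se1"] (ids into the NLP_SAMPLE_ENTITY cache)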
@@ -100,7 +100,7 @@ export const POPULATE_BY_TYPE = {
     "trigger_labels",
     "assignTo",
   ],
-  [EntityType.NLP_SAMPLE]: ["entities"],
+  [EntityType.NLP_SAMPLE]: ["language", "entities"],
   [EntityType.NLP_SAMPLE_ENTITY]: ["sample", "entity", "value"],
   [EntityType.NLP_ENTITY]: ["values"],
   [EntityType.NLP_VALUE]: ["entity"],
@@ -10,6 +10,7 @@
 import { EntityType, Format } from "@/services/types";

 import { IBaseSchema, IFormat, OmitPopulate } from "./base.types";
+import { ILanguage } from "./language.types";
 import { INlpSampleEntity } from "./nlp-sample_entity.types";

 export enum NlpSampleType {
@@ -23,6 +24,7 @@ export interface INlpSampleAttributes {
   trained?: boolean;
   type?: NlpSampleType;
   entities: string[];
+  language: string;
 }

 export interface INlpSampleStub
@@ -31,14 +33,15 @@ export interface INlpSampleStub

 export interface INlpSample extends INlpSampleStub, IFormat<Format.BASIC> {
   entities: string[];
+  language: string;
 }

 export interface INlpSampleFull extends INlpSampleStub, IFormat<Format.FULL> {
   entities: INlpSampleEntity[];
+  language: ILanguage;
 }

 // Dataset Trainer

 export interface INlpDatasetTraitEntity {
   entity: string; // entity name
   value: string; // value name
@@ -60,3 +63,7 @@ export interface INlpDatasetSampleAttributes
   extends Omit<INlpSampleAttributes, "entities"> {
   entities: (INlpDatasetTraitEntity | INlpDatasetKeywordEntity)[];
 }
+
+export interface INlpDatasetSample
+  extends IBaseSchema,
+    INlpDatasetSampleAttributes {}