dify
This commit is contained in:
269
dify/web/service/knowledge/use-create-dataset.ts
Normal file
269
dify/web/service/knowledge/use-create-dataset.ts
Normal file
@@ -0,0 +1,269 @@
|
||||
import groupBy from 'lodash-es/groupBy'
|
||||
import type { MutationOptions } from '@tanstack/react-query'
|
||||
import { useMutation } from '@tanstack/react-query'
|
||||
import { createDocument, createFirstDocument, fetchDefaultProcessRule, fetchFileIndexingEstimate } from '../datasets'
|
||||
import type { IndexingType } from '@/app/components/datasets/create/step-two'
|
||||
import type {
|
||||
ChunkingMode,
|
||||
CrawlOptions,
|
||||
CrawlResultItem,
|
||||
CreateDatasetReq,
|
||||
CreateDatasetResponse,
|
||||
CreateDocumentReq,
|
||||
CustomFile,
|
||||
DataSourceType,
|
||||
FileIndexingEstimateResponse,
|
||||
IndexingEstimateParams,
|
||||
NotionInfo,
|
||||
ProcessRule,
|
||||
ProcessRuleResponse,
|
||||
createDocumentResponse,
|
||||
} from '@/models/datasets'
|
||||
import type { DataSourceProvider, NotionPage } from '@/models/common'
|
||||
import { post } from '../base'
|
||||
|
||||
const NAME_SPACE = 'knowledge/create-dataset'
|
||||
|
||||
export const getNotionInfo = (
|
||||
notionPages: NotionPage[],
|
||||
credentialId: string,
|
||||
) => {
|
||||
const workspacesMap = groupBy(notionPages, 'workspace_id')
|
||||
const workspaces = Object.keys(workspacesMap).map((workspaceId) => {
|
||||
return {
|
||||
workspaceId,
|
||||
pages: workspacesMap[workspaceId],
|
||||
}
|
||||
})
|
||||
return workspaces.map((workspace) => {
|
||||
return {
|
||||
credential_id: credentialId,
|
||||
workspace_id: workspace.workspaceId,
|
||||
pages: workspace.pages.map((page) => {
|
||||
const { page_id, page_name, page_icon, type } = page
|
||||
return {
|
||||
page_id,
|
||||
page_name,
|
||||
page_icon,
|
||||
type,
|
||||
}
|
||||
}),
|
||||
}
|
||||
}) as NotionInfo[]
|
||||
}
|
||||
|
||||
export const getWebsiteInfo = (
|
||||
opts: {
|
||||
websiteCrawlProvider: DataSourceProvider
|
||||
websiteCrawlJobId: string
|
||||
websitePages: CrawlResultItem[]
|
||||
crawlOptions?: CrawlOptions
|
||||
},
|
||||
) => {
|
||||
const { websiteCrawlProvider, websiteCrawlJobId, websitePages, crawlOptions } = opts
|
||||
return {
|
||||
provider: websiteCrawlProvider,
|
||||
job_id: websiteCrawlJobId,
|
||||
urls: websitePages.map(page => page.source_url),
|
||||
only_main_content: crawlOptions?.only_main_content,
|
||||
}
|
||||
}
|
||||
|
||||
// Options shared by every indexing-estimate request, regardless of data source.
type GetFileIndexingEstimateParamsOptionBase = {
  docForm: ChunkingMode
  docLanguage: string
  indexingTechnique: IndexingType
  processRule: ProcessRule
  dataset_id: string
}

// Estimate options for locally uploaded files.
type GetFileIndexingEstimateParamsOptionFile = GetFileIndexingEstimateParamsOptionBase & {
  dataSourceType: DataSourceType.FILE
  files: CustomFile[]
}

// Translate the camelCase UI options into the snake_case request body the
// indexing-estimate API expects, for the file data source.
const getFileIndexingEstimateParamsForFile = ({
  docForm,
  docLanguage,
  dataSourceType,
  files,
  indexingTechnique,
  processRule,
  dataset_id,
}: GetFileIndexingEstimateParamsOptionFile): IndexingEstimateParams => {
  return {
    info_list: {
      data_source_type: dataSourceType,
      file_info_list: {
        // Only ids are sent; the server already holds the uploaded files.
        // `as string[]` assumes every CustomFile has an id by this point —
        // TODO(review): confirm uploads always complete before estimating.
        file_ids: files.map(file => file.id) as string[],
      },
    },
    indexing_technique: indexingTechnique,
    process_rule: processRule,
    doc_form: docForm,
    doc_language: docLanguage,
    dataset_id,
  }
}
|
||||
|
||||
export const useFetchFileIndexingEstimateForFile = (
|
||||
options: GetFileIndexingEstimateParamsOptionFile,
|
||||
mutationOptions: MutationOptions<FileIndexingEstimateResponse> = {},
|
||||
) => {
|
||||
return useMutation({
|
||||
mutationFn: async () => {
|
||||
return fetchFileIndexingEstimate(getFileIndexingEstimateParamsForFile(options))
|
||||
},
|
||||
...mutationOptions,
|
||||
})
|
||||
}
|
||||
|
||||
// Estimate options for the Notion data source.
type GetFileIndexingEstimateParamsOptionNotion = GetFileIndexingEstimateParamsOptionBase & {
  dataSourceType: DataSourceType.NOTION
  notionPages: NotionPage[]
  credential_id: string
}

// Build the indexing-estimate request body for Notion pages; page grouping
// by workspace is delegated to getNotionInfo.
const getFileIndexingEstimateParamsForNotion = ({
  docForm,
  docLanguage,
  dataSourceType,
  notionPages,
  indexingTechnique,
  processRule,
  dataset_id,
  credential_id,
}: GetFileIndexingEstimateParamsOptionNotion): IndexingEstimateParams => {
  return {
    info_list: {
      data_source_type: dataSourceType,
      notion_info_list: getNotionInfo(notionPages, credential_id),
    },
    indexing_technique: indexingTechnique,
    process_rule: processRule,
    doc_form: docForm,
    doc_language: docLanguage,
    dataset_id,
  }
}
|
||||
|
||||
export const useFetchFileIndexingEstimateForNotion = (
|
||||
options: GetFileIndexingEstimateParamsOptionNotion,
|
||||
mutationOptions: MutationOptions<FileIndexingEstimateResponse> = {},
|
||||
) => {
|
||||
return useMutation({
|
||||
mutationFn: async () => {
|
||||
return fetchFileIndexingEstimate(getFileIndexingEstimateParamsForNotion(options))
|
||||
},
|
||||
...mutationOptions,
|
||||
})
|
||||
}
|
||||
|
||||
// Estimate options for the website-crawl data source.
type GetFileIndexingEstimateParamsOptionWeb = GetFileIndexingEstimateParamsOptionBase & {
  dataSourceType: DataSourceType.WEB
  websitePages: CrawlResultItem[]
  crawlOptions?: CrawlOptions
  websiteCrawlProvider: DataSourceProvider
  websiteCrawlJobId: string
}

// Build the indexing-estimate request body for crawled web pages; the
// crawl-specific payload shaping is delegated to getWebsiteInfo.
const getFileIndexingEstimateParamsForWeb = ({
  docForm,
  docLanguage,
  dataSourceType,
  websitePages,
  crawlOptions,
  websiteCrawlProvider,
  websiteCrawlJobId,
  indexingTechnique,
  processRule,
  dataset_id,
}: GetFileIndexingEstimateParamsOptionWeb): IndexingEstimateParams => {
  return {
    info_list: {
      data_source_type: dataSourceType,
      website_info_list: getWebsiteInfo({
        websiteCrawlProvider,
        websiteCrawlJobId,
        websitePages,
        crawlOptions,
      }),
    },
    indexing_technique: indexingTechnique,
    process_rule: processRule,
    doc_form: docForm,
    doc_language: docLanguage,
    dataset_id,
  }
}
|
||||
|
||||
export const useFetchFileIndexingEstimateForWeb = (
|
||||
options: GetFileIndexingEstimateParamsOptionWeb,
|
||||
mutationOptions: MutationOptions<FileIndexingEstimateResponse> = {},
|
||||
) => {
|
||||
return useMutation({
|
||||
mutationFn: async () => {
|
||||
return fetchFileIndexingEstimate(getFileIndexingEstimateParamsForWeb(options))
|
||||
},
|
||||
...mutationOptions,
|
||||
})
|
||||
}
|
||||
|
||||
export const useCreateFirstDocument = (
|
||||
mutationOptions: MutationOptions<createDocumentResponse, Error, CreateDocumentReq> = {},
|
||||
) => {
|
||||
return useMutation({
|
||||
mutationFn: async (createDocumentReq: CreateDocumentReq,
|
||||
) => {
|
||||
return createFirstDocument({ body: createDocumentReq })
|
||||
},
|
||||
...mutationOptions,
|
||||
})
|
||||
}
|
||||
|
||||
// Mutation hook: create a document in an existing dataset.
export const useCreateDocument = (
  datasetId: string,
  mutationOptions: MutationOptions<createDocumentResponse, Error, CreateDocumentReq> = {},
) => {
  return useMutation({
    mutationFn: async (req: CreateDocumentReq) => {
      return createDocument({ datasetId, body: req })
    },
    ...mutationOptions,
  })
}
|
||||
|
||||
// Mutation hook: fetch the default processing rule.
// The endpoint URL is supplied by the caller at mutate() time.
export const useFetchDefaultProcessRule = (
  mutationOptions: MutationOptions<ProcessRuleResponse, Error, string> = {},
) => {
  return useMutation({
    mutationFn: async (url: string) => {
      return fetchDefaultProcessRule({ url })
    },
    ...mutationOptions,
  })
}
|
||||
|
||||
// Mutation hook: create an empty RAG-pipeline dataset (no request body).
export const useCreatePipelineDataset = (
  mutationOptions: MutationOptions<CreateDatasetResponse, Error> = {},
) => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'create-pipeline-empty-dataset'],
    mutationFn: () => {
      return post<CreateDatasetResponse>('/rag/pipeline/empty-dataset')
    },
    ...mutationOptions,
  })
}

// Mutation hook: create a RAG-pipeline dataset from a customized configuration.
export const useCreatePipelineDatasetFromCustomized = (
  mutationOptions: MutationOptions<CreateDatasetResponse, Error, CreateDatasetReq> = {},
) => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'create-pipeline-dataset'],
    mutationFn: (req: CreateDatasetReq) => {
      return post<CreateDatasetResponse>('/rag/pipeline/dataset', { body: req })
    },
    ...mutationOptions,
  })
}
|
||||
99
dify/web/service/knowledge/use-dataset.ts
Normal file
99
dify/web/service/knowledge/use-dataset.ts
Normal file
@@ -0,0 +1,99 @@
|
||||
import type { MutationOptions } from '@tanstack/react-query'
|
||||
import { useInfiniteQuery, useMutation, useQuery } from '@tanstack/react-query'
|
||||
import type {
|
||||
DataSet,
|
||||
DataSetListResponse,
|
||||
DatasetListRequest,
|
||||
IndexingStatusBatchRequest,
|
||||
IndexingStatusBatchResponse,
|
||||
ProcessRuleResponse,
|
||||
RelatedAppResponse,
|
||||
} from '@/models/datasets'
|
||||
import { get, post } from '../base'
|
||||
import { useInvalid } from '../use-base'
|
||||
import qs from 'qs'
|
||||
import type { CommonResponse } from '@/models/common'
|
||||
|
||||
const NAME_SPACE = 'dataset'

// Query-key prefix for the dataset list; shared with the invalidator below.
const DatasetListKey = [NAME_SPACE, 'list']

// Infinite (paged) dataset list with tag/keyword filtering.
export const useDatasetList = (params: DatasetListRequest) => {
  const { initialPage, tag_ids, limit, include_all, keyword } = params
  return useInfiniteQuery({
    // Every filter participates in the key so changing any filter refetches.
    queryKey: [...DatasetListKey, initialPage, tag_ids, limit, include_all, keyword],
    queryFn: ({ pageParam = 1 }) => {
      // indices:false serializes tag_ids as repeated keys (tag_ids=a&tag_ids=b).
      const urlParams = qs.stringify({
        tag_ids,
        limit,
        include_all,
        keyword,
        page: pageParam,
      }, { indices: false })
      return get<DataSetListResponse>(`/datasets?${urlParams}`)
    },
    // null signals react-query that there are no further pages.
    getNextPageParam: lastPage => lastPage.has_more ? lastPage.page + 1 : null,
    initialPageParam: initialPage,
  })
}

// Invalidate every cached dataset-list page (all filter combinations).
export const useInvalidDatasetList = () => {
  return useInvalid([...DatasetListKey])
}
|
||||
|
||||
// Exported so dataset-detail caches can be invalidated from other modules.
export const datasetDetailQueryKeyPrefix = [NAME_SPACE, 'detail']

// Fetch a single dataset; disabled until a datasetId is available.
export const useDatasetDetail = (datasetId: string) => {
  return useQuery({
    queryKey: [...datasetDetailQueryKeyPrefix, datasetId],
    queryFn: () => get<DataSet>(`/datasets/${datasetId}`),
    enabled: !!datasetId,
  })
}

// Apps that reference this dataset.
export const useDatasetRelatedApps = (datasetId: string) => {
  return useQuery({
    queryKey: [NAME_SPACE, 'related-apps', datasetId],
    queryFn: () => get<RelatedAppResponse>(`/datasets/${datasetId}/related-apps`),
  })
}

// Check indexing status of a document batch on demand.
// NOTE(review): modelled as a mutation (manually triggered) even though the
// underlying request is a GET — callers drive the polling loop themselves.
export const useIndexingStatusBatch = (
  params: IndexingStatusBatchRequest,
  mutationOptions: MutationOptions<IndexingStatusBatchResponse, Error> = {},
) => {
  const { datasetId, batchId } = params
  return useMutation({
    mutationKey: [NAME_SPACE, 'indexing-status-batch', datasetId, batchId],
    mutationFn: () => get<IndexingStatusBatchResponse>(`/datasets/${datasetId}/batch/${batchId}/indexing-status`),
    ...mutationOptions,
  })
}

// Processing rule applied to a specific document.
export const useProcessRule = (documentId: string) => {
  return useQuery<ProcessRuleResponse>({
    queryKey: [NAME_SPACE, 'process-rule', documentId],
    queryFn: () => get<ProcessRuleResponse>('/datasets/process-rule', { params: { document_id: documentId } }),
  })
}

// Base URL of the dataset service API.
export const useDatasetApiBaseUrl = () => {
  return useQuery<{ api_base_url: string }>({
    queryKey: [NAME_SPACE, 'api-base-info'],
    queryFn: () => get<{ api_base_url: string }>('/datasets/api-base-info'),
  })
}

// Enable service-API (API key) access for a dataset.
export const useEnableDatasetServiceApi = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'enable-api'],
    mutationFn: (datasetId: string) => post<CommonResponse>(`/datasets/${datasetId}/api-keys/enable`),
  })
}

// Disable service-API (API key) access for a dataset.
export const useDisableDatasetServiceApi = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'disable-api'],
    mutationFn: (datasetId: string) => post<CommonResponse>(`/datasets/${datasetId}/api-keys/disable`),
  })
}
|
||||
165
dify/web/service/knowledge/use-document.ts
Normal file
165
dify/web/service/knowledge/use-document.ts
Normal file
@@ -0,0 +1,165 @@
|
||||
import {
|
||||
useMutation,
|
||||
useQuery,
|
||||
} from '@tanstack/react-query'
|
||||
import { del, get, patch } from '../base'
|
||||
import { useInvalid } from '../use-base'
|
||||
import type { MetadataType, SortType } from '../datasets'
|
||||
import { pauseDocIndexing, resumeDocIndexing } from '../datasets'
|
||||
import type { DocumentDetailResponse, DocumentListResponse, UpdateDocumentBatchParams } from '@/models/datasets'
|
||||
import { DocumentActionType } from '@/models/datasets'
|
||||
import type { CommonResponse } from '@/models/common'
|
||||
import { normalizeStatusForQuery } from '@/app/components/datasets/documents/status-filter'
|
||||
|
||||
const NAME_SPACE = 'knowledge/document'

// Query-key prefix for document-list queries; exported so other modules
// (e.g. metadata hooks) can invalidate the list cache.
export const useDocumentListKey = [NAME_SPACE, 'documentList']
// Paginated, filterable document list for a dataset.
// `refetchInterval` enables polling (e.g. while documents are still indexing).
export const useDocumentList = (payload: {
  datasetId: string
  query: {
    keyword: string
    page: number
    limit: number
    sort?: SortType
    status?: string
  },
  refetchInterval?: number | false
}) => {
  const { query, datasetId, refetchInterval } = payload
  const { keyword, page, limit, sort, status } = query
  // Normalize the raw status filter once; it is used for both the request
  // params and the cache key so the two always agree.
  const normalizedStatus = normalizeStatusForQuery(status)
  const params: Record<string, number | string> = {
    keyword,
    page,
    limit,
  }
  // Optional filters are added only when meaningful; 'all' means "no filter".
  if (sort)
    params.sort = sort
  if (normalizedStatus && normalizedStatus !== 'all')
    params.status = normalizedStatus
  return useQuery<DocumentListResponse>({
    queryKey: [...useDocumentListKey, datasetId, keyword, page, limit, sort, normalizedStatus],
    queryFn: () => get<DocumentListResponse>(`/datasets/${datasetId}/documents`, {
      params,
    }),
    refetchInterval,
  })
}

// Invalidate cached document lists — for one dataset when `datasetId` is
// given, otherwise for every dataset.
export const useInvalidDocumentList = (datasetId?: string) => {
  return useInvalid(datasetId ? [...useDocumentListKey, datasetId] : useDocumentListKey)
}
|
||||
|
||||
// Key prefix for the auto-disabled-documents query (shared with its invalidator).
const useAutoDisabledDocumentKey = [NAME_SPACE, 'autoDisabledDocument']
// Documents that were automatically disabled for this dataset
// (server exposes them via the auto-disable logs endpoint).
export const useAutoDisabledDocuments = (datasetId: string) => {
  return useQuery({
    queryKey: [...useAutoDisabledDocumentKey, datasetId],
    queryFn: () => get<{ document_ids: string[] }>(`/datasets/${datasetId}/auto-disable-logs`),
  })
}

// Invalidate the auto-disabled-documents cache for all datasets.
export const useInvalidDisabledDocument = () => {
  return useInvalid(useAutoDisabledDocumentKey)
}
|
||||
|
||||
const toBatchDocumentsIdParams = (documentIds: string[] | string) => {
|
||||
const ids = Array.isArray(documentIds) ? documentIds : [documentIds]
|
||||
return ids.map(id => `document_id=${id}`).join('&')
|
||||
}
|
||||
|
||||
// Generic batch status-change mutation; the concrete action (enable/disable/
// archive/un_archive) is baked in by the wrapper hooks below.
export const useDocumentBatchAction = (action: DocumentActionType) => {
  return useMutation({
    // A single documentId takes precedence over the documentIds list.
    mutationFn: ({ datasetId, documentIds, documentId }: UpdateDocumentBatchParams) => {
      return patch<CommonResponse>(`/datasets/${datasetId}/documents/status/${action}/batch?${toBatchDocumentsIdParams(documentId || documentIds!)}`)
    },
  })
}

// Enable document(s).
export const useDocumentEnable = () => {
  return useDocumentBatchAction(DocumentActionType.enable)
}

// Disable document(s).
export const useDocumentDisable = () => {
  return useDocumentBatchAction(DocumentActionType.disable)
}

// Archive document(s).
export const useDocumentArchive = () => {
  return useDocumentBatchAction(DocumentActionType.archive)
}

// Un-archive document(s).
export const useDocumentUnArchive = () => {
  return useDocumentBatchAction(DocumentActionType.unArchive)
}
|
||||
|
||||
// Delete one or many documents (single documentId takes precedence).
export const useDocumentDelete = () => {
  return useMutation({
    mutationFn: ({ datasetId, documentIds, documentId }: UpdateDocumentBatchParams) => {
      return del<CommonResponse>(`/datasets/${datasetId}/documents?${toBatchDocumentsIdParams(documentId || documentIds!)}`)
    },
  })
}

// Trigger a re-sync of a Notion-sourced document.
// NOTE(review): uses GET for a side-effecting action — server API design.
export const useSyncDocument = () => {
  return useMutation({
    mutationFn: ({ datasetId, documentId }: UpdateDocumentBatchParams) => {
      return get<CommonResponse>(`/datasets/${datasetId}/documents/${documentId}/notion/sync`)
    },
  })
}

// Trigger a re-sync of a website-crawled document.
// NOTE(review): uses GET for a side-effecting action — server API design.
export const useSyncWebsite = () => {
  return useMutation({
    mutationFn: ({ datasetId, documentId }: UpdateDocumentBatchParams) => {
      return get<CommonResponse>(`/datasets/${datasetId}/documents/${documentId}/website-sync`)
    },
  })
}
|
||||
|
||||
const useDocumentDetailKey = [NAME_SPACE, 'documentDetail', 'withoutMetaData']
|
||||
export const useDocumentDetail = (payload: {
|
||||
datasetId: string
|
||||
documentId: string
|
||||
params: { metadata: MetadataType }
|
||||
}) => {
|
||||
const { datasetId, documentId, params } = payload
|
||||
return useQuery<DocumentDetailResponse>({
|
||||
queryKey: [...useDocumentDetailKey, 'withoutMetaData', datasetId, documentId],
|
||||
queryFn: () => get<DocumentDetailResponse>(`/datasets/${datasetId}/documents/${documentId}`, { params }),
|
||||
})
|
||||
}
|
||||
|
||||
// Fetch a document's metadata view. Shares the detail-key prefix with
// useDocumentDetail so useInvalidDocumentDetail refreshes both variants.
export const useDocumentMetadata = (payload: {
  datasetId: string
  documentId: string
  params: { metadata: MetadataType }
}) => {
  const { datasetId, documentId, params } = payload
  return useQuery<DocumentDetailResponse>({
    queryKey: [...useDocumentDetailKey, 'onlyMetaData', datasetId, documentId],
    queryFn: () => get<DocumentDetailResponse>(`/datasets/${datasetId}/documents/${documentId}`, { params }),
  })
}

// Invalidate all document-detail queries (both metadata variants).
export const useInvalidDocumentDetail = () => {
  return useInvalid(useDocumentDetailKey)
}

// Pause indexing of a document; both ids are mandatory at runtime.
export const useDocumentPause = () => {
  return useMutation({
    mutationFn: ({ datasetId, documentId }: UpdateDocumentBatchParams) => {
      if (!datasetId || !documentId)
        throw new Error('datasetId and documentId are required')
      return pauseDocIndexing({ datasetId, documentId }) as Promise<CommonResponse>
    },
  })
}

// Resume indexing of a paused document; both ids are mandatory at runtime.
export const useDocumentResume = () => {
  return useMutation({
    mutationFn: ({ datasetId, documentId }: UpdateDocumentBatchParams) => {
      if (!datasetId || !documentId)
        throw new Error('datasetId and documentId are required')
      return resumeDocIndexing({ datasetId, documentId }) as Promise<CommonResponse>
    },
  })
}
|
||||
1
dify/web/service/knowledge/use-hit-testing.ts
Normal file
1
dify/web/service/knowledge/use-hit-testing.ts
Normal file
@@ -0,0 +1 @@
|
||||
export {}
|
||||
42
dify/web/service/knowledge/use-import.ts
Normal file
42
dify/web/service/knowledge/use-import.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import { useQuery, useQueryClient } from '@tanstack/react-query'
|
||||
import { get } from '../base'
|
||||
import type { DataSourceNotionWorkspace } from '@/models/common'
|
||||
|
||||
// Identifies which dataset/credential pair a Notion pre-import lookup is for.
type PreImportNotionPagesParams = {
  datasetId: string
  credentialId: string
}

const PRE_IMPORT_NOTION_PAGES_QUERY_KEY = 'notion-pre-import-pages'

// List the Notion workspaces/pages available for import into a dataset.
// retry: 0 — a failure here usually means bad credentials; retrying won't help.
export const usePreImportNotionPages = ({
  datasetId,
  credentialId,
}: PreImportNotionPagesParams) => {
  return useQuery({
    queryKey: [PRE_IMPORT_NOTION_PAGES_QUERY_KEY, datasetId, credentialId],
    queryFn: async () => {
      return get<{ notion_info: DataSourceNotionWorkspace[] }>('/notion/pre-import/pages', {
        params: {
          dataset_id: datasetId,
          credential_id: credentialId,
        },
      })
    },
    retry: 0,
  })
}

// Returns a function that invalidates the pre-import cache for one
// dataset/credential pair (fire-and-forget; the promise is not awaited).
export const useInvalidPreImportNotionPages = () => {
  const queryClient = useQueryClient()
  return ({
    datasetId,
    credentialId,
  }: PreImportNotionPagesParams) => {
    queryClient.invalidateQueries(
      {
        queryKey: [PRE_IMPORT_NOTION_PAGES_QUERY_KEY, datasetId, credentialId],
      },
    )
  }
}
|
||||
84
dify/web/service/knowledge/use-metadata.spec.tsx
Normal file
84
dify/web/service/knowledge/use-metadata.spec.tsx
Normal file
@@ -0,0 +1,84 @@
|
||||
import { DataType } from '@/app/components/datasets/metadata/types'
|
||||
import { act, renderHook } from '@testing-library/react'
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
|
||||
import { useBatchUpdateDocMetadata } from '@/service/knowledge/use-metadata'
|
||||
import { useDocumentListKey } from './use-document'
|
||||
|
||||
// Mock the post function to avoid real network requests
|
||||
jest.mock('@/service/base', () => ({
|
||||
post: jest.fn().mockResolvedValue({ success: true }),
|
||||
}))
|
||||
|
||||
// Mirrors the namespace used inside use-metadata.ts; kept in sync manually.
const NAME_SPACE = 'dataset-metadata'

describe('useBatchUpdateDocMetadata', () => {
  let queryClient: QueryClient

  beforeEach(() => {
    // Create a fresh QueryClient before each test
    queryClient = new QueryClient()
  })

  // Wrapper for React Query context
  const wrapper = ({ children }: { children: React.ReactNode }) => (
    <QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
  )

  it('should correctly invalidate dataset and document caches', async () => {
    const { result } = renderHook(() => useBatchUpdateDocMetadata(), { wrapper })

    // Spy on queryClient.invalidateQueries
    const invalidateSpy = jest.spyOn(queryClient, 'invalidateQueries')

    // Correct payload type: each document has its own metadata_list array

    const payload = {
      dataset_id: 'dataset-1',
      metadata_list: [
        {
          document_id: 'doc-1',
          metadata_list: [
            { key: 'title-1', id: '01', name: 'name-1', type: DataType.string, value: 'new title 01' },
          ],
        },
        {
          document_id: 'doc-2',
          metadata_list: [
            { key: 'title-2', id: '02', name: 'name-1', type: DataType.string, value: 'new title 02' },
          ],
        },
      ],
    }

    // Execute the mutation
    await act(async () => {
      await result.current.mutateAsync(payload)
    })

    // Expect invalidateQueries to have been called exactly 5 times
    // (dataset + document-list prefix + useDocumentListKey + one per document).
    expect(invalidateSpy).toHaveBeenCalledTimes(5)

    // Dataset cache invalidation
    expect(invalidateSpy).toHaveBeenNthCalledWith(1, {
      queryKey: [NAME_SPACE, 'dataset', 'dataset-1'],
    })

    // Document list cache invalidation
    expect(invalidateSpy).toHaveBeenNthCalledWith(2, {
      queryKey: [NAME_SPACE, 'document', 'dataset-1'],
    })

    // useDocumentListKey cache invalidation
    expect(invalidateSpy).toHaveBeenNthCalledWith(3, {
      queryKey: [...useDocumentListKey, 'dataset-1'],
    })

    // Single document cache invalidation — per-document calls run inside a
    // Promise.all, so order is not guaranteed; use arrayContaining.
    expect(invalidateSpy.mock.calls.slice(3)).toEqual(
      expect.arrayContaining([
        [{ queryKey: [NAME_SPACE, 'document', 'dataset-1', 'doc-1'] }],
        [{ queryKey: [NAME_SPACE, 'document', 'dataset-1', 'doc-2'] }],
      ]),
    )
  })
})
|
||||
146
dify/web/service/knowledge/use-metadata.ts
Normal file
146
dify/web/service/knowledge/use-metadata.ts
Normal file
@@ -0,0 +1,146 @@
|
||||
import type { BuiltInMetadataItem, MetadataBatchEditToServer, MetadataItemWithValueLength } from '@/app/components/datasets/metadata/types'
|
||||
import { del, get, patch, post } from '../base'
|
||||
import { useDocumentListKey, useInvalidDocumentList } from './use-document'
|
||||
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
|
||||
import { useInvalid } from '../use-base'
|
||||
import type { DocumentDetailResponse } from '@/models/datasets'
|
||||
|
||||
const NAME_SPACE = 'dataset-metadata'

// Dataset-level metadata fields plus whether built-in fields are enabled.
export const useDatasetMetaData = (datasetId: string) => {
  return useQuery<{ doc_metadata: MetadataItemWithValueLength[], built_in_field_enabled: boolean }>({
    queryKey: [NAME_SPACE, 'dataset', datasetId],
    queryFn: () => {
      return get<{ doc_metadata: MetadataItemWithValueLength[], built_in_field_enabled: boolean }>(`/datasets/${datasetId}/metadata`)
    },
  })
}

// Invalidate the dataset-level metadata cache for one dataset.
export const useInvalidDatasetMetaData = (datasetId: string) => {
  return useInvalid([NAME_SPACE, 'dataset', datasetId])
}
|
||||
|
||||
export const useCreateMetaData = (datasetId: string) => {
|
||||
const invalidDatasetMetaData = useInvalidDatasetMetaData(datasetId)
|
||||
return useMutation({
|
||||
mutationFn: async (payload: BuiltInMetadataItem) => {
|
||||
await post(`/datasets/${datasetId}/metadata`, {
|
||||
body: payload,
|
||||
})
|
||||
await invalidDatasetMetaData()
|
||||
return Promise.resolve(true)
|
||||
},
|
||||
})
|
||||
}
|
||||
// Invalidate every per-document metadata query of a dataset in one shot.
export const useInvalidAllDocumentMetaData = (datasetId: string) => {
  const queryClient = useQueryClient()
  return () => {
    queryClient.invalidateQueries({
      queryKey: [NAME_SPACE, 'document', datasetId],
      exact: false, // invalidate all document metadata: [NAME_SPACE, 'document', datasetId, documentId]
    })
  }
}

// Invalidate every metadata-related cache for a dataset: the dataset-level
// fields, the document list, and each document's metadata.
const useInvalidAllMetaData = (datasetId: string) => {
  const invalidDatasetMetaData = useInvalidDatasetMetaData(datasetId)
  const invalidDocumentList = useInvalidDocumentList(datasetId)
  const invalidateAllDocumentMetaData = useInvalidAllDocumentMetaData(datasetId)

  return async () => {
    // meta data in dataset
    await invalidDatasetMetaData()
    // meta data in document list
    // NOTE(review): deliberately (?) not awaited, unlike its neighbours —
    // confirm whether fire-and-forget is intended here.
    invalidDocumentList()
    // meta data in single document
    await invalidateAllDocumentMetaData() // meta data in document
  }
}
|
||||
|
||||
// Rename a metadata field, then refresh every metadata-related cache.
export const useRenameMeta = (datasetId: string) => {
  const invalidateAllMetaData = useInvalidAllMetaData(datasetId)
  return useMutation({
    mutationFn: async (payload: MetadataItemWithValueLength) => {
      await patch(`/datasets/${datasetId}/metadata/${payload.id}`, {
        body: {
          name: payload.name,
        },
      })
      await invalidateAllMetaData()
    },
  })
}

// Delete a metadata field, then refresh every metadata-related cache.
export const useDeleteMetaData = (datasetId: string) => {
  const invalidateAllMetaData = useInvalidAllMetaData(datasetId)
  return useMutation({
    mutationFn: async (metaDataId: string) => {
      // datasetMetaData = datasetMetaData.filter(item => item.id !== metaDataId)
      await del(`/datasets/${datasetId}/metadata/${metaDataId}`)
      await invalidateAllMetaData()
    },
  })
}
|
||||
|
||||
// List the built-in metadata fields (global, not dataset-specific).
export const useBuiltInMetaDataFields = () => {
  return useQuery<{ fields: BuiltInMetadataItem[] }>({
    queryKey: [NAME_SPACE, 'built-in'],
    queryFn: () => {
      return get('/datasets/metadata/built-in')
    },
  })
}

// Metadata of a single document (metadata: 'only' skips the document body).
export const useDocumentMetaData = ({ datasetId, documentId }: { datasetId: string, documentId: string }) => {
  return useQuery<DocumentDetailResponse>({
    queryKey: [NAME_SPACE, 'document', datasetId, documentId],
    queryFn: () => {
      return get<DocumentDetailResponse>(`/datasets/${datasetId}/documents/${documentId}`, { params: { metadata: 'only' } })
    },
  })
}
|
||||
|
||||
// Batch-edit metadata across multiple documents, then invalidate every
// affected cache. The sequential invalidation order below is asserted by
// use-metadata.spec.tsx — do not reorder casually.
export const useBatchUpdateDocMetadata = () => {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async (payload: {
      dataset_id: string
      metadata_list: MetadataBatchEditToServer
    }) => {
      const documentIds = payload.metadata_list.map(item => item.document_id)
      await post(`/datasets/${payload.dataset_id}/documents/metadata`, {
        body: {
          operation_data: payload.metadata_list,
        },
      })
      // meta data in dataset
      await queryClient.invalidateQueries({
        queryKey: [NAME_SPACE, 'dataset', payload.dataset_id],
      })
      // meta data in document list
      await queryClient.invalidateQueries({
        queryKey: [NAME_SPACE, 'document', payload.dataset_id],
      })
      await queryClient.invalidateQueries({
        queryKey: [...useDocumentListKey, payload.dataset_id],
      })

      // meta data in single document (parallel; per-document order unspecified)
      await Promise.all(documentIds.map(documentId => queryClient.invalidateQueries(
        {
          queryKey: [NAME_SPACE, 'document', payload.dataset_id, documentId],
        },
      )))
    },
  })
}
|
||||
|
||||
export const useUpdateBuiltInStatus = (datasetId: string) => {
|
||||
const invalidDatasetMetaData = useInvalidDatasetMetaData(datasetId)
|
||||
return useMutation({
|
||||
mutationFn: async (enabled: boolean) => {
|
||||
await post(`/datasets/${datasetId}/metadata/built-in/${enabled ? 'enable' : 'disable'}`)
|
||||
invalidDatasetMetaData()
|
||||
},
|
||||
})
|
||||
}
|
||||
172
dify/web/service/knowledge/use-segment.ts
Normal file
172
dify/web/service/knowledge/use-segment.ts
Normal file
@@ -0,0 +1,172 @@
|
||||
import { useMutation, useQuery } from '@tanstack/react-query'
|
||||
import { del, get, patch, post } from '../base'
|
||||
import type { CommonResponse } from '@/models/common'
|
||||
import type {
|
||||
BatchImportResponse,
|
||||
ChildChunkDetail,
|
||||
ChildSegmentsResponse,
|
||||
ChunkingMode,
|
||||
SegmentDetailModel,
|
||||
SegmentUpdater,
|
||||
SegmentsResponse,
|
||||
} from '@/models/datasets'
|
||||
|
||||
const NAME_SPACE = 'segment'

// Query-key prefixes for chunk lists; the enabled-state variants let callers
// invalidate only the filtered views they care about.
export const useSegmentListKey = [NAME_SPACE, 'chunkList']
export const useChunkListEnabledKey = [NAME_SPACE, 'chunkList', { enabled: true }]
export const useChunkListDisabledKey = [NAME_SPACE, 'chunkList', { enabled: false }]
export const useChunkListAllKey = [NAME_SPACE, 'chunkList', { enabled: 'all' }]

// Paginated segment (chunk) list for a document, filterable by keyword and
// enabled state. `disable` suspends fetching without unmounting the hook.
export const useSegmentList = (
  payload: {
    datasetId: string
    documentId: string
    params: {
      page: number
      limit: number
      keyword: string
      enabled: boolean | 'all' | ''
    }
  },
  disable?: boolean,
) => {
  const { datasetId, documentId, params } = payload
  const { page, limit, keyword, enabled } = params
  return useQuery<SegmentsResponse>({
    queryKey: [...useSegmentListKey, { datasetId, documentId, page, limit, keyword, enabled }],
    queryFn: () => {
      return get<SegmentsResponse>(`/datasets/${datasetId}/documents/${documentId}/segments`, { params })
    },
    enabled: !disable,
  })
}
|
||||
|
||||
// Update an existing segment; the response echoes the segment and the
// document's chunking mode.
export const useUpdateSegment = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'update'],
    mutationFn: (payload: { datasetId: string; documentId: string; segmentId: string; body: SegmentUpdater }) => {
      const { datasetId, documentId, segmentId, body } = payload
      return patch<{ data: SegmentDetailModel; doc_form: ChunkingMode }>(`/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}`, { body })
    },
  })
}

// Add a new segment to a document (note: singular `/segment` endpoint).
export const useAddSegment = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'add'],
    mutationFn: (payload: { datasetId: string; documentId: string; body: SegmentUpdater }) => {
      const { datasetId, documentId, body } = payload
      return post<{ data: SegmentDetailModel; doc_form: ChunkingMode }>(`/datasets/${datasetId}/documents/${documentId}/segment`, { body })
    },
  })
}
|
||||
|
||||
export const useEnableSegment = () => {
|
||||
return useMutation({
|
||||
mutationKey: [NAME_SPACE, 'enable'],
|
||||
mutationFn: (payload: { datasetId: string; documentId: string; segmentIds: string[] }) => {
|
||||
const { datasetId, documentId, segmentIds } = payload
|
||||
const query = segmentIds.map(id => `segment_id=${id}`).join('&')
|
||||
return patch<CommonResponse>(`/datasets/${datasetId}/documents/${documentId}/segment/enable?${query}`)
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export const useDisableSegment = () => {
|
||||
return useMutation({
|
||||
mutationKey: [NAME_SPACE, 'disable'],
|
||||
mutationFn: (payload: { datasetId: string; documentId: string; segmentIds: string[] }) => {
|
||||
const { datasetId, documentId, segmentIds } = payload
|
||||
const query = segmentIds.map(id => `segment_id=${id}`).join('&')
|
||||
return patch<CommonResponse>(`/datasets/${datasetId}/documents/${documentId}/segment/disable?${query}`)
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export const useDeleteSegment = () => {
|
||||
return useMutation({
|
||||
mutationKey: [NAME_SPACE, 'delete'],
|
||||
mutationFn: (payload: { datasetId: string; documentId: string; segmentIds: string[] }) => {
|
||||
const { datasetId, documentId, segmentIds } = payload
|
||||
const query = segmentIds.map(id => `segment_id=${id}`).join('&')
|
||||
return del<CommonResponse>(`/datasets/${datasetId}/documents/${documentId}/segments?${query}`)
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export const useChildSegmentListKey = [NAME_SPACE, 'childChunkList']
|
||||
|
||||
export const useChildSegmentList = (
|
||||
payload: {
|
||||
datasetId: string
|
||||
documentId: string
|
||||
segmentId: string
|
||||
params: {
|
||||
page: number
|
||||
limit: number
|
||||
keyword: string
|
||||
}
|
||||
},
|
||||
disable?: boolean,
|
||||
) => {
|
||||
const { datasetId, documentId, segmentId, params } = payload
|
||||
const { page, limit, keyword } = params
|
||||
return useQuery({
|
||||
queryKey: [...useChildSegmentListKey, { datasetId, documentId, segmentId, page, limit, keyword }],
|
||||
queryFn: () => {
|
||||
return get<ChildSegmentsResponse>(`/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}/child_chunks`, { params })
|
||||
},
|
||||
enabled: !disable,
|
||||
})
|
||||
}
|
||||
|
||||
export const useDeleteChildSegment = () => {
|
||||
return useMutation({
|
||||
mutationKey: [NAME_SPACE, 'childChunk', 'delete'],
|
||||
mutationFn: (payload: { datasetId: string; documentId: string; segmentId: string; childChunkId: string }) => {
|
||||
const { datasetId, documentId, segmentId, childChunkId } = payload
|
||||
return del<CommonResponse>(`/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}/child_chunks/${childChunkId}`)
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export const useAddChildSegment = () => {
|
||||
return useMutation({
|
||||
mutationKey: [NAME_SPACE, 'childChunk', 'add'],
|
||||
mutationFn: (payload: { datasetId: string; documentId: string; segmentId: string; body: { content: string } }) => {
|
||||
const { datasetId, documentId, segmentId, body } = payload
|
||||
return post<{ data: ChildChunkDetail }>(`/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}/child_chunks`, { body })
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export const useUpdateChildSegment = () => {
|
||||
return useMutation({
|
||||
mutationKey: [NAME_SPACE, 'childChunk', 'update'],
|
||||
mutationFn: (payload: { datasetId: string; documentId: string; segmentId: string; childChunkId: string; body: { content: string } }) => {
|
||||
const { datasetId, documentId, segmentId, childChunkId, body } = payload
|
||||
return patch<{ data: ChildChunkDetail }>(`/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}/child_chunks/${childChunkId}`, { body })
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export const useSegmentBatchImport = () => {
|
||||
return useMutation({
|
||||
mutationKey: [NAME_SPACE, 'batchImport'],
|
||||
mutationFn: (payload: { url: string; body: { upload_file_id: string } }) => {
|
||||
const { url, body } = payload
|
||||
return post<BatchImportResponse>(url, { body })
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export const useCheckSegmentBatchImportProgress = () => {
|
||||
return useMutation({
|
||||
mutationKey: [NAME_SPACE, 'batchImport', 'checkProgress'],
|
||||
mutationFn: (payload: { jobID: string }) => {
|
||||
const { jobID } = payload
|
||||
return get<BatchImportResponse>(`/datasets/batch_import_status/${jobID}`)
|
||||
},
|
||||
})
|
||||
}
|
||||
Reference in New Issue
Block a user