dify
@@ -0,0 +1,116 @@
'use client'
import type { FC } from 'react'
import React, { useCallback } from 'react'
import type { CustomRunFormProps } from './types'
import { DatasourceType } from '@/models/pipeline'
import LocalFile from '@/app/components/datasets/documents/create-from-pipeline/data-source/local-file'
import OnlineDocuments from '@/app/components/datasets/documents/create-from-pipeline/data-source/online-documents'
import WebsiteCrawl from '@/app/components/datasets/documents/create-from-pipeline/data-source/website-crawl'
import OnlineDrive from '@/app/components/datasets/documents/create-from-pipeline/data-source/online-drive'
import { useDataSourceStore } from '@/app/components/datasets/documents/create-from-pipeline/data-source/store'
import { useOnlineDocument, useOnlineDrive, useWebsiteCrawl } from '@/app/components/rag-pipeline/components/panel/test-run/preparation/hooks'
import Button from '@/app/components/base/button'
import { useTranslation } from 'react-i18next'
import DataSourceProvider from '@/app/components/datasets/documents/create-from-pipeline/data-source/store/provider'
import PanelWrap from '../_base/components/before-run-form/panel-wrap'
import useBeforeRunForm from './hooks/use-before-run-form'

const BeforeRunForm: FC<CustomRunFormProps> = (props) => {
  const {
    nodeId,
    payload,
    onCancel,
  } = props
  const { t } = useTranslation()
  const dataSourceStore = useDataSourceStore()

  const {
    isPending,
    handleRunWithSyncDraft,
    datasourceType,
    datasourceNodeData,
    startRunBtnDisabled,
  } = useBeforeRunForm(props)

  const { clearOnlineDocumentData } = useOnlineDocument()
  const { clearWebsiteCrawlData } = useWebsiteCrawl()
  const { clearOnlineDriveData } = useOnlineDrive()

  const clearDataSourceData = useCallback(() => {
    if (datasourceType === DatasourceType.onlineDocument)
      clearOnlineDocumentData()
    else if (datasourceType === DatasourceType.websiteCrawl)
      clearWebsiteCrawlData()
    else if (datasourceType === DatasourceType.onlineDrive)
      clearOnlineDriveData()
  }, [datasourceType])

  const handleCredentialChange = useCallback((credentialId: string) => {
    const { setCurrentCredentialId } = dataSourceStore.getState()
    clearDataSourceData()
    setCurrentCredentialId(credentialId)
  }, [dataSourceStore])

  return (
    <PanelWrap
      nodeName={payload.title}
      onHide={onCancel}
    >
      <div className='flex flex-col gap-y-5 px-4 pt-4'>
        {datasourceType === DatasourceType.localFile && (
          <LocalFile
            allowedExtensions={datasourceNodeData.fileExtensions || []}
            notSupportBatchUpload
          />
        )}
        {datasourceType === DatasourceType.onlineDocument && (
          <OnlineDocuments
            nodeId={nodeId}
            nodeData={datasourceNodeData}
            isInPipeline
            onCredentialChange={handleCredentialChange}
          />
        )}
        {datasourceType === DatasourceType.websiteCrawl && (
          <WebsiteCrawl
            nodeId={nodeId}
            nodeData={datasourceNodeData}
            isInPipeline
            onCredentialChange={handleCredentialChange}
          />
        )}
        {datasourceType === DatasourceType.onlineDrive && (
          <OnlineDrive
            nodeId={nodeId}
            nodeData={datasourceNodeData}
            isInPipeline
            onCredentialChange={handleCredentialChange}
          />
        )}
        <div className='flex justify-end gap-x-2'>
          <Button onClick={onCancel}>
            {t('common.operation.cancel')}
          </Button>
          <Button
            onClick={handleRunWithSyncDraft}
            variant='primary'
            loading={isPending}
            disabled={isPending || startRunBtnDisabled}
          >
            {t('workflow.singleRun.startRun')}
          </Button>
        </div>
      </div>
    </PanelWrap>
  )
}

const BeforeRunFormWrapper = (props: CustomRunFormProps) => {
  return (
    <DataSourceProvider>
      <BeforeRunForm {...props} />
    </DataSourceProvider>
  )
}

export default React.memo(BeforeRunFormWrapper)
@@ -0,0 +1,59 @@
import { VarType } from '@/app/components/workflow/types'

export const COMMON_OUTPUT = [
  {
    name: 'datasource_type',
    type: VarType.string,
    description: 'local_file, online_document, website_crawl',
  },
]

export const LOCAL_FILE_OUTPUT = [
  {
    name: 'file',
    type: VarType.file,
    description: 'file',
    subItems: [
      {
        name: 'name',
        type: VarType.string,
        description: 'file name',
      },
      {
        name: 'size',
        type: VarType.number,
        description: 'file size',
      },
      {
        name: 'type',
        type: VarType.string,
        description: 'file type',
      },
      {
        name: 'extension',
        type: VarType.string,
        description: 'file extension',
      },
      {
        name: 'mime_type',
        type: VarType.string,
        description: 'file mime type',
      },
      {
        name: 'transfer_method',
        type: VarType.string,
        description: 'file transfer method',
      },
      {
        name: 'url',
        type: VarType.string,
        description: 'file url',
      },
      {
        name: 'related_id',
        type: VarType.string,
        description: 'file related id',
      },
    ],
  },
]
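For illustration only (not part of the commit): getOutputVars in default.ts below flattens these descriptors into { variable, type } pairs before appending RAG and plugin-specific outputs. A minimal sketch of that projection, assuming only the exports above:

// Hypothetical sketch — mirrors how default.ts consumes the constants.
import { COMMON_OUTPUT, LOCAL_FILE_OUTPUT } from './constants'

const staticOutputVars = [
  ...COMMON_OUTPUT.map(item => ({ variable: item.name, type: item.type })),
  ...LOCAL_FILE_OUTPUT.map(item => ({ variable: item.name, type: item.type })),
]
// => one entry per descriptor, e.g. { variable: 'datasource_type', type: VarType.string }
console.log(staticOutputVars)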
dify/web/app/components/workflow/nodes/data-source/default.ts
@@ -0,0 +1,111 @@
import type { NodeDefault } from '../../types'
import type { DataSourceNodeType } from './types'
import { DataSourceClassification } from './types'
import { genNodeMetaData } from '@/app/components/workflow/utils'
import { BlockEnum } from '@/app/components/workflow/types'
import {
  COMMON_OUTPUT,
  LOCAL_FILE_OUTPUT,
} from './constants'
import { VarType as VarKindType } from '@/app/components/workflow/nodes/tool/types'
import { getMatchedSchemaType } from '../_base/components/variable/use-match-schema-type'

const i18nPrefix = 'workflow.errorMsg'

const metaData = genNodeMetaData({
  sort: -1,
  type: BlockEnum.DataSource,
  isStart: true,
  isRequired: true,
})
const nodeDefault: NodeDefault<DataSourceNodeType> = {
  metaData,
  defaultValue: {
    datasource_parameters: {},
    datasource_configurations: {},
  },
  checkValid(payload, t, moreDataForCheckValid) {
    const { dataSourceInputsSchema, notAuthed } = moreDataForCheckValid
    let errorMessage = ''
    if (notAuthed)
      errorMessage = t(`${i18nPrefix}.authRequired`)

    if (!errorMessage) {
      dataSourceInputsSchema.filter((field: any) => {
        return field.required
      }).forEach((field: any) => {
        const targetVar = payload.datasource_parameters[field.variable]
        if (!targetVar) {
          errorMessage = t(`${i18nPrefix}.fieldRequired`, { field: field.label })
          return
        }
        const { type: variable_type, value } = targetVar
        if (variable_type === VarKindType.variable) {
          if (!errorMessage && (!value || value.length === 0))
            errorMessage = t(`${i18nPrefix}.fieldRequired`, { field: field.label })
        }
        else {
          if (!errorMessage && (value === undefined || value === null || value === ''))
            errorMessage = t(`${i18nPrefix}.fieldRequired`, { field: field.label })
        }
      })
    }

    return {
      isValid: !errorMessage,
      errorMessage,
    }
  },
  getOutputVars(payload, allPluginInfoList, ragVars = [], { schemaTypeDefinitions } = { schemaTypeDefinitions: [] }) {
    const {
      plugin_id,
      datasource_name,
      provider_type,
    } = payload

    const isLocalFile = provider_type === DataSourceClassification.localFile
    const currentDataSource = allPluginInfoList.dataSourceList?.find((ds: any) => ds.plugin_id === plugin_id)
    const currentDataSourceItem = currentDataSource?.tools?.find((tool: any) => tool.name === datasource_name)
    const output_schema = currentDataSourceItem?.output_schema
    const dynamicOutputSchema: any[] = []

    if (output_schema?.properties) {
      Object.keys(output_schema.properties).forEach((outputKey) => {
        const output = output_schema.properties[outputKey]
        const dataType = output.type
        let type = dataType === 'array'
          ? `array[${output.items?.type.slice(0, 1).toLocaleLowerCase()}${output.items?.type.slice(1)}]`
          : `${dataType.slice(0, 1).toLocaleLowerCase()}${dataType.slice(1)}`
        const schemaType = getMatchedSchemaType?.(output, schemaTypeDefinitions)

        if (type === 'object' && schemaType === 'file')
          type = 'file'

        dynamicOutputSchema.push({
          variable: outputKey,
          type,
          description: output.description,
          schemaType,
          children: output.type === 'object' ? {
            schema: {
              type: 'object',
              properties: output.properties,
            },
          } : undefined,
        })
      })
    }
    return [
      ...COMMON_OUTPUT.map(item => ({ variable: item.name, type: item.type })),
      ...(
        isLocalFile
          ? LOCAL_FILE_OUTPUT.map(item => ({ variable: item.name, type: item.type }))
          : []
      ),
      ...ragVars,
      ...dynamicOutputSchema,
    ]
  },
}

export default nodeDefault
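For illustration only (not part of the commit): a standalone sketch of the type-normalization rule getOutputVars applies to a datasource plugin's output_schema properties. The SchemaProp shape, toVarTypeLabel helper, and the 'string' fallback for missing item types are introduced here for the sketch and are not repository APIs.

type SchemaProp = { type: string; items?: { type: string } }

// Lower-case the first character of the declared type; wrap array item types as array[...].
const toVarTypeLabel = (prop: SchemaProp): string => {
  const lowerFirst = (s: string) => `${s.slice(0, 1).toLocaleLowerCase()}${s.slice(1)}`
  return prop.type === 'array'
    ? `array[${lowerFirst(prop.items?.type ?? 'string')}]`
    : lowerFirst(prop.type)
}

console.log(toVarTypeLabel({ type: 'String' }))                           // 'string'
console.log(toVarTypeLabel({ type: 'array', items: { type: 'Number' } })) // 'array[number]'
// In getOutputVars, an 'object' output whose schema matches the 'file' schema type is further coerced to 'file'.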
@@ -0,0 +1,207 @@
import { useStoreApi } from 'reactflow'
import type { CustomRunFormProps, DataSourceNodeType } from '../types'
import { useEffect, useMemo, useRef } from 'react'
import { useNodeDataUpdate, useNodesSyncDraft } from '../../../hooks'
import { NodeRunningStatus } from '../../../types'
import { useInvalidLastRun } from '@/service/use-workflow'
import type { NodeRunResult } from '@/types/workflow'
import { fetchNodeInspectVars } from '@/service/workflow'
import { FlowType } from '@/types/common'
import { useDatasourceSingleRun } from '@/service/use-pipeline'
import { useDataSourceStore, useDataSourceStoreWithSelector } from '@/app/components/datasets/documents/create-from-pipeline/data-source/store'
import { DatasourceType } from '@/models/pipeline'
import { TransferMethod } from '@/types/app'
import { useShallow } from 'zustand/react/shallow'

const useBeforeRunForm = ({
  nodeId,
  flowId,
  flowType,
  payload,
  setRunResult,
  isPaused,
  isRunAfterSingleRun,
  setIsRunAfterSingleRun,
  onSuccess,
  appendNodeInspectVars,
}: CustomRunFormProps) => {
  const store = useStoreApi()
  const dataSourceStore = useDataSourceStore()
  const isPausedRef = useRef(isPaused)
  const { handleNodeDataUpdate } = useNodeDataUpdate()

  const datasourceType = payload.provider_type as DatasourceType
  const datasourceNodeData = payload as DataSourceNodeType

  const {
    localFileList,
    onlineDocuments,
    websitePages,
    selectedFileIds,
  } = useDataSourceStoreWithSelector(useShallow(state => ({
    localFileList: state.localFileList,
    onlineDocuments: state.onlineDocuments,
    websitePages: state.websitePages,
    selectedFileIds: state.selectedFileIds,
  })))

  const startRunBtnDisabled = useMemo(() => {
    if (!datasourceNodeData) return false
    if (datasourceType === DatasourceType.localFile)
      return !localFileList.length || localFileList.some(file => !file.file.id)
    if (datasourceType === DatasourceType.onlineDocument)
      return !onlineDocuments.length
    if (datasourceType === DatasourceType.websiteCrawl)
      return !websitePages.length
    if (datasourceType === DatasourceType.onlineDrive)
      return !selectedFileIds.length
    return false
  }, [datasourceNodeData, datasourceType, localFileList, onlineDocuments.length, selectedFileIds.length, websitePages.length])

  useEffect(() => {
    isPausedRef.current = isPaused
  }, [isPaused])

  const runningStatus = payload._singleRunningStatus || NodeRunningStatus.NotStart

  const setNodeRunning = () => {
    handleNodeDataUpdate({
      id: nodeId,
      data: {
        ...payload,
        _singleRunningStatus: NodeRunningStatus.Running,
      },
    })
  }

  const invalidLastRun = useInvalidLastRun(flowType, flowId, nodeId)

  const updateRunResult = async (data: NodeRunResult) => {
    const isPaused = isPausedRef.current

    // The backend doesn't support pausing a single run, so the frontend handles the paused state.
    if (isPaused)
      return

    const canRunLastRun = !isRunAfterSingleRun || runningStatus === NodeRunningStatus.Succeeded
    if (!canRunLastRun) {
      setRunResult(data)
      return
    }

    // A failed run may also update the inspect vars when the node sets an error default output.
    const vars = await fetchNodeInspectVars(FlowType.ragPipeline, flowId, nodeId)
    const { getNodes } = store.getState()
    const nodes = getNodes()
    appendNodeInspectVars(nodeId, vars, nodes)
    if (data?.status === NodeRunningStatus.Succeeded)
      onSuccess()
  }

  const { mutateAsync: handleDatasourceSingleRun, isPending } = useDatasourceSingleRun()

  const handleRun = () => {
    let datasourceInfo: Record<string, any> = {}
    const { currentCredentialId: credentialId } = dataSourceStore.getState()
    if (datasourceType === DatasourceType.localFile) {
      const { localFileList } = dataSourceStore.getState()
      const { id, name, type, size, extension, mime_type } = localFileList[0].file
      const documentInfo = {
        related_id: id,
        name,
        type,
        size,
        extension,
        mime_type,
        url: '',
        transfer_method: TransferMethod.local_file,
      }
      datasourceInfo = documentInfo
    }
    if (datasourceType === DatasourceType.onlineDocument) {
      const { onlineDocuments } = dataSourceStore.getState()
      const { workspace_id, ...rest } = onlineDocuments[0]
      const documentInfo = {
        workspace_id,
        page: rest,
        credential_id: credentialId,
      }
      datasourceInfo = documentInfo
    }
    if (datasourceType === DatasourceType.websiteCrawl) {
      const { websitePages } = dataSourceStore.getState()
      datasourceInfo = {
        ...websitePages[0],
        credential_id: credentialId,
      }
    }
    if (datasourceType === DatasourceType.onlineDrive) {
      const { bucket, onlineDriveFileList, selectedFileIds } = dataSourceStore.getState()
      const file = onlineDriveFileList.find(file => file.id === selectedFileIds[0])
      datasourceInfo = {
        bucket,
        id: file?.id,
        type: file?.type,
        credential_id: credentialId,
      }
    }
    let hasError = false
    handleDatasourceSingleRun({
      pipeline_id: flowId,
      start_node_id: nodeId,
      start_node_title: datasourceNodeData.title,
      datasource_type: datasourceType,
      datasource_info: datasourceInfo,
    }, {
      onError: () => {
        hasError = true
        invalidLastRun()
        if (isPausedRef.current)
          return
        handleNodeDataUpdate({
          id: nodeId,
          data: {
            ...payload,
            _isSingleRun: false,
            _singleRunningStatus: NodeRunningStatus.Failed,
          },
        })
      },
      onSettled: (data) => {
        updateRunResult(data!)
        if (!hasError && !isPausedRef.current) {
          handleNodeDataUpdate({
            id: nodeId,
            data: {
              ...payload,
              _isSingleRun: false,
              _singleRunningStatus: NodeRunningStatus.Succeeded,
            },
          })
        }
      },
    })
  }

  const { handleSyncWorkflowDraft } = useNodesSyncDraft()

  const handleRunWithSyncDraft = () => {
    setNodeRunning()
    setIsRunAfterSingleRun(true)
    handleSyncWorkflowDraft(true, true, {
      onSuccess() {
        handleRun()
      },
    })
  }

  return {
    isPending,
    handleRunWithSyncDraft,
    datasourceType,
    datasourceNodeData,
    startRunBtnDisabled,
  }
}

export default useBeforeRunForm
@@ -0,0 +1,117 @@
import {
  useCallback,
  useEffect,
  useMemo,
} from 'react'
import { useStoreApi } from 'reactflow'
import { useNodeDataUpdate } from '@/app/components/workflow/hooks'
import type {
  DataSourceNodeType,
  ToolVarInputs,
} from '../types'

export const useConfig = (id: string, dataSourceList?: any[]) => {
  const store = useStoreApi()
  const { handleNodeDataUpdateWithSyncDraft } = useNodeDataUpdate()

  const getNodeData = useCallback(() => {
    const { getNodes } = store.getState()
    const nodes = getNodes()

    return nodes.find(node => node.id === id)
  }, [store, id])

  const handleNodeDataUpdate = useCallback((data: Partial<DataSourceNodeType>) => {
    handleNodeDataUpdateWithSyncDraft({
      id,
      data,
    })
  }, [id, handleNodeDataUpdateWithSyncDraft])

  const handleLocalFileDataSourceInit = useCallback(() => {
    const nodeData = getNodeData()

    if (nodeData?.data._dataSourceStartToAdd && nodeData?.data.provider_type === 'local_file') {
      handleNodeDataUpdate({
        ...nodeData.data,
        _dataSourceStartToAdd: false,
      })
    }
  }, [getNodeData, handleNodeDataUpdate])

  useEffect(() => {
    handleLocalFileDataSourceInit()
  }, [handleLocalFileDataSourceInit])

  const handleFileExtensionsChange = useCallback((fileExtensions: string[]) => {
    const nodeData = getNodeData()
    handleNodeDataUpdate({
      ...nodeData?.data,
      fileExtensions,
    })
  }, [handleNodeDataUpdate, getNodeData])

  const handleParametersChange = useCallback((datasource_parameters: ToolVarInputs) => {
    const nodeData = getNodeData()
    handleNodeDataUpdate({
      ...nodeData?.data,
      datasource_parameters,
    })
  }, [handleNodeDataUpdate, getNodeData])

  const outputSchema = useMemo(() => {
    const nodeData = getNodeData()
    if (!nodeData?.data || !dataSourceList) return []

    const currentDataSource = dataSourceList.find((ds: any) => ds.plugin_id === nodeData.data.plugin_id)
    const currentDataSourceItem = currentDataSource?.tools?.find((tool: any) => tool.name === nodeData.data.datasource_name)
    const output_schema = currentDataSourceItem?.output_schema

    const res: any[] = []
    if (!output_schema || !output_schema.properties)
      return res

    Object.keys(output_schema.properties).forEach((outputKey) => {
      const output = output_schema.properties[outputKey]
      const type = output.type
      if (type === 'object') {
        res.push({
          name: outputKey,
          value: output,
        })
      }
      else {
        res.push({
          name: outputKey,
          type: output.type === 'array'
            ? `Array[${output.items?.type.slice(0, 1).toLocaleUpperCase()}${output.items?.type.slice(1)}]`
            : `${output.type.slice(0, 1).toLocaleUpperCase()}${output.type.slice(1)}`,
          description: output.description,
        })
      }
    })
    return res
  }, [getNodeData, dataSourceList])

  const hasObjectOutput = useMemo(() => {
    const nodeData = getNodeData()
    if (!nodeData?.data || !dataSourceList) return false

    const currentDataSource = dataSourceList.find((ds: any) => ds.plugin_id === nodeData.data.plugin_id)
    const currentDataSourceItem = currentDataSource?.tools?.find((tool: any) => tool.name === nodeData.data.datasource_name)
    const output_schema = currentDataSourceItem?.output_schema

    if (!output_schema || !output_schema.properties)
      return false

    const properties = output_schema.properties
    return Object.keys(properties).some(key => properties[key].type === 'object')
  }, [getNodeData, dataSourceList])

  return {
    handleFileExtensionsChange,
    handleParametersChange,
    outputSchema,
    hasObjectOutput,
  }
}
dify/web/app/components/workflow/nodes/data-source/node.tsx
@@ -0,0 +1,59 @@
import type { FC } from 'react'
import { memo, useEffect } from 'react'
import type { NodeProps } from '@/app/components/workflow/types'
import { InstallPluginButton } from '@/app/components/workflow/nodes/_base/components/install-plugin-button'
import { useNodePluginInstallation } from '@/app/components/workflow/hooks/use-node-plugin-installation'
import { useNodeDataUpdate } from '@/app/components/workflow/hooks/use-node-data-update'
import type { DataSourceNodeType } from './types'

const Node: FC<NodeProps<DataSourceNodeType>> = ({
  id,
  data,
}) => {
  const {
    isChecking,
    isMissing,
    uniqueIdentifier,
    canInstall,
    onInstallSuccess,
    shouldDim,
  } = useNodePluginInstallation(data)
  const { handleNodeDataUpdate } = useNodeDataUpdate()
  const shouldLock = !isChecking && isMissing && canInstall && Boolean(uniqueIdentifier)

  useEffect(() => {
    if (data._pluginInstallLocked === shouldLock && data._dimmed === shouldDim)
      return
    handleNodeDataUpdate({
      id,
      data: {
        _pluginInstallLocked: shouldLock,
        _dimmed: shouldDim,
      },
    })
  }, [data._pluginInstallLocked, data._dimmed, handleNodeDataUpdate, id, shouldDim, shouldLock])

  const showInstallButton = !isChecking && isMissing && canInstall && uniqueIdentifier

  if (!showInstallButton)
    return null

  return (
    <div className='relative mb-1 px-3 py-1'>
      <div className='pointer-events-auto absolute right-3 top-[-32px] z-40'>
        <InstallPluginButton
          size='small'
          extraIdentifiers={[
            data.plugin_id,
            data.provider_name,
          ].filter(Boolean) as string[]}
          className='!font-medium !text-text-accent'
          uniqueIdentifier={uniqueIdentifier!}
          onSuccess={onInstallSuccess}
        />
      </div>
    </div>
  )
}

export default memo(Node)
dify/web/app/components/workflow/nodes/data-source/panel.tsx
@@ -0,0 +1,164 @@
import type { FC } from 'react'
import {
  useMemo,
} from 'react'
import { useTranslation } from 'react-i18next'
import { memo } from 'react'
import type { DataSourceNodeType } from './types'
import { DataSourceClassification } from './types'
import type { NodePanelProps } from '@/app/components/workflow/types'
import {
  BoxGroupField,
} from '@/app/components/workflow/nodes/_base/components/layout'
import OutputVars, { VarItem } from '@/app/components/workflow/nodes/_base/components/output-vars'
import StructureOutputItem from '@/app/components/workflow/nodes/_base/components/variable/object-child-tree-panel/show'
import TagInput from '@/app/components/base/tag-input'
import { useNodesReadOnly } from '@/app/components/workflow/hooks'
import { useConfig } from './hooks/use-config'
import {
  COMMON_OUTPUT,
  LOCAL_FILE_OUTPUT,
} from './constants'
import { useStore } from '@/app/components/workflow/store'
import { toolParametersToFormSchemas } from '@/app/components/tools/utils/to-form-schema'
import ToolForm from '../tool/components/tool-form'
import { wrapStructuredVarItem } from '@/app/components/workflow/utils/tool'
import useMatchSchemaType, { getMatchedSchemaType } from '../_base/components/variable/use-match-schema-type'

const Panel: FC<NodePanelProps<DataSourceNodeType>> = ({ id, data }) => {
  const { t } = useTranslation()
  const { nodesReadOnly } = useNodesReadOnly()
  const dataSourceList = useStore(s => s.dataSourceList)
  const {
    provider_type,
    plugin_id,
    fileExtensions = [],
    datasource_parameters,
  } = data
  const {
    handleFileExtensionsChange,
    handleParametersChange,
    outputSchema,
    hasObjectOutput,
  } = useConfig(id, dataSourceList)
  const isLocalFile = provider_type === DataSourceClassification.localFile
  const currentDataSource = dataSourceList?.find(ds => ds.plugin_id === plugin_id)
  const currentDataSourceItem: any = currentDataSource?.tools?.find((tool: any) => tool.name === data.datasource_name)
  const formSchemas = useMemo(() => {
    return currentDataSourceItem ? toolParametersToFormSchemas(currentDataSourceItem.parameters) : []
  }, [currentDataSourceItem])

  const pipelineId = useStore(s => s.pipelineId)
  const setShowInputFieldPanel = useStore(s => s.setShowInputFieldPanel)
  const { schemaTypeDefinitions } = useMatchSchemaType()
  return (
    <div>
      {
        currentDataSource?.is_authorized && !isLocalFile && !!formSchemas?.length && (
          <BoxGroupField
            boxGroupProps={{
              boxProps: { withBorderBottom: true },
            }}
            fieldProps={{
              fieldTitleProps: {
                title: t('workflow.nodes.tool.inputVars'),
              },
              supportCollapse: true,
            }}
          >
            {formSchemas.length > 0 && (
              <ToolForm
                readOnly={nodesReadOnly}
                nodeId={id}
                schema={formSchemas as any}
                value={datasource_parameters}
                onChange={handleParametersChange}
                currentProvider={currentDataSource}
                currentTool={currentDataSourceItem}
                showManageInputField={!!pipelineId}
                onManageInputField={() => setShowInputFieldPanel?.(true)}
              />
            )}
          </BoxGroupField>
        )
      }
      {
        isLocalFile && (
          <BoxGroupField
            boxGroupProps={{
              boxProps: { withBorderBottom: true },
            }}
            fieldProps={{
              fieldTitleProps: {
                title: t('workflow.nodes.dataSource.supportedFileFormats'),
              },
            }}
          >
            <div className='rounded-lg bg-components-input-bg-normal p-1 pt-0'>
              <TagInput
                items={fileExtensions}
                onChange={handleFileExtensionsChange}
                placeholder={t('workflow.nodes.dataSource.supportedFileFormatsPlaceholder')}
                inputClassName='bg-transparent'
                disableAdd={nodesReadOnly}
                disableRemove={nodesReadOnly}
              />
            </div>
          </BoxGroupField>
        )
      }
      <OutputVars>
        {
          COMMON_OUTPUT.map((item, index) => (
            <VarItem
              key={index}
              name={item.name}
              type={item.type}
              description={item.description}
              isIndent={hasObjectOutput}
            />
          ))
        }
        {
          isLocalFile && LOCAL_FILE_OUTPUT.map((item, index) => (
            <VarItem
              key={index}
              name={item.name}
              type={item.type}
              description={item.description}
              subItems={item.subItems.map(item => ({
                name: item.name,
                type: item.type,
                description: item.description,
              }))}
            />
          ))
        }
        {outputSchema.map((outputItem) => {
          const schemaType = getMatchedSchemaType(outputItem.value, schemaTypeDefinitions)

          return (
            <div key={outputItem.name}>
              {outputItem.value?.type === 'object' ? (
                <StructureOutputItem
                  rootClassName='code-sm-semibold text-text-secondary'
                  payload={wrapStructuredVarItem(outputItem, schemaType)}
                />
              ) : (
                <VarItem
                  name={outputItem.name}
                  // eslint-disable-next-line sonarjs/no-nested-template-literals
                  type={`${outputItem.type.toLocaleLowerCase()}${schemaType ? ` (${schemaType})` : ''}`}
                  description={outputItem.description}
                  isIndent={hasObjectOutput}
                />
              )}
            </div>
          )
        })}
      </OutputVars>
    </div>
  )
}

export default memo(Panel)
dify/web/app/components/workflow/nodes/data-source/types.ts
@@ -0,0 +1,41 @@
import type { CommonNodeType, Node } from '@/app/components/workflow/types'
import type { FlowType } from '@/types/common'
import type { NodeRunResult, VarInInspect } from '@/types/workflow'
import type { Dispatch, SetStateAction } from 'react'
import type { ResourceVarInputs } from '../_base/types'
export { VarKindType as VarType } from '../_base/types'

export enum DataSourceClassification {
  localFile = 'local_file',
  websiteCrawl = 'website_crawl',
  onlineDocument = 'online_document',
  onlineDrive = 'online_drive',
}

export type ToolVarInputs = ResourceVarInputs

export type DataSourceNodeType = CommonNodeType & {
  fileExtensions?: string[]
  plugin_id: string
  provider_type: string
  provider_name: string
  datasource_name: string
  datasource_label: string
  datasource_parameters: ToolVarInputs
  datasource_configurations: Record<string, any>
  plugin_unique_identifier?: string
}

export type CustomRunFormProps = {
  nodeId: string
  flowId: string
  flowType: FlowType
  payload: CommonNodeType
  setRunResult: Dispatch<SetStateAction<NodeRunResult | null>>
  setIsRunAfterSingleRun: Dispatch<SetStateAction<boolean>>
  isPaused: boolean
  isRunAfterSingleRun: boolean
  onSuccess: () => void
  onCancel: () => void
  appendNodeInspectVars: (nodeId: string, vars: VarInInspect[], nodes: Node[]) => void
}
dify/web/app/components/workflow/nodes/data-source/utils.ts
@@ -0,0 +1,11 @@
import { PipelineInputVarType } from '@/models/pipeline'
import { VarType } from '@/app/components/workflow/types'

export const inputVarTypeToVarType = (type: PipelineInputVarType): VarType => {
  return ({
    [PipelineInputVarType.number]: VarType.number,
    [PipelineInputVarType.singleFile]: VarType.file,
    [PipelineInputVarType.multiFiles]: VarType.arrayFile,
    [PipelineInputVarType.checkbox]: VarType.boolean,
  } as any)[type] || VarType.string
}
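For illustration only (not part of the commit): a usage sketch of the mapping above, assuming the same enum imports; any input type without an explicit entry falls back to VarType.string.

// Hypothetical usage of inputVarTypeToVarType.
import { PipelineInputVarType } from '@/models/pipeline'
import { inputVarTypeToVarType } from './utils'

const fileVar = inputVarTypeToVarType(PipelineInputVarType.singleFile) // VarType.file
const boolVar = inputVarTypeToVarType(PipelineInputVarType.checkbox)   // VarType.boolean
console.log(fileVar, boolVar)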