Knowledge base upload and segmentation

2025-11-07 14:38:51 +08:00
parent b98450df96
commit 8d87b00678
19 changed files with 687 additions and 478 deletions

DifyApiClient.java

@@ -19,6 +19,8 @@ import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
@@ -253,6 +255,7 @@ public class DifyApiClient {
/**
* Upload a document to the knowledge base (by file)
* Per the Dify API docs: POST /datasets/{dataset_id}/document/create-by-file
*/
public DocumentUploadResponse uploadDocumentByFile(
String datasetId,
@@ -260,24 +263,41 @@ public class DifyApiClient {
String originalFilename,
DocumentUploadRequest uploadRequest) {
String url = difyConfig.getFullApiUrl("/datasets/" + datasetId + "/document/create_by_file");
String url = difyConfig.getFullApiUrl("/datasets/" + datasetId + "/document/create-by-file");
try {
// Build the data JSON string (contains all metadata)
Map<String, Object> dataMap = new HashMap<>();
if (uploadRequest.getName() != null) {
dataMap.put("name", uploadRequest.getName());
}
if (uploadRequest.getIndexingTechnique() != null) {
dataMap.put("indexing_technique", uploadRequest.getIndexingTechnique());
}
// process_rule is a required field; fall back to a default configuration if not provided
if (uploadRequest.getProcessRule() != null) {
dataMap.put("process_rule", uploadRequest.getProcessRule());
} else {
// Default segmentation rule
Map<String, Object> defaultProcessRule = new HashMap<>();
defaultProcessRule.put("mode", "automatic");
dataMap.put("process_rule", defaultProcessRule);
}
// Default document form and language
dataMap.put("doc_form", "text_model");
dataMap.put("doc_language", "Chinese");
String dataJson = JSON.toJSONString(dataMap);
logger.info("上传文档到知识库: datasetId={}, file={}, data={}", datasetId, originalFilename, dataJson);
// Build the multipart/form-data request body
MultipartBody.Builder bodyBuilder = new MultipartBody.Builder()
.setType(MultipartBody.FORM)
.addFormDataPart("file", originalFilename,
RequestBody.create(file, MediaType.parse("application/octet-stream")));
// Add the remaining parameters
if (uploadRequest.getName() != null) {
bodyBuilder.addFormDataPart("name", uploadRequest.getName());
}
if (uploadRequest.getIndexingTechnique() != null) {
bodyBuilder.addFormDataPart("indexing_technique", uploadRequest.getIndexingTechnique());
}
if (uploadRequest.getProcessRule() != null) {
bodyBuilder.addFormDataPart("process_rule", JSON.toJSONString(uploadRequest.getProcessRule()));
}
RequestBody.create(file, MediaType.parse("application/octet-stream")))
.addFormDataPart("data", dataJson);
Request httpRequest = new Request.Builder()
.url(url)
@@ -293,7 +313,8 @@ public class DifyApiClient {
throw new DifyException("上传文档失败: " + responseBody);
}
return JSON.parseObject(responseBody, DocumentUploadResponse.class);
logger.info("文档上传成功: datasetId={}, file={}", datasetId, originalFilename);
return JSON.parseObject(responseBody, DocumentUploadResponse.class);
}
} catch (IOException e) {
logger.error("上传文档异常", e);
@@ -710,16 +731,19 @@ public class DifyApiClient {
/**
* Generic GET request
* @param path API path
* @param apiKey API key
* @param apiKey API key; when null, the knowledge-base API key is used
* @return JSON response string
*/
public String get(String path, String apiKey) {
String url = difyConfig.getFullApiUrl(path);
try {
// When apiKey is null, fall back to the knowledge-base API key (these generic methods mainly serve knowledge-base operations)
String actualApiKey = apiKey != null ? apiKey : getKnowledgeApiKey();
Request httpRequest = new Request.Builder()
.url(url)
.header("Authorization", "Bearer " + getApiKey(apiKey))
.header("Authorization", "Bearer " + actualApiKey)
.get()
.build();
@@ -743,19 +767,22 @@ public class DifyApiClient {
* Generic POST request
* @param path API path
* @param requestBody request body (JSON string or Map)
* @param apiKey API key
* @param apiKey API key; when null, the knowledge-base API key is used
* @return JSON response string
*/
public String post(String path, Object requestBody, String apiKey) {
String url = difyConfig.getFullApiUrl(path);
try {
// When apiKey is null, fall back to the knowledge-base API key
String actualApiKey = apiKey != null ? apiKey : getKnowledgeApiKey();
String jsonBody = requestBody instanceof String ?
(String) requestBody : JSON.toJSONString(requestBody);
Request httpRequest = new Request.Builder()
.url(url)
.header("Authorization", "Bearer " + getApiKey(apiKey))
.header("Authorization", "Bearer " + actualApiKey)
.header("Content-Type", "application/json")
.post(RequestBody.create(jsonBody, MediaType.parse("application/json")))
.build();
@@ -780,19 +807,22 @@ public class DifyApiClient {
* Generic PATCH request
* @param path API path
* @param requestBody request body (JSON string or Map)
* @param apiKey API key
* @param apiKey API key; when null, the knowledge-base API key is used
* @return JSON response string
*/
public String patch(String path, Object requestBody, String apiKey) {
String url = difyConfig.getFullApiUrl(path);
try {
// When apiKey is null, fall back to the knowledge-base API key
String actualApiKey = apiKey != null ? apiKey : getKnowledgeApiKey();
String jsonBody = requestBody instanceof String ?
(String) requestBody : JSON.toJSONString(requestBody);
Request httpRequest = new Request.Builder()
.url(url)
.header("Authorization", "Bearer " + getApiKey(apiKey))
.header("Authorization", "Bearer " + actualApiKey)
.header("Content-Type", "application/json")
.patch(RequestBody.create(jsonBody, MediaType.parse("application/json")))
.build();
@@ -816,16 +846,19 @@ public class DifyApiClient {
/**
* Generic DELETE request
* @param path API path
* @param apiKey API key
* @param apiKey API key; when null, the knowledge-base API key is used
* @return JSON response string
*/
public String delete(String path, String apiKey) {
String url = difyConfig.getFullApiUrl(path);
try {
// When apiKey is null, fall back to the knowledge-base API key
String actualApiKey = apiKey != null ? apiKey : getKnowledgeApiKey();
Request httpRequest = new Request.Builder()
.url(url)
.header("Authorization", "Bearer " + getApiKey(apiKey))
.header("Authorization", "Bearer " + actualApiKey)
.delete()
.build();
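Taken together, the four helpers now share the same key-fallback behavior. A short usage sketch (datasetId and documentId are assumed to be in scope):

// Passing null as apiKey makes each helper fall back to the knowledge-base key.
String segments = difyApiClient.get(
        "/datasets/" + datasetId + "/documents/" + documentId + "/segments", null);

// POST/PATCH accept either a pre-serialized JSON string or a Map.
Map<String, Object> body = new HashMap<>();
body.put("document_ids", Collections.singletonList(documentId));
String patched = difyApiClient.patch(
        "/datasets/" + datasetId + "/documents/status/disable", body, null);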

DocumentListResponse.java

@@ -1,22 +1,23 @@
package org.xyzh.ai.client.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.alibaba.fastjson2.annotation.JSONField;
import lombok.Data;
import java.util.List;
/**
* @description Document list response
* @description Dify document list response
* @filename DocumentListResponse.java
* @author AI Assistant
* @copyright xyzh
* @since 2025-11-04
* @since 2025-11-07
*/
@Data
public class DocumentListResponse {
private List<DocumentInfo> data;
@JsonProperty("has_more")
private List<Document> data;
@JSONField(name = "has_more")
private Boolean hasMore;
private Integer limit;
@@ -24,62 +25,72 @@ public class DocumentListResponse {
private Integer total;
private Integer page;
/**
* Document info
*/
@Data
public static class DocumentInfo {
public static class Document {
private String id;
private Integer position;
@JsonProperty("data_source_type")
@JSONField(name = "data_source_type")
private String dataSourceType;
@JsonProperty("data_source_info")
@JSONField(name = "data_source_info")
private DataSourceInfo dataSourceInfo;
@JsonProperty("dataset_process_rule_id")
@JSONField(name = "dataset_process_rule_id")
private String datasetProcessRuleId;
private String name;
@JsonProperty("created_from")
@JSONField(name = "created_from")
private String createdFrom;
@JsonProperty("created_by")
@JSONField(name = "created_by")
private String createdBy;
@JsonProperty("created_at")
@JSONField(name = "created_at")
private Long createdAt;
@JsonProperty("indexing_status")
private Integer tokens;
@JSONField(name = "indexing_status")
private String indexingStatus;
private String error;
private Boolean enabled;
@JsonProperty("disabled_at")
@JSONField(name = "disabled_at")
private Long disabledAt;
@JsonProperty("disabled_by")
@JSONField(name = "disabled_by")
private String disabledBy;
private Boolean archived;
@JsonProperty("word_count")
@JSONField(name = "display_status")
private String displayStatus;
@JSONField(name = "word_count")
private Integer wordCount;
@JsonProperty("hit_count")
@JSONField(name = "hit_count")
private Integer hitCount;
@JsonProperty("doc_form")
@JSONField(name = "doc_form")
private String docForm;
}
/**
* Data source info
*/
@Data
public static class DataSourceInfo {
@JsonProperty("upload_file_id")
@JSONField(name = "upload_file_id")
private String uploadFileId;
}
}
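Because the DTO switched from Jackson's @JsonProperty to fastjson2's @JSONField, it must be deserialized with fastjson2 (as DifyApiClient already does). A minimal sketch with an abbreviated, made-up payload:

// fastjson2 maps the snake_case keys onto camelCase fields via @JSONField
String json = "{\"data\":[{\"id\":\"doc-1\",\"indexing_status\":\"completed\","
        + "\"word_count\":120}],\"has_more\":false,\"limit\":20,\"total\":1,\"page\":1}";
DocumentListResponse resp = com.alibaba.fastjson2.JSON.parseObject(json, DocumentListResponse.class);
System.out.println(resp.getData().get(0).getIndexingStatus()); // completed
System.out.println(resp.getHasMore());                         // false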

DocumentUploadResponse.java

@@ -1,10 +1,10 @@
package org.xyzh.ai.client.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.alibaba.fastjson2.annotation.JSONField;
import lombok.Data;
/**
* @description Document upload response
* @description Document upload response (per the Dify API response structure)
* @filename DocumentUploadResponse.java
* @author AI Assistant
* @copyright xyzh
@@ -14,14 +14,9 @@ import lombok.Data;
public class DocumentUploadResponse {
/**
* Document ID
* Document detail
*/
private String id;
/**
* Document name
*/
private String name;
private Document document;
/**
* Batch ID, used to query processing status
@@ -29,32 +24,122 @@ public class DocumentUploadResponse {
private String batch;
/**
* Position (sequence number)
* Document detail
*/
private Integer position;
@Data
public static class Document {
/**
* Document ID
*/
private String id;
/**
* Data source type
*/
@JsonProperty("data_source_type")
private String dataSourceType;
/**
* Document name
*/
private String name;
/**
* Indexing status
*/
@JsonProperty("indexing_status")
private String indexingStatus;
/**
* Position (sequence number)
*/
private Integer position;
/**
* Creation time
*/
@JsonProperty("created_at")
private Long createdAt;
/**
* Data source type
*/
@JSONField(name = "data_source_type")
private String dataSourceType;
/**
* Creator
*/
@JsonProperty("created_by")
private String createdBy;
/**
* Data source info
*/
@JSONField(name = "data_source_info")
private Object dataSourceInfo;
/**
* Dataset process rule ID
*/
@JSONField(name = "dataset_process_rule_id")
private String datasetProcessRuleId;
/**
* Creation source
*/
@JSONField(name = "created_from")
private String createdFrom;
/**
* Creator
*/
@JSONField(name = "created_by")
private String createdBy;
/**
* Creation time (timestamp)
*/
@JSONField(name = "created_at")
private Long createdAt;
/**
* Token count
*/
private Integer tokens;
/**
* Indexing status
*/
@JSONField(name = "indexing_status")
private String indexingStatus;
/**
* Error message
*/
private String error;
/**
* Enabled flag
*/
private Boolean enabled;
/**
* Disabled time
*/
@JSONField(name = "disabled_at")
private Long disabledAt;
/**
* Disabled by
*/
@JSONField(name = "disabled_by")
private String disabledBy;
/**
* Archived flag
*/
private Boolean archived;
/**
* Display status
*/
@JSONField(name = "display_status")
private String displayStatus;
/**
* Word count
*/
@JSONField(name = "word_count")
private Integer wordCount;
/**
* Hit count
*/
@JSONField(name = "hit_count")
private Integer hitCount;
/**
* Document form
*/
@JSONField(name = "doc_form")
private String docForm;
}
}
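Callers therefore no longer read id/name/position off the top level; everything except batch now lives under document. A hedged sketch of the new access pattern:

// responseBody is the raw JSON returned by POST .../document/create-by-file
DocumentUploadResponse resp = JSON.parseObject(responseBody, DocumentUploadResponse.class);
String documentId = resp.getDocument().getId();   // was resp.getId() before this commit
String batchId = resp.getBatch();                 // still top-level; used to poll indexing
boolean done = "completed".equals(resp.getDocument().getIndexingStatus());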

AiKnowledgeController.java

@@ -197,4 +197,22 @@ public class AiKnowledgeController {
log.info("获取可用的Rerank模型列表");
return knowledgeService.getAvailableRerankModels();
}
/**
* @description Get the document list of a knowledge base
* @param knowledgeId knowledge base ID
* @param page page number, 1-based (default 1)
* @param limit page size (default 20)
* @return ResultDomain<Map<String, Object>>
* @author AI Assistant
* @since 2025-11-07
*/
@GetMapping("/{knowledgeId}/documents")
public ResultDomain<Map<String, Object>> getDocumentList(
@PathVariable(name = "knowledgeId") String knowledgeId,
@RequestParam(required = false, defaultValue = "1", name = "page") Integer page,
@RequestParam(required = false, defaultValue = "20", name = "limit") Integer limit) {
log.info("获取文档列表: knowledgeId={}, page={}, limit={}", knowledgeId, page, limit);
return knowledgeService.getDocumentList(knowledgeId, page, limit);
}
}
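For illustration only: the class-level request mapping is not shown in this diff, so the base path below is a placeholder; the endpoint takes page/limit as optional query parameters:

// Hypothetical client-side call with Spring's RestTemplate
RestTemplate rest = new RestTemplate();
String url = "http://localhost:8080/ai/knowledge/{knowledgeId}/documents?page=1&limit=20";
ResponseEntity<String> resp = rest.getForEntity(url, String.class, "kb-123");
System.out.println(resp.getBody()); // ResultDomain JSON wrapping { data, total, page, limit, hasMore }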

DifyProxyController.java

@@ -4,8 +4,15 @@ import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import org.xyzh.ai.client.DifyApiClient;
import org.xyzh.ai.mapper.AiUploadFileMapper;
import org.xyzh.common.core.domain.ResultDomain;
import org.xyzh.common.dto.ai.TbAiUploadFile;
import com.alibaba.fastjson2.JSONArray;
import com.alibaba.fastjson2.JSONObject;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
@@ -23,6 +30,11 @@ public class DifyProxyController {
@Autowired
private DifyApiClient difyApiClient;
@Autowired
private AiUploadFileMapper uploadFileMapper;
// ===================== Document Segment Management APIs =====================
/**
@@ -34,18 +46,19 @@ public class DifyProxyController {
* @since 2025-11-04
*/
@GetMapping("/datasets/{datasetId}/documents/{documentId}/segments")
public ResultDomain<String> getDocumentSegments(
public ResultDomain<JSONObject> getDocumentSegments(
@PathVariable(name = "datasetId") String datasetId,
@PathVariable(name = "documentId") String documentId) {
ResultDomain<String> result = new ResultDomain<>();
ResultDomain<JSONObject> result = new ResultDomain<>();
log.info("获取文档分段列表: datasetId={}, documentId={}", datasetId, documentId);
try {
// Call the Dify API with the default configured API key
String path = "/datasets/" + datasetId + "/documents/" + documentId + "/segments";
String response = difyApiClient.get(path, null);
JSONObject jsonObject = JSONObject.parseObject(response);
result.success("Fetched document segment list", response);
result.success("Fetched document segment list", jsonObject);
return result;
} catch (Exception e) {
log.error("获取文档分段列表失败", e);
@@ -197,5 +210,66 @@ public class DifyProxyController {
return result;
}
}
/**
* @description Update document enabled/disabled status
* @param datasetId Dify dataset ID
* @param action action type: enable/disable/archive/un_archive
* @param requestBody request body containing a document_ids array
* @return ResultDomain<String> update result
* @author AI Assistant
* @since 2025-11-07
*/
@PostMapping("/datasets/{datasetId}/documents/status/{action}")
public ResultDomain<String> updateDocumentStatus(
@PathVariable(name = "datasetId") String datasetId,
@PathVariable(name = "action") String action,
@RequestBody Map<String, Object> requestBody) {
log.info("更新文档状态: datasetId={}, action={}, documentIds={}",
datasetId, action, requestBody.get("document_ids"));
ResultDomain<String> result = new ResultDomain<>();
try {
// 1. Call the Dify API with the default configured API key
String path = "/datasets/" + datasetId + "/documents/status/" + action;
String response = difyApiClient.patch(path, requestBody, null);
// 2. Sync the update to the local database
@SuppressWarnings("unchecked")
List<String> documentIds = (List<String>) requestBody.get("document_ids");
if (documentIds != null && !documentIds.isEmpty()) {
Boolean enabled = null;
if ("enable".equals(action)) {
enabled = true;
} else if ("disable".equals(action)) {
enabled = false;
}
if (enabled != null) {
for (String documentId : documentIds) {
try {
TbAiUploadFile file = uploadFileMapper.selectFileByDifyDocumentId(documentId);
if (file != null) {
file.setEnabled(enabled);
file.setUpdateTime(new Date());
uploadFileMapper.updateUploadFile(file);
log.info("本地数据库更新成功: documentId={}, enabled={}", documentId, enabled);
}
} catch (Exception e) {
log.warn("更新本地数据库失败: documentId={}, error={}", documentId, e.getMessage());
}
}
}
}
result.success("更新文档状态成功", response);
return result;
} catch (Exception e) {
log.error("更新文档状态失败", e);
result.fail("更新文档状态失败: " + e.getMessage());
return result;
}
}
}
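A sketch of the request body updateDocumentStatus expects (document IDs are placeholders; action must be enable, disable, archive, or un_archive):

// Body for POST .../datasets/{datasetId}/documents/status/disable
Map<String, Object> requestBody = new HashMap<>();
requestBody.put("document_ids", Arrays.asList("doc-1", "doc-2"));
// The controller forwards this to Dify via PATCH, then, for enable/disable only,
// flips the `enabled` flag on each matching tb_ai_upload_file row.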

AiUploadFileMapper.java

@@ -82,4 +82,11 @@ public interface AiUploadFileMapper extends BaseMapper<TbAiUploadFile> {
* @return number of inserted rows
*/
int batchInsertUploadFiles(@Param("files") List<TbAiUploadFile> files);
/**
* Query a file record by its Dify document ID
* @param difyDocumentId Dify document ID
* @return TbAiUploadFile file record
*/
TbAiUploadFile selectFileByDifyDocumentId(@Param("difyDocumentId") String difyDocumentId);
}

AiKnowledgeServiceImpl.java

@@ -10,6 +10,7 @@ import org.xyzh.ai.client.dto.DatasetCreateRequest;
import org.xyzh.ai.client.dto.DatasetCreateResponse;
import org.xyzh.ai.client.dto.DatasetDetailResponse;
import org.xyzh.ai.client.dto.DatasetUpdateRequest;
import org.xyzh.ai.client.dto.DocumentListResponse;
import org.xyzh.ai.client.dto.EmbeddingModelResponse;
import org.xyzh.ai.client.dto.RerankModelResponse;
import org.xyzh.ai.client.dto.RetrievalModel;
@@ -109,7 +110,7 @@ public class AiKnowledgeServiceImpl implements AiKnowledgeService {
embeddingModel = difyConfig.getDataset().getDefaultEmbeddingModel();
}
difyRequest.setEmbeddingModel(embeddingModel);
// Set the model provider (from the frontend, or the configured default)
String provider = knowledge.getEmbeddingModelProvider();
@@ -322,7 +323,7 @@ public class AiKnowledgeServiceImpl implements AiKnowledgeService {
}
needUpdateDify = true;
}
// Retrieval config changes: Rerank, Top K, Score threshold
boolean retrievalConfigChanged = false;
@@ -875,5 +876,73 @@ public class AiKnowledgeServiceImpl implements AiKnowledgeService {
}
}
/**
* Get the document list of a knowledge base
* @param knowledgeId knowledge base ID
* @param page page number
* @param limit page size
* @return document list
*/
@Override
public ResultDomain<Map<String, Object>> getDocumentList(String knowledgeId, Integer page, Integer limit) {
ResultDomain<Map<String, Object>> resultDomain = new ResultDomain<>();
try {
// Query the knowledge base record
TbAiKnowledge knowledge = knowledgeMapper.selectKnowledgeById(knowledgeId);
if (knowledge == null || knowledge.getDeleted()) {
resultDomain.fail("知识库不存在");
return resultDomain;
}
// Check permission
ResultDomain<Boolean> permissionCheck = checkKnowledgePermission(knowledgeId, "READ");
if (!permissionCheck.isSuccess() || !permissionCheck.getData()) {
resultDomain.fail("无权限访问该知识库");
return resultDomain;
}
// Check that a Dify dataset ID is linked
if (!StringUtils.hasText(knowledge.getDifyDatasetId())) {
resultDomain.fail("知识库未关联Dify数据集");
return resultDomain;
}
// Apply defaults
int pageNum = page != null && page > 0 ? page : 1;
int pageSize = limit != null && limit > 0 ? limit : 20;
// Call the Dify API to list documents
DocumentListResponse response = difyApiClient.listDocuments(
knowledge.getDifyDatasetId(),
pageNum,
pageSize
);
// Build the response payload
Map<String, Object> result = new HashMap<>();
result.put("data", response.getData());
result.put("total", response.getTotal());
result.put("page", response.getPage());
result.put("limit", response.getLimit());
result.put("hasMore", response.getHasMore());
log.info("获取文档列表成功: knowledgeId={}, page={}, total={}",
knowledgeId, pageNum, response.getTotal());
resultDomain.success("获取成功", result);
return resultDomain;
} catch (DifyException e) {
log.error("获取文档列表失败", e);
resultDomain.fail("获取文档列表失败: " + e.getMessage());
return resultDomain;
} catch (Exception e) {
log.error("获取文档列表异常", e);
resultDomain.fail("获取文档列表异常: " + e.getMessage());
return resultDomain;
}
}
}

AiUploadFileServiceImpl.java

@@ -13,7 +13,6 @@ import org.xyzh.ai.client.dto.DocumentUploadRequest;
import org.xyzh.ai.client.dto.DocumentUploadResponse;
import org.xyzh.ai.config.DifyConfig;
import org.xyzh.ai.exception.DifyException;
import org.xyzh.ai.exception.FileProcessException;
import org.xyzh.ai.mapper.AiAgentConfigMapper;
import org.xyzh.ai.mapper.AiKnowledgeMapper;
import org.xyzh.ai.mapper.AiUploadFileMapper;
@@ -30,10 +29,6 @@ import org.xyzh.common.dto.user.TbSysUser;
import org.xyzh.system.utils.LoginUtil;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
@@ -272,10 +267,12 @@ public class AiUploadFileServiceImpl implements AiUploadFileService {
uploadFile.setFilePath(sysFile.getFilePath()); // Store the system file's relative path
uploadFile.setFileSize(file.getSize());
uploadFile.setFileType(getFileExtension(originalFilename));
uploadFile.setDifyDocumentId(difyResponse.getId());
uploadFile.setDifyDocumentId(difyResponse.getDocument().getId());
uploadFile.setDifyBatchId(difyResponse.getBatch());
uploadFile.setStatus(1); // 1 = processing
uploadFile.setChunkCount(0);
uploadFile.setStatus("completed".equals(difyResponse.getDocument().getIndexingStatus()) ? 2 : 1); // 2 = completed, 1 = processing
uploadFile.setChunkCount(difyResponse.getDocument().getWordCount() != null ? difyResponse.getDocument().getWordCount() : 0);
uploadFile.setEnabled(difyResponse.getDocument().getEnabled() != null ? difyResponse.getDocument().getEnabled() : true);
uploadFile.setDisplayStatus(difyResponse.getDocument().getDisplayStatus());
uploadFile.setCreateTime(new Date());
uploadFile.setUpdateTime(new Date());
uploadFile.setDeleted(false);
@@ -384,10 +381,7 @@ public class AiUploadFileServiceImpl implements AiUploadFileService {
}
}
// 4. Get the current user
TbSysUser currentUser = LoginUtil.getCurrentUser();
// 5. Soft-delete the local record
// 4. Soft-delete the local record
TbAiUploadFile deleteEntity = new TbAiUploadFile();
deleteEntity.setID(fileId);
@@ -530,8 +524,8 @@ public class AiUploadFileServiceImpl implements AiUploadFileService {
update.setStatus(1); // processing
}
if (docStatus.getCompletedSegments() != null) {
update.setChunkCount(docStatus.getCompletedSegments());
if (docStatus.getTotalSegments() != null) {
update.setChunkCount(docStatus.getTotalSegments());
}
}

AiUploadFileMapper.xml

@@ -22,6 +22,8 @@
<result column="chunk_count" property="chunkCount" jdbcType="INTEGER"/>
<result column="status" property="status" jdbcType="INTEGER"/>
<result column="error_message" property="errorMessage" jdbcType="VARCHAR"/>
<result column="enabled" property="enabled" jdbcType="BOOLEAN"/>
<result column="display_status" property="displayStatus" jdbcType="VARCHAR"/>
<result column="create_time" property="createTime" jdbcType="TIMESTAMP"/>
<result column="update_time" property="updateTime" jdbcType="TIMESTAMP"/>
<result column="delete_time" property="deleteTime" jdbcType="TIMESTAMP"/>
@@ -32,7 +34,7 @@
<sql id="Base_Column_List">
id, user_id, knowledge_id, conversation_id, message_id, sys_file_id, file_name, file_path, file_size,
file_type, mime_type, extracted_text, dify_document_id, dify_batch_id, dify_upload_file_id,
chunk_count, status, error_message,
chunk_count, status, error_message, enabled, display_status,
create_time, update_time, delete_time, deleted
</sql>
@@ -68,12 +70,12 @@
INSERT INTO tb_ai_upload_file (
id, user_id, knowledge_id, conversation_id, message_id, sys_file_id, file_name, file_path, file_size,
file_type, mime_type, extracted_text, dify_document_id, dify_batch_id, dify_upload_file_id,
chunk_count, status, error_message,
chunk_count, status, error_message, enabled, display_status,
create_time, update_time, deleted
) VALUES (
#{ID}, #{userID}, #{knowledgeId}, #{conversationID}, #{messageID}, #{sysFileId}, #{fileName}, #{filePath}, #{fileSize},
#{fileType}, #{mimeType}, #{extractedText}, #{difyDocumentId}, #{difyBatchId}, #{difyUploadFileId},
#{chunkCount}, #{status}, #{errorMessage},
#{chunkCount}, #{status}, #{errorMessage}, #{enabled}, #{displayStatus},
#{createTime}, #{updateTime}, #{deleted}
)
</insert>
@@ -99,6 +101,8 @@
<if test="chunkCount != null">chunk_count = #{chunkCount},</if>
<if test="status != null">status = #{status},</if>
<if test="errorMessage != null">error_message = #{errorMessage},</if>
<if test="enabled != null">enabled = #{enabled},</if>
<if test="displayStatus != null">display_status = #{displayStatus},</if>
<if test="updateTime != null">update_time = #{updateTime},</if>
</set>
WHERE id = #{ID} AND deleted = 0
@@ -193,19 +197,29 @@
ORDER BY create_time ASC
</select>
<!-- selectFileByDifyDocumentId: query a file record by Dify document ID -->
<select id="selectFileByDifyDocumentId" resultMap="BaseResultMap">
SELECT
<include refid="Base_Column_List"/>
FROM tb_ai_upload_file
WHERE dify_document_id = #{difyDocumentId}
AND deleted = 0
LIMIT 1
</select>
<!-- batchInsertUploadFiles: batch-insert file records -->
<insert id="batchInsertUploadFiles" parameterType="java.util.List">
INSERT INTO tb_ai_upload_file (
id, user_id, knowledge_id, conversation_id, message_id, sys_file_id, file_name, file_path, file_size,
file_type, mime_type, dify_document_id, dify_batch_id, dify_upload_file_id,
chunk_count, status, create_time, update_time, deleted
chunk_count, status, enabled, display_status, create_time, update_time, deleted
) VALUES
<foreach collection="files" item="file" separator=",">
(
#{file.ID}, #{file.userID}, #{file.knowledgeId}, #{file.conversationID}, #{file.messageID},
#{file.sysFileId}, #{file.fileName}, #{file.filePath}, #{file.fileSize},
#{file.fileType}, #{file.mimeType}, #{file.difyDocumentId}, #{file.difyBatchId}, #{file.difyUploadFileId},
#{file.chunkCount}, #{file.status}, #{file.createTime}, #{file.updateTime}, #{file.deleted}
#{file.chunkCount}, #{file.status}, #{file.enabled}, #{file.displayStatus}, #{file.createTime}, #{file.updateTime}, #{file.deleted}
)
</foreach>
</insert>
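A minimal caller sketch for the batch insert (illustrative; only a few of the mapped columns are populated, and the setters are assumed to be Lombok-generated):

TbAiUploadFile f = new TbAiUploadFile();
f.setID(UUID.randomUUID().toString());
f.setFileName("sample.md");
f.setEnabled(true);            // new column added in this commit
f.setDisplayStatus("queuing"); // new column added in this commit
f.setCreateTime(new Date());
f.setUpdateTime(new Date());
f.setDeleted(false);
int rows = uploadFileMapper.batchInsertUploadFiles(Collections.singletonList(f));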