对话流实现 文件上传

This commit is contained in:
2025-11-06 16:43:28 +08:00
parent d9d62e22de
commit 0bb4853d54
35 changed files with 1748 additions and 575 deletions

View File

@@ -1,7 +1,7 @@
package org.xyzh.ai.client;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONObject;
import jakarta.annotation.PostConstruct;
import okhttp3.*;
@@ -13,6 +13,7 @@ import org.xyzh.ai.client.dto.*;
import org.xyzh.ai.client.callback.StreamCallback;
import org.xyzh.ai.config.DifyConfig;
import org.xyzh.ai.exception.DifyException;
import org.xyzh.api.ai.dto.DifyFileInfo;
import java.io.BufferedReader;
import java.io.File;
@@ -37,7 +38,6 @@ public class DifyApiClient {
private OkHttpClient httpClient;
private OkHttpClient streamHttpClient;
private final ObjectMapper objectMapper = new ObjectMapper();
@PostConstruct
public void init() {
@@ -69,7 +69,7 @@ public class DifyApiClient {
String url = difyConfig.getFullApiUrl("/datasets");
try {
String jsonBody = objectMapper.writeValueAsString(request);
String jsonBody = JSON.toJSONString(request);
Request httpRequest = new Request.Builder()
.url(url)
.header("Authorization", "Bearer " + getApiKey(apiKey))
@@ -85,7 +85,7 @@ public class DifyApiClient {
throw new DifyException("创建知识库失败: " + responseBody);
}
return objectMapper.readValue(responseBody, DatasetCreateResponse.class);
return JSON.parseObject(responseBody, DatasetCreateResponse.class);
}
} catch (IOException e) {
logger.error("创建知识库异常", e);
@@ -114,7 +114,7 @@ public class DifyApiClient {
throw new DifyException("查询知识库列表失败: " + responseBody);
}
return objectMapper.readValue(responseBody, DatasetListResponse.class);
return JSON.parseObject(responseBody, DatasetListResponse.class);
}
} catch (IOException e) {
logger.error("查询知识库列表异常", e);
@@ -143,7 +143,7 @@ public class DifyApiClient {
throw new DifyException("查询知识库详情失败: " + responseBody);
}
return objectMapper.readValue(responseBody, DatasetDetailResponse.class);
return JSON.parseObject(responseBody, DatasetDetailResponse.class);
}
} catch (IOException e) {
logger.error("查询知识库详情异常", e);
@@ -159,7 +159,7 @@ public class DifyApiClient {
String url = difyConfig.getFullApiUrl("/datasets/" + datasetId);
try {
String jsonBody = objectMapper.writeValueAsString(request);
String jsonBody = JSON.toJSONString(request);
Request httpRequest = new Request.Builder()
.url(url)
.header("Authorization", "Bearer " + getApiKey(apiKey))
@@ -207,6 +207,48 @@ public class DifyApiClient {
}
}
// ===================== 对话文件上传 API =====================
/**
 * Uploads a local file to Dify for use in chat (multimodal, e.g. image + text).
 *
 * @param file             the local file to upload
 * @param originalFilename the original client-side file name forwarded to Dify
 * @param user             end-user identifier required by the Dify files API
 * @param apiKey           API key; resolved via getApiKey (falls back to default)
 * @return the parsed file descriptor (id, name, size, ...) returned by Dify
 * @throws DifyException when the HTTP call fails or Dify answers non-2xx
 */
public DifyFileInfo uploadFileForChat(File file, String originalFilename, String user, String apiKey) {
    String endpoint = difyConfig.getFullApiUrl("/files/upload");
    try {
        // Dify expects multipart/form-data with a "file" part and a "user" part.
        RequestBody filePart = RequestBody.create(file, MediaType.parse("application/octet-stream"));
        MultipartBody multipart = new MultipartBody.Builder()
                .setType(MultipartBody.FORM)
                .addFormDataPart("file", originalFilename, filePart)
                .addFormDataPart("user", user)
                .build();
        Request httpRequest = new Request.Builder()
                .url(endpoint)
                .header("Authorization", "Bearer " + getApiKey(apiKey))
                .post(multipart)
                .build();
        try (Response response = httpClient.newCall(httpRequest).execute()) {
            ResponseBody body = response.body();
            String responseBody = body != null ? body.string() : "";
            if (!response.isSuccessful()) {
                logger.error("上传对话文件失败: {} - {}", response.code(), responseBody);
                throw new DifyException("上传对话文件失败: " + responseBody);
            }
            return JSON.parseObject(responseBody, DifyFileInfo.class);
        }
    } catch (IOException e) {
        logger.error("上传对话文件异常", e);
        throw new DifyException("上传对话文件异常: " + e.getMessage(), e);
    }
}
// ===================== 文档管理 API =====================
/**
@@ -235,7 +277,7 @@ public class DifyApiClient {
bodyBuilder.addFormDataPart("indexing_technique", uploadRequest.getIndexingTechnique());
}
if (uploadRequest.getProcessRule() != null) {
bodyBuilder.addFormDataPart("process_rule", objectMapper.writeValueAsString(uploadRequest.getProcessRule()));
bodyBuilder.addFormDataPart("process_rule", JSON.toJSONString(uploadRequest.getProcessRule()));
}
Request httpRequest = new Request.Builder()
@@ -252,7 +294,7 @@ public class DifyApiClient {
throw new DifyException("上传文档失败: " + responseBody);
}
return objectMapper.readValue(responseBody, DocumentUploadResponse.class);
return JSON.parseObject(responseBody, DocumentUploadResponse.class);
}
} catch (IOException e) {
logger.error("上传文档异常", e);
@@ -281,7 +323,7 @@ public class DifyApiClient {
throw new DifyException("查询文档状态失败: " + responseBody);
}
return objectMapper.readValue(responseBody, DocumentStatusResponse.class);
return JSON.parseObject(responseBody, DocumentStatusResponse.class);
}
} catch (IOException e) {
logger.error("查询文档状态异常", e);
@@ -310,7 +352,7 @@ public class DifyApiClient {
throw new DifyException("查询文档列表失败: " + responseBody);
}
return objectMapper.readValue(responseBody, DocumentListResponse.class);
return JSON.parseObject(responseBody, DocumentListResponse.class);
}
} catch (IOException e) {
logger.error("查询文档列表异常", e);
@@ -354,7 +396,7 @@ public class DifyApiClient {
String url = difyConfig.getFullApiUrl("/datasets/" + datasetId + "/retrieve");
try {
String jsonBody = objectMapper.writeValueAsString(request);
String jsonBody = JSON.toJSONString(request);
Request httpRequest = new Request.Builder()
.url(url)
.header("Authorization", "Bearer " + getApiKey(apiKey))
@@ -370,7 +412,7 @@ public class DifyApiClient {
throw new DifyException("知识库检索失败: " + responseBody);
}
return objectMapper.readValue(responseBody, RetrievalResponse.class);
return JSON.parseObject(responseBody, RetrievalResponse.class);
}
} catch (IOException e) {
logger.error("知识库检索异常", e);
@@ -390,7 +432,7 @@ public class DifyApiClient {
// 设置为流式模式
request.setResponseMode("streaming");
String jsonBody = objectMapper.writeValueAsString(request);
String jsonBody = JSON.toJSONString(request);
Request httpRequest = new Request.Builder()
.url(url)
.header("Authorization", "Bearer " + getApiKey(apiKey))
@@ -421,9 +463,9 @@ public class DifyApiClient {
}
if (!data.isEmpty()) {
// 解析SSE数据
JsonNode jsonNode = objectMapper.readTree(data);
String event = jsonNode.has("event") ? jsonNode.get("event").asText() : "";
// 使用Fastjson2解析SSE数据
JSONObject jsonNode = JSON.parseObject(data);
String event = jsonNode.containsKey("event") ? jsonNode.getString("event") : "";
// 转发所有事件到回调(包含完整数据)
callback.onEvent(event, data);
@@ -432,8 +474,8 @@ public class DifyApiClient {
case "message":
case "agent_message":
// 消息内容
if (jsonNode.has("answer")) {
callback.onMessage(jsonNode.get("answer").asText());
if (jsonNode.containsKey("answer")) {
callback.onMessage(jsonNode.getString("answer"));
}
break;
case "message_end":
@@ -442,8 +484,8 @@ public class DifyApiClient {
break;
case "error":
// 错误事件
String errorMsg = jsonNode.has("message") ?
jsonNode.get("message").asText() : "未知错误";
String errorMsg = jsonNode.containsKey("message") ?
jsonNode.getString("message") : "未知错误";
callback.onError(new DifyException(errorMsg));
return;
// 其他事件workflow_started、node_started、node_finished等
@@ -481,7 +523,7 @@ public class DifyApiClient {
// 设置为阻塞模式
request.setResponseMode("blocking");
String jsonBody = objectMapper.writeValueAsString(request);
String jsonBody = JSON.toJSONString(request);
Request httpRequest = new Request.Builder()
.url(url)
.header("Authorization", "Bearer " + getApiKey(apiKey))
@@ -497,7 +539,7 @@ public class DifyApiClient {
throw new DifyException("阻塞式对话失败: " + responseBody);
}
return objectMapper.readValue(responseBody, ChatResponse.class);
return JSON.parseObject(responseBody, ChatResponse.class);
}
} catch (IOException e) {
logger.error("阻塞式对话异常", e);
@@ -512,7 +554,7 @@ public class DifyApiClient {
String url = difyConfig.getFullApiUrl("/chat-messages/" + taskId + "/stop");
try {
String jsonBody = objectMapper.writeValueAsString(new StopRequest(userId));
String jsonBody = JSON.toJSONString(new StopRequest(userId));
Request httpRequest = new Request.Builder()
.url(url)
.header("Authorization", "Bearer " + getApiKey(apiKey))
@@ -546,7 +588,7 @@ public class DifyApiClient {
try {
FeedbackRequest feedbackRequest = new FeedbackRequest(rating, userId, feedback);
String jsonBody = objectMapper.writeValueAsString(feedbackRequest);
String jsonBody = JSON.toJSONString(feedbackRequest);
Request httpRequest = new Request.Builder()
.url(url)
@@ -614,7 +656,7 @@ public class DifyApiClient {
throw new DifyException("获取对话历史失败: " + responseBody);
}
return objectMapper.readValue(responseBody, MessageHistoryResponse.class);
return JSON.parseObject(responseBody, MessageHistoryResponse.class);
}
} catch (IOException e) {
logger.error("获取对话历史异常", e);
@@ -656,7 +698,7 @@ public class DifyApiClient {
throw new DifyException("获取对话列表失败: " + responseBody);
}
return objectMapper.readValue(responseBody, ConversationListResponse.class);
return JSON.parseObject(responseBody, ConversationListResponse.class);
}
} catch (IOException e) {
logger.error("获取对话列表异常", e);
@@ -710,7 +752,7 @@ public class DifyApiClient {
try {
String jsonBody = requestBody instanceof String ?
(String) requestBody : objectMapper.writeValueAsString(requestBody);
(String) requestBody : JSON.toJSONString(requestBody);
Request httpRequest = new Request.Builder()
.url(url)
@@ -747,7 +789,7 @@ public class DifyApiClient {
try {
String jsonBody = requestBody instanceof String ?
(String) requestBody : objectMapper.writeValueAsString(requestBody);
(String) requestBody : JSON.toJSONString(requestBody);
Request httpRequest = new Request.Builder()
.url(url)

View File

@@ -2,6 +2,8 @@ package org.xyzh.ai.client.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import org.xyzh.api.ai.dto.DifyFileInfo;
import java.util.List;
import java.util.Map;
@@ -45,7 +47,7 @@ public class ChatRequest {
/**
* 上传的文件列表
*/
private List<FileInfo> files;
private List<DifyFileInfo> files;
/**
* 自动生成标题
@@ -70,29 +72,5 @@ public class ChatRequest {
@JsonProperty("max_tokens")
private Integer maxTokens;
@Data
public static class FileInfo {
/**
* 文件类型image、document、audio、video
*/
private String type;
/**
* 传输方式remote_url、local_file
*/
@JsonProperty("transfer_method")
private String transferMethod;
/**
* 文件URL或ID
*/
private String url;
/**
* 本地文件上传ID
*/
@JsonProperty("upload_file_id")
private String uploadFileId;
}
}

View File

@@ -30,7 +30,8 @@ public class DifyConfig {
/**
* Dify API密钥默认密钥可被智能体的密钥覆盖
*/
private String apiKey="app-PTHzp2DsLPyiUrDYTXBGxL1f";
// private String apiKey="app-PTHzp2DsLPyiUrDYTXBGxL1f";
private String apiKey="app-fwOqGFLTsZtekCQYlOmj9f8x";
/**
* 请求超时时间(秒)

View File

@@ -1,5 +1,6 @@
package org.xyzh.ai.controller;
import com.alibaba.fastjson2.JSON;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
@@ -10,12 +11,13 @@ import org.xyzh.api.ai.history.AiChatHistoryService;
import org.xyzh.common.core.domain.ResultDomain;
import org.xyzh.common.core.page.PageDomain;
import org.xyzh.common.core.page.PageParam;
import org.xyzh.api.ai.dto.DifyFileInfo;
import org.xyzh.common.dto.ai.TbAiConversation;
import org.xyzh.common.dto.ai.TbAiMessage;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* @description AI对话控制器
@@ -38,35 +40,55 @@ public class AiChatController {
// ===================== 对话相关 =====================
/**
* @description 流式对话SSE
* @param agentId 智能体ID
* @param conversationId 会话ID
* @param query 用户问题
* @param knowledgeIds 知识库ID列表逗号分隔
* @return SseEmitter SSE流式推送对象
* @description 准备流式对话会话POST接收复杂参数
* @param requestBody 请求体agentId, conversationId, query, files
* @return ResultDomain<String> 返回sessionId
* @author AI Assistant
* @since 2025-11-04
* @since 2025-11-06
*/
@GetMapping(value = "/stream", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
public SseEmitter streamChat(
@RequestParam(name = "agentId") String agentId,
@RequestParam(name = "conversationId", required = false) String conversationId,
@RequestParam(name = "query") String query,
@RequestParam(name = "knowledgeIds", required = false) String knowledgeIds) {
@PostMapping("/stream/prepare")
public ResultDomain<String> prepareStreamChat(@RequestBody Map<String, Object> requestBody) {
String agentId = (String) requestBody.get("agentId");
String conversationId = (String) requestBody.get("conversationId");
String query = (String) requestBody.get("query");
// 解析knowledgeIds
List<String> knowledgeIdList = null;
if (knowledgeIds != null && !knowledgeIds.isEmpty()) {
knowledgeIdList = Arrays.asList(knowledgeIds.split(","));
// 转换 files 数据
@SuppressWarnings("unchecked")
List<Map<String, Object>> filesRaw = (List<Map<String, Object>>) requestBody.get("files");
List<DifyFileInfo> filesData = null;
if (filesRaw != null && !filesRaw.isEmpty()) {
filesData = filesRaw.stream()
.map(fileMap -> {
// 使用Fastjson2转换Map为对象
String json = JSON.toJSONString(fileMap);
return JSON.parseObject(json, DifyFileInfo.class);
})
.collect(Collectors.toList());
}
log.info("流式对话: agentId={}, conversationId={}, query={}", agentId, conversationId, query);
return chatService.streamChatWithSse(agentId, conversationId, query, knowledgeIdList);
log.info("准备流式对话会话: agentId={}, query={}, files={}",
agentId, query, filesData != null ? filesData.size() : 0);
return chatService.prepareChatSession(agentId, conversationId, query, filesData);
}
/**
 * @description Streaming chat (SSE) — GET endpoint that opens the SSE connection.
 *              The session is created beforehand via POST /stream/prepare, which
 *              returns the sessionId consumed here.
 * @param sessionId session identifier issued by the prepare step
 * @return SseEmitter SSE streaming emitter
 * @author AI Assistant
 * @since 2025-11-06
 */
@GetMapping(value = "/stream", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
public SseEmitter streamChat(@RequestParam(name = "sessionId") String sessionId) {
log.info("建立SSE连接: sessionId={}", sessionId);
return chatService.streamChatWithSse(sessionId);
}
/**
* @description 阻塞式对话
* @param requestBody 请求体agentId, conversationId, query, knowledgeIds
* @param requestBody 请求体agentId, conversationId, query
* @return ResultDomain<TbAiMessage>
* @author AI Assistant
* @since 2025-11-04
@@ -76,11 +98,8 @@ public class AiChatController {
String agentId = (String) requestBody.get("agentId");
String conversationId = (String) requestBody.get("conversationId");
String query = (String) requestBody.get("query");
@SuppressWarnings("unchecked")
List<String> knowledgeIds = (List<String>) requestBody.get("knowledgeIds");
log.info("阻塞式对话: agentId={}, conversationId={}, query={}", agentId, conversationId, query);
return chatService.blockingChat(agentId, conversationId, query, knowledgeIds);
return chatService.blockingChat(agentId, conversationId, query);
}

View File

@@ -29,6 +29,22 @@ public class AiFileUploadController {
@Autowired
private AiUploadFileService uploadFileService;
/**
 * @description Uploads a file for chat use (multimodal image + text).
 * @param file the multipart file to upload
 * @param agentId agent ID — presumably used to resolve the Dify API key for the
 *                upload; confirm in AiUploadFileService.uploadFileForChat
 * @return ResultDomain<Map<String, Object>> Dify file info (id, name, size, ...)
 * @author AI Assistant
 * @since 2025-11-06
 */
@PostMapping("/upload-for-chat")
public ResultDomain<java.util.Map<String, Object>> uploadFileForChat(
@RequestParam("file") MultipartFile file,
@RequestParam("agentId") String agentId) {
log.info("上传对话文件: fileName={}, agentId={}", file.getOriginalFilename(), agentId);
return uploadFileService.uploadFileForChat(file, agentId);
}
/**
* @description 上传文件到知识库
* @param knowledgeId 知识库ID
@@ -78,6 +94,19 @@ public class AiFileUploadController {
return uploadFileService.getFileById(fileId);
}
/**
 * @description Queries the file(s) associated with a chat message.
 *              NOTE(review): the description implies a file *list*, but the
 *              return type is ResultDomain<TbAiUploadFile> (a single record) —
 *              confirm against AiUploadFileService.listFilesByMessageId and
 *              widen to a List payload if that is the actual contract.
 * @param messageId message ID
 * @return ResultDomain<TbAiUploadFile>
 * @author AI Assistant
 * @since 2025-11-06
 */
@GetMapping("/message/{messageId}")
public ResultDomain<TbAiUploadFile> getMessageFiles(@PathVariable(name = "messageId") String messageId) {
log.info("查询消息文件列表: messageId={}", messageId);
return uploadFileService.listFilesByMessageId(messageId);
}
/**
* @description 查询知识库的文件列表
* @param knowledgeId 知识库ID

View File

@@ -68,4 +68,18 @@ public interface AiUploadFileMapper extends BaseMapper<TbAiUploadFile> {
* @since 2025-10-15
*/
List<TbAiUploadFile> selectAiUploadFiles(TbAiUploadFile filter);
/**
 * Looks up the upload-file rows linked to a chat message.
 * @param messageId message ID
 * @return List<TbAiUploadFile> matching file records (empty when none)
 */
List<TbAiUploadFile> selectFilesByMessageId(@Param("messageId") String messageId);
/**
 * Inserts multiple upload-file rows in a single batch statement.
 * @param files file records to persist
 * @return number of rows inserted
 */
int batchInsertUploadFiles(@Param("files") List<TbAiUploadFile> files);
}

View File

@@ -0,0 +1,25 @@
package org.xyzh.ai.service;
import java.util.List;
/**
 * @description Redis management service for AI knowledge bases. Mirrors
 *              knowledge-base configuration into Redis so chat requests can
 *              resolve accessible dataset IDs without a database round trip.
 * @filename AiKnowledgeRedisService.java
 * @author AI Assistant
 * @copyright xyzh
 * @since 2025-11-06
 */
public interface AiKnowledgeRedisService {
/**
 * Loads every knowledge base into Redis.
 * Invoked once at system startup.
 */
void initializeAllKnowledgeToRedis();
/**
 * Removes all cached knowledge-base configuration from Redis.
 */
void clearAllKnowledgeConfig();
}

View File

@@ -76,17 +76,6 @@ public class AiAgentConfigServiceImpl implements AiAgentConfigService {
agentConfig.setStatus(1); // 默认启用
}
// 设置默认模型参数
if (agentConfig.getTemperature() == null) {
agentConfig.setTemperature(new BigDecimal("0.7"));
}
if (agentConfig.getMaxTokens() == null) {
agentConfig.setMaxTokens(2000);
}
if (agentConfig.getTopP() == null) {
agentConfig.setTopP(new BigDecimal("1.0"));
}
// 5. 插入数据库
int rows = agentConfigMapper.insertAgentConfig(agentConfig);
if (rows > 0) {

View File

@@ -1,7 +1,7 @@
package org.xyzh.ai.service.impl;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONWriter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@@ -38,8 +38,6 @@ public class AiChatHistoryServiceImpl implements AiChatHistoryService {
@Autowired
private AiMessageMapper messageMapper;
private final ObjectMapper objectMapper = new ObjectMapper();
@Override
public PageDomain<TbAiConversation> pageUserConversations(
String agentId,
@@ -539,17 +537,13 @@ public class AiChatHistoryServiceImpl implements AiChatHistoryService {
exportData.put("messages", messages);
exportData.put("exportTime", new Date());
// 转JSON
String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(exportData);
// 使用Fastjson2转JSON格式化输出
String json = JSON.toJSONString(exportData, JSONWriter.Feature.PrettyFormat);
log.info("导出会话JSON成功: {}", conversationId);
resultDomain.success("导出成功", json);
return resultDomain;
} catch (JsonProcessingException e) {
log.error("JSON序列化失败", e);
resultDomain.fail("导出失败: JSON序列化错误");
return resultDomain;
} catch (Exception e) {
log.error("导出会话JSON失败", e);
resultDomain.fail("导出失败: " + e.getMessage());

View File

@@ -1,7 +1,7 @@
package org.xyzh.ai.service.impl;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@@ -12,24 +12,31 @@ import org.xyzh.ai.client.DifyApiClient;
import org.xyzh.ai.client.callback.StreamCallback;
import org.xyzh.ai.client.dto.ChatRequest;
import org.xyzh.ai.client.dto.ChatResponse;
import org.xyzh.ai.config.DifyConfig;
import org.xyzh.ai.exception.DifyException;
import org.xyzh.ai.mapper.AiAgentConfigMapper;
import org.xyzh.ai.mapper.AiConversationMapper;
import org.xyzh.ai.mapper.AiMessageMapper;
import org.xyzh.ai.mapper.AiUploadFileMapper;
import org.xyzh.ai.service.AiKnowledgeRedisService;
import org.xyzh.api.ai.chat.AiChatService;
import org.xyzh.api.ai.dto.DifyFileInfo;
import org.xyzh.common.core.domain.LoginDomain;
import org.xyzh.common.core.domain.ResultDomain;
import org.xyzh.common.dto.ai.TbAiAgentConfig;
import org.xyzh.common.dto.ai.TbAiConversation;
import org.xyzh.common.dto.ai.TbAiMessage;
import org.xyzh.common.dto.ai.TbAiUploadFile;
import org.xyzh.common.dto.user.TbSysUser;
import org.xyzh.common.vo.UserDeptRoleVO;
import org.xyzh.system.utils.LoginUtil;
import org.springframework.data.redis.core.RedisTemplate;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
/**
@@ -56,20 +63,104 @@ public class AiChatServiceImpl implements AiChatService {
private DifyApiClient difyApiClient;
@Autowired
private DifyConfig difyConfig;
private AiKnowledgeRedisService knowledgeRedisService;
private final ObjectMapper objectMapper = new ObjectMapper();
@Autowired
private AiUploadFileMapper uploadFileMapper;
@Autowired
private RedisTemplate<String, Object> redisTemplate;
// 异步任务线程池(用于异步生成摘要等后台任务)
private final ExecutorService executorService = Executors.newFixedThreadPool(3);
// Redis会话key前缀
private static final String CHAT_SESSION_PREFIX = "chat:session:";
@Override
public SseEmitter streamChatWithSse(String agentId, String conversationId, String query, List<String> knowledgeIds) {
/**
 * Creates a short-lived chat session in Redis and returns its sessionId.
 * The frontend POSTs the complex parameters (query, files, ...) here first,
 * then opens the SSE stream via GET /stream?sessionId=..., which consumes
 * the stored session exactly once.
 *
 * @param agentId        agent ID (required)
 * @param conversationId existing conversation ID; null starts a new conversation
 * @param query          user question (required)
 * @param filesData      optional files attached to the message
 * @return ResultDomain whose data is the generated sessionId
 */
public ResultDomain<String> prepareChatSession(String agentId, String conversationId, String query, List<DifyFileInfo> filesData) {
    ResultDomain<String> result = new ResultDomain<>();
    try {
        // Validate required parameters up front.
        if (!StringUtils.hasText(agentId)) {
            result.fail("智能体ID不能为空");
            return result;
        }
        if (!StringUtils.hasText(query)) {
            result.fail("问题不能为空");
            return result;
        }
        String sessionId = UUID.randomUUID().toString();
        // Bundle everything the SSE endpoint will need later.
        Map<String, Object> payload = new HashMap<>();
        payload.put("agentId", agentId);
        payload.put("conversationId", conversationId);
        payload.put("query", query);
        payload.put("filesData", filesData);
        payload.put("createTime", System.currentTimeMillis());
        // One-shot session; expires automatically after 5 minutes.
        redisTemplate.opsForValue().set(CHAT_SESSION_PREFIX + sessionId, payload, 5, TimeUnit.MINUTES);
        log.info("创建对话会话: sessionId={}, agentId={}", sessionId, agentId);
        result.success("会话创建成功", sessionId);
        return result;
    } catch (Exception e) {
        log.error("创建对话会话失败", e);
        result.fail("创建会话失败: " + e.getMessage());
        return result;
    }
}
@Override
@SuppressWarnings("unchecked")
public SseEmitter streamChatWithSse(String sessionId) {
// 创建SseEmitter设置超时时间为5分钟
SseEmitter emitter = new SseEmitter(5 * 60 * 1000L);
try {
// 1. 参数验证
// 1. 从Redis获取并删除会话数据
String redisKey = CHAT_SESSION_PREFIX + sessionId;
Map<String, Object> sessionData = (Map<String, Object>) redisTemplate.opsForValue().get(redisKey);
if (sessionData == null) {
emitter.send(SseEmitter.event().name("error").data("会话不存在或已过期"));
emitter.complete();
return emitter;
}
// 删除Redis中的会话数据一次性使用
redisTemplate.delete(redisKey);
String agentId = (String) sessionData.get("agentId");
String conversationId = (String) sessionData.get("conversationId");
String query = (String) sessionData.get("query");
// 转换文件数据
List<DifyFileInfo> filesData = null;
Object filesObj = sessionData.get("filesData");
if (filesObj != null) {
if (filesObj instanceof List) {
filesData = new ArrayList<>();
for (Object fileObj : (List<?>) filesObj) {
if (fileObj instanceof Map) {
// 使用Fastjson2转换Map为对象
String json = JSON.toJSONString(fileObj);
DifyFileInfo fileInfo = JSON.parseObject(json, DifyFileInfo.class);
filesData.add(fileInfo);
} else if (fileObj instanceof DifyFileInfo) {
filesData.add((DifyFileInfo) fileObj);
}
}
}
}
// 2. 参数验证
if (!StringUtils.hasText(agentId)) {
emitter.send(SseEmitter.event().name("error").data("智能体ID不能为空"));
emitter.complete();
@@ -118,7 +209,7 @@ public class AiChatServiceImpl implements AiChatService {
}
} else {
// 创建新会话
ResultDomain<TbAiConversation> createResult = createConversation(agentId, null);
ResultDomain<TbAiConversation> createResult = createConversation(agentId, query.substring(0, 20));
if (!createResult.isSuccess()) {
emitter.send(SseEmitter.event().name("error").data(createResult.getMessage()));
emitter.complete();
@@ -131,7 +222,8 @@ public class AiChatServiceImpl implements AiChatService {
// 5. 创建用户消息记录
TbAiMessage userMessage = new TbAiMessage();
userMessage.setID(UUID.randomUUID().toString());
String userMessageId = UUID.randomUUID().toString();
userMessage.setID(userMessageId);
userMessage.setConversationID(finalConversationId);
userMessage.setAgentID(agentId);
userMessage.setRole("user");
@@ -140,10 +232,39 @@ public class AiChatServiceImpl implements AiChatService {
userMessage.setUpdateTime(new Date());
userMessage.setDeleted(false);
userMessage.setUserID(currentUser.getID());
// 处理文件关联将文件ID列表转换为JSON数组保存
if (filesData != null && !filesData.isEmpty()) {
try {
// 提取系统文件ID列表从前端传来的sysFileId
List<String> sysFileIds = new ArrayList<>();
for (DifyFileInfo fileInfo : filesData) {
// 使用sysFileId字段
if (fileInfo.getSysFileId() != null) {
sysFileIds.add(fileInfo.getSysFileId());
}
}
if (!sysFileIds.isEmpty()) {
// 使用Fastjson2序列化为JSON字符串
userMessage.setFileIDs(JSON.toJSONString(sysFileIds));
}
} catch (Exception e) {
log.warn("保存文件ID列表失败", e);
}
}
messageMapper.insertMessage(userMessage);
// 6. 保存文件关联记录到tb_ai_upload_file
if (filesData != null && !filesData.isEmpty()) {
saveMessageFileRecords(userMessageId, finalConversationId, currentUser.getID(), filesData);
}
// 注意AI消息记录将在获取到Dify的task_id后创建
// 6. 从Redis获取当前用户可访问的知识库ID列表
List<String> knowledgeIds = getKnowledgeIdsByUser(currentUser);
// 7. 构建Dify请求
ChatRequest chatRequest = new ChatRequest();
chatRequest.setQuery(query);
@@ -152,15 +273,18 @@ public class AiChatServiceImpl implements AiChatService {
if (StringUtils.hasText(conversation.getDifyConversationId())) {
chatRequest.setConversationId(conversation.getDifyConversationId());
}
// 设置知识库ID列表从Redis获取
if (knowledgeIds != null && !knowledgeIds.isEmpty()) {
chatRequest.setDatasetIds(knowledgeIds);
log.info("使用知识库: {}", knowledgeIds);
}
chatRequest.setTemperature(agent.getTemperature() != null ?
agent.getTemperature().doubleValue() : difyConfig.getChat().getDefaultTemperature());
chatRequest.setMaxTokens(agent.getMaxTokens() != null ?
agent.getMaxTokens() : difyConfig.getChat().getDefaultMaxTokens());
chatRequest.setResponseMode("streaming");
Map<String, Object> inputs = new HashMap<>();
inputs.put("connectInternet", agent.getConnectInternet());
chatRequest.setInputs(inputs);
chatRequest.setFiles(filesData);
// 6. 调用Dify流式对话
final TbAiConversation finalConversation = conversation;
StringBuilder fullAnswer = new StringBuilder();
@@ -194,13 +318,13 @@ public class AiChatServiceImpl implements AiChatService {
return; // 已停止,不再处理
}
try {
// 解析metadata
JsonNode json = objectMapper.readTree(metadata);
if (json.has("conversation_id")) {
difyConversationId.set(json.get("conversation_id").asText());
// 使用Fastjson2解析metadata
JSONObject json = JSON.parseObject(metadata);
if (json.containsKey("conversation_id")) {
difyConversationId.set(json.getString("conversation_id"));
}
if (json.has("id")) {
difyMessageId.set(json.get("id").asText());
if (json.containsKey("id")) {
difyMessageId.set(json.getString("id"));
}
// 更新AI消息内容使用task_id作为消息ID
@@ -247,9 +371,9 @@ public class AiChatServiceImpl implements AiChatService {
// 如果还没有创建消息记录尝试从任何事件中提取task_id
if (!messageCreated.get()) {
JsonNode json = objectMapper.readTree(eventData);
if (json.has("task_id")) {
String difyTaskId = json.get("task_id").asText();
JSONObject json = JSON.parseObject(eventData);
if (json.containsKey("task_id")) {
String difyTaskId = json.getString("task_id");
// 只有在taskId为空时才设置并创建消息
if (taskId.get() == null) {
@@ -363,9 +487,7 @@ public class AiChatServiceImpl implements AiChatService {
public ResultDomain<TbAiMessage> blockingChat(
String agentId,
String conversationId,
String query,
List<String> knowledgeIds) {
String query){
ResultDomain<TbAiMessage> resultDomain = new ResultDomain<>();
try {
@@ -426,21 +548,12 @@ public class AiChatServiceImpl implements AiChatService {
if (StringUtils.hasText(conversation.getDifyConversationId())) {
chatRequest.setConversationId(conversation.getDifyConversationId());
}
if (knowledgeIds != null && !knowledgeIds.isEmpty()) {
chatRequest.setDatasetIds(knowledgeIds);
}
if (agent.getTemperature() != null) {
chatRequest.setTemperature(agent.getTemperature().doubleValue());
} else {
chatRequest.setTemperature(difyConfig.getChat().getDefaultTemperature());
}
if (agent.getMaxTokens() != null) {
chatRequest.setMaxTokens(agent.getMaxTokens());
} else {
chatRequest.setMaxTokens(difyConfig.getChat().getDefaultMaxTokens());
}
chatRequest.setResponseMode("blocking");
Map<String, Object> inputs = new HashMap<>();
inputs.put("connectInternet", agent.getConnectInternet());
chatRequest.setInputs(inputs);
chatRequest.setFiles(null);
// 调用Dify阻塞式对话
ChatResponse chatResponse = difyApiClient.blockingChat(chatRequest, agent.getDifyApiKey());
@@ -809,7 +922,7 @@ public class AiChatServiceImpl implements AiChatService {
TbAiMessage userQuestion = null;
for (int i = messages.size() - 1; i >= 0; i--) {
if ("user".equals(messages.get(i).getRole()) &&
messages.get(i).getCreateTime().before(originalMessage.getCreateTime())) {
!messages.get(i).getCreateTime().after(originalMessage.getCreateTime())) {
userQuestion = messages.get(i);
break;
}
@@ -821,13 +934,23 @@ public class AiChatServiceImpl implements AiChatService {
return emitter;
}
// 直接返回streamChatWithSse的结果
return streamChatWithSse(
originalMessage.getAgentID(),
originalMessage.getConversationID(),
userQuestion.getContent(),
null
);
// 重新生成创建临时session并调用新的streamChatWithSse
String sessionId = UUID.randomUUID().toString();
// 构建会话数据
Map<String, Object> sessionData = new HashMap<>();
sessionData.put("agentId", originalMessage.getAgentID());
sessionData.put("conversationId", originalMessage.getConversationID());
sessionData.put("query", userQuestion.getContent());
sessionData.put("filesData", null); // 重新生成不需要传文件
sessionData.put("createTime", System.currentTimeMillis());
// 存入Redis5分钟过期
String redisKey = CHAT_SESSION_PREFIX + sessionId;
redisTemplate.opsForValue().set(redisKey, sessionData, 5, TimeUnit.MINUTES);
log.info("重新生成回答: messageId={}, sessionId={}", messageId, sessionId);
return streamChatWithSse(sessionId);
} catch (Exception e) {
log.error("重新生成回答异常", e);
@@ -971,5 +1094,91 @@ public class AiChatServiceImpl implements AiChatService {
return resultDomain;
}
}
/**
 * Persists the tb_ai_upload_file rows that link uploaded files to a message.
 * Best-effort: failures are logged but never interrupt the chat flow.
 *
 * @param messageId      message ID the files belong to
 * @param conversationId conversation ID
 * @param userId         owning user ID
 * @param filesData      file descriptors produced by the chat-file upload step
 */
private void saveMessageFileRecords(String messageId, String conversationId, String userId, List<DifyFileInfo> filesData) {
    try {
        Date timestamp = new Date();
        List<TbAiUploadFile> records = new ArrayList<>();
        for (DifyFileInfo info : filesData) {
            TbAiUploadFile record = new TbAiUploadFile();
            record.setID(UUID.randomUUID().toString());
            record.setUserID(userId);
            record.setConversationID(conversationId);
            record.setMessageID(messageId); // bind the file to its message
            record.setSysFileId(info.getSysFileId()); // system file id supplied by the frontend
            record.setFileName(info.getName());
            record.setFilePath(info.getFilePath() != null ? info.getFilePath() : "");
            record.setFileSize(info.getSize() != null ? info.getSize().longValue() : 0L);
            record.setFileType(info.getExtension());
            record.setMimeType(info.getMimeType());
            record.setDifyUploadFileId(info.getUploadFileId()); // Dify-side upload id
            record.setStatus(2); // 2 = completed: chat files are immediately usable
            record.setCreateTime(timestamp);
            record.setUpdateTime(timestamp);
            record.setDeleted(false);
            records.add(record);
        }
        if (!records.isEmpty()) {
            uploadFileMapper.batchInsertUploadFiles(records);
            log.info("消息文件记录已保存: messageId={}, fileCount={}", messageId, records.size());
        }
    } catch (Exception e) {
        // Deliberately swallowed: file bookkeeping must not fail the chat itself.
        log.error("保存消息文件记录失败: messageId={}", messageId, e);
    }
}
/**
 * Resolves the knowledge-base IDs the user may access, based on the dept path
 * of the user's first (primary) department role. IDs come from Redis via
 * AiKnowledgeRedisServiceImpl and include parent-department knowledge bases.
 *
 * @param user current user
 * @return accessible knowledge-base IDs; an empty list when none are available
 *         (never null, so callers can check isEmpty()/iterate safely)
 */
private List<String> getKnowledgeIdsByUser(TbSysUser user) {
    try {
        // The full login domain carries the department/role list.
        LoginDomain loginDomain = LoginUtil.getCurrentLoginDomain();
        if (loginDomain == null || loginDomain.getRoles() == null || loginDomain.getRoles().isEmpty()) {
            log.warn("用户 {} 没有部门角色信息,返回空知识库列表", user.getID());
            return Collections.emptyList();
        }
        // Treat the first department role as the user's primary department.
        UserDeptRoleVO userRole = loginDomain.getRoles().get(0);
        String deptPath = userRole.getDeptPath();
        if (deptPath == null || deptPath.isEmpty()) {
            log.warn("用户 {} 的部门路径为空,返回空知识库列表", user.getID());
            return Collections.emptyList();
        }
        // NOTE(review): getKnowledgeIdsByDeptPath should be promoted onto the
        // AiKnowledgeRedisService interface so this implementation cast goes away.
        List<String> knowledgeIds = ((AiKnowledgeRedisServiceImpl) knowledgeRedisService).getKnowledgeIdsByDeptPath(deptPath);
        if (knowledgeIds == null || knowledgeIds.isEmpty()) {
            log.warn("用户 {} 所在部门路径 {} 没有关联的知识库", user.getID(), deptPath);
            return Collections.emptyList();
        }
        log.info("用户 {} 从部门路径 {} 获取到 {} 个知识库", user.getID(), deptPath, knowledgeIds.size());
        return knowledgeIds;
    } catch (Exception e) {
        log.error("根据用户获取知识库ID失败: userId={}", user.getID(), e);
        return Collections.emptyList();
    }
}
}

View File

@@ -0,0 +1,170 @@
package org.xyzh.ai.service.impl;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;
import org.xyzh.ai.mapper.AiKnowledgeMapper;
import org.xyzh.ai.service.AiKnowledgeRedisService;
import org.xyzh.common.dto.ai.TbAiKnowledge;
import java.util.*;
import java.util.concurrent.TimeUnit;
/**
 * @description AI知识库Redis管理服务实现类
 * <p>
 * Caches, per department, the list of Dify dataset ids owned by that
 * department so chat requests can resolve a user's accessible knowledge
 * bases without querying the database. Redis layout:
 * {@code ai:dept:knowledge:{deptId} -> List<String>} of Dify dataset ids.
 * @filename AiKnowledgeRedisServiceImpl.java
 * @author AI Assistant
 * @copyright xyzh
 * @since 2025-11-06
 */
@Slf4j
@Service
public class AiKnowledgeRedisServiceImpl implements AiKnowledgeRedisService, CommandLineRunner {

    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    @Autowired
    private AiKnowledgeMapper knowledgeMapper;

    /** Redis key prefix: ai:dept:knowledge:{deptId} */
    private static final String REDIS_KEY_DEPT_KNOWLEDGE = "ai:dept:knowledge:";

    /** Cache entry TTL, in days. */
    private static final long REDIS_EXPIRE_DAYS = 7;

    /**
     * Rebuilds the department -> knowledge-base cache on application startup.
     */
    @Override
    public void run(String... args) throws Exception {
        log.info("=== 开始初始化知识库到Redis ===");
        initializeAllKnowledgeToRedis();
        log.info("=== 知识库初始化完成 ===");
    }

    /**
     * Loads every knowledge base that has been synced to Dify, groups the
     * dataset ids by creator department and writes one Redis entry per
     * department. Failures are logged and swallowed so startup never blocks.
     */
    @Override
    public void initializeAllKnowledgeToRedis() {
        try {
            List<TbAiKnowledge> allKnowledges = knowledgeMapper.selectAllKnowledges(new TbAiKnowledge());

            // Group dataset ids by department.
            Map<String, List<String>> deptKnowledgeMap = new HashMap<>();
            for (TbAiKnowledge knowledge : allKnowledges) {
                // Keep only records synced to Dify and not logically deleted.
                // Boolean.TRUE.equals(...) guards against a null `deleted` flag,
                // which a bare !knowledge.getDeleted() would NPE on.
                if (knowledge.getDifyDatasetId() != null && !knowledge.getDifyDatasetId().isEmpty()
                        && !Boolean.TRUE.equals(knowledge.getDeleted())) {
                    String deptId = knowledge.getCreatorDept();
                    if (deptId == null || deptId.isEmpty()) {
                        log.warn("知识库 {} 没有部门信息,跳过", knowledge.getID());
                        continue;
                    }
                    deptKnowledgeMap.computeIfAbsent(deptId, k -> new ArrayList<>())
                            .add(knowledge.getDifyDatasetId());
                }
            }

            if (deptKnowledgeMap.isEmpty()) {
                log.warn("未找到已同步到Dify且有部门信息的知识库");
                return;
            }

            // Write one Redis entry per department.
            int totalCount = 0;
            for (Map.Entry<String, List<String>> entry : deptKnowledgeMap.entrySet()) {
                String deptId = entry.getKey();
                List<String> knowledgeIds = entry.getValue();
                setDeptKnowledgeIds(deptId, knowledgeIds);
                totalCount += knowledgeIds.size();
                log.info("已设置部门 {} 的知识库,共 {} 个", deptId, knowledgeIds.size());
            }
            log.info("知识库初始化完成,共 {} 个部门,{} 个知识库", deptKnowledgeMap.size(), totalCount);
        } catch (Exception e) {
            log.error("初始化知识库到Redis失败", e);
        }
    }

    /**
     * Stores the dataset id list for one department with the standard TTL.
     *
     * @param deptId       department id (Redis key suffix)
     * @param knowledgeIds Dify dataset ids owned by that department
     */
    public void setDeptKnowledgeIds(String deptId, List<String> knowledgeIds) {
        try {
            String key = REDIS_KEY_DEPT_KNOWLEDGE + deptId;
            redisTemplate.opsForValue().set(key, knowledgeIds, REDIS_EXPIRE_DAYS, TimeUnit.DAYS);
            log.debug("已设置部门 {} 的知识库: {}", deptId, knowledgeIds);
        } catch (Exception e) {
            log.error("设置部门知识库失败: deptId={}", deptId, e);
        }
    }

    /**
     * Resolves the dataset ids reachable from a department path: the union of
     * the department's own datasets and those of every ancestor department.
     *
     * @param deptPath department path such as {@code /1/3/5/}
     * @return de-duplicated dataset ids; never {@code null}
     */
    @SuppressWarnings("unchecked")
    public List<String> getKnowledgeIdsByDeptPath(String deptPath) {
        Set<String> knowledgeIdSet = new HashSet<>();
        try {
            if (deptPath == null || deptPath.isEmpty()) {
                log.warn("部门路径为空,返回空知识库列表");
                return new ArrayList<>();
            }

            // Parse the path, e.g. "/1/3/5/" -> [1, 3, 5].
            List<String> deptIdList = new ArrayList<>();
            for (String deptId : deptPath.split("/")) {
                if (deptId != null && !deptId.isEmpty()) {
                    deptIdList.add(deptId);
                }
            }
            if (deptIdList.isEmpty()) {
                log.warn("解析部门路径失败: {}", deptPath);
                return new ArrayList<>();
            }

            // Collect the datasets of the department and all of its ancestors.
            for (String deptId : deptIdList) {
                String key = REDIS_KEY_DEPT_KNOWLEDGE + deptId;
                Object result = redisTemplate.opsForValue().get(key);
                if (result != null) {
                    List<String> deptKnowledgeIds = (List<String>) result;
                    knowledgeIdSet.addAll(deptKnowledgeIds);
                    log.debug("从部门 {} 获取到 {} 个知识库", deptId, deptKnowledgeIds.size());
                }
            }

            List<String> resultList = new ArrayList<>(knowledgeIdSet);
            log.info("根据部门路径 {} 获取到 {} 个知识库(已去重)", deptPath, resultList.size());
            return resultList;
        } catch (Exception e) {
            log.error("根据部门路径获取知识库失败: deptPath={}", deptPath, e);
            return new ArrayList<>();
        }
    }

    /**
     * Removes every cached department -> knowledge-base entry.
     */
    @Override
    public void clearAllKnowledgeConfig() {
        try {
            // keys() can return null/empty; passing null to delete() would NPE.
            // NOTE(review): KEYS blocks Redis on large keyspaces — consider SCAN.
            Set<String> keys = redisTemplate.keys(REDIS_KEY_DEPT_KNOWLEDGE + "*");
            if (keys != null && !keys.isEmpty()) {
                redisTemplate.delete(keys);
            }
            log.info("已清除所有知识库配置");
        } catch (Exception e) {
            log.error("清除所有知识库配置失败", e);
        }
    }
}

View File

@@ -8,19 +8,24 @@ import org.springframework.util.StringUtils;
import org.springframework.web.multipart.MultipartFile;
import org.xyzh.ai.client.DifyApiClient;
import org.xyzh.ai.client.dto.DocumentStatusResponse;
import org.xyzh.api.ai.dto.DifyFileInfo;
import org.xyzh.ai.client.dto.DocumentUploadRequest;
import org.xyzh.ai.client.dto.DocumentUploadResponse;
import org.xyzh.ai.config.DifyConfig;
import org.xyzh.ai.exception.DifyException;
import org.xyzh.ai.exception.FileProcessException;
import org.xyzh.ai.mapper.AiAgentConfigMapper;
import org.xyzh.ai.mapper.AiKnowledgeMapper;
import org.xyzh.ai.mapper.AiUploadFileMapper;
import org.xyzh.api.ai.file.AiUploadFileService;
import org.xyzh.api.file.FileService;
import org.xyzh.common.core.domain.ResultDomain;
import org.xyzh.common.core.page.PageDomain;
import org.xyzh.common.core.page.PageParam;
import org.xyzh.common.dto.ai.TbAiAgentConfig;
import org.xyzh.common.dto.ai.TbAiKnowledge;
import org.xyzh.common.dto.ai.TbAiUploadFile;
import org.xyzh.common.dto.system.TbSysFile;
import org.xyzh.common.dto.user.TbSysUser;
import org.xyzh.system.utils.LoginUtil;
@@ -58,9 +63,119 @@ public class AiUploadFileServiceImpl implements AiUploadFileService {
@Autowired
private DifyConfig difyConfig;
@Autowired
private AiAgentConfigMapper agentConfigMapper;
@Autowired
private FileService fileService;
// 异步处理线程池
private final ExecutorService executorService = Executors.newFixedThreadPool(5);
@Override
public ResultDomain<Map<String, Object>> uploadFileForChat(MultipartFile file, String agentId) {
ResultDomain<Map<String, Object>> resultDomain = new ResultDomain<>();
try {
// 1. 参数验证
if (file == null || file.isEmpty()) {
resultDomain.fail("文件不能为空");
return resultDomain;
}
if (!StringUtils.hasText(agentId)) {
resultDomain.fail("智能体ID不能为空");
return resultDomain;
}
// 2. 获取当前用户
TbSysUser currentUser = LoginUtil.getCurrentUser();
if (currentUser == null) {
resultDomain.fail("用户未登录");
return resultDomain;
}
// 3. 验证文件类型和大小
String originalFilename = file.getOriginalFilename();
if (!isValidFileType(originalFilename)) {
resultDomain.fail("不支持的文件类型");
return resultDomain;
}
long maxSize = difyConfig.getUpload().getMaxSize() * 1024 * 1024; // MB转字节
if (file.getSize() > maxSize) {
resultDomain.fail("文件大小超过限制: " + (maxSize / 1024 / 1024) + "MB");
return resultDomain;
}
// 4. 获取智能体配置
TbAiAgentConfig agent = agentConfigMapper.selectAgentConfigById(agentId);
if (agent == null || agent.getDeleted()) {
resultDomain.fail("智能体不存在");
return resultDomain;
}
// 5. 先保存到系统文件表(永久存储)
ResultDomain<TbSysFile> uploadResult = fileService.uploadFile(
file,
"ai-agent", // 模块名
agentId, // 业务ID智能体ID
currentUser.getID() // 上传者
);
if (!uploadResult.isSuccess() || uploadResult.getData() == null) {
resultDomain.fail("保存文件失败: " + uploadResult.getMessage());
return resultDomain;
}
TbSysFile sysFile = (TbSysFile) uploadResult.getData();
log.info("文件已保存到系统文件表: sysFileId={}, fileName={}", sysFile.getID(), sysFile.getOriginalName());
// 6. 获取已保存文件的File对象直接用于上传到Dify不需要再保存临时文件
File fileToUpload = fileService.getFileByRelativePath(sysFile.getFilePath());
try {
// 7. 调用Dify API上传文件
DifyFileInfo difyResponse = difyApiClient.uploadFileForChat(
fileToUpload,
originalFilename,
currentUser.getID(),
agent.getDifyApiKey());
// 9. 转换为前端需要的格式包含系统文件ID和文件路径
Map<String, Object> fileInfo = new HashMap<>();
fileInfo.put("id", difyResponse.getId()); // Dify文件ID
fileInfo.put("sys_file_id", sysFile.getID()); // 系统文件ID重要用于关联消息
fileInfo.put("file_path", sysFile.getFilePath()); // 文件路径(重要:用于保存记录)
fileInfo.put("name", difyResponse.getName());
fileInfo.put("size", difyResponse.getSize());
fileInfo.put("extension", difyResponse.getExtension());
fileInfo.put("mime_type", difyResponse.getMimeType());
fileInfo.put("type", getFileType(originalFilename));
fileInfo.put("transfer_method", "local_file");
fileInfo.put("upload_file_id", difyResponse.getId()); // Dify上传文件ID
fileInfo.put("file_url", sysFile.getFileUrl()); // 文件访问URL
log.info("对话文件上传成功: sysFileId={}, difyFileId={}", sysFile.getID(), difyResponse.getId());
resultDomain.success("文件上传成功", fileInfo);
return resultDomain;
} catch (DifyException e) {
log.error("上传文件到Dify失败", e);
resultDomain.fail("上传文件失败: " + e.getMessage());
return resultDomain;
} catch (Exception e) {
log.error("上传文件异常", e);
resultDomain.fail("上传文件异常: " + e.getMessage());
return resultDomain;
}
} catch (Exception e) {
log.error("文件上传处理异常", e);
resultDomain.fail("文件上传处理异常: " + e.getMessage());
return resultDomain;
}
}
@Override
@Transactional(rollbackFor = Exception.class)
public ResultDomain<TbAiUploadFile> uploadToKnowledge(
@@ -113,64 +228,72 @@ public class AiUploadFileServiceImpl implements AiUploadFileService {
return resultDomain;
}
// 5. 保存临时文件
File tempFile = saveTempFile(file);
if (tempFile == null) {
resultDomain.fail("保存临时文件失败");
// 5. 保存到系统文件表(永久存储)
ResultDomain<TbSysFile> uploadResult = fileService.uploadFile(
file,
"ai-knowledge", // 模块名
knowledgeId, // 业务ID知识库ID
currentUser.getID() // 上传者
);
if (!uploadResult.isSuccess() || uploadResult.getData() == null) {
resultDomain.fail("保存文件失败: " + uploadResult.getMessage());
return resultDomain;
}
try {
// 6. 上传到Dify
DocumentUploadRequest uploadRequest = new DocumentUploadRequest();
uploadRequest.setName(originalFilename);
if (!StringUtils.hasText(indexingTechnique)) {
indexingTechnique = knowledge.getDifyIndexingTechnique();
}
uploadRequest.setIndexingTechnique(indexingTechnique);
TbSysFile sysFile = (TbSysFile) uploadResult.getData();
log.info("文件已保存到系统文件表: sysFileId={}, fileName={}", sysFile.getID(), sysFile.getOriginalName());
DocumentUploadResponse difyResponse = difyApiClient.uploadDocumentByFile(
knowledge.getDifyDatasetId(),
tempFile,
originalFilename,
uploadRequest,
difyConfig.getApiKey()
);
// 6. 获取已保存文件的File对象直接用于上传到Dify
File fileToUpload = fileService.getFileByRelativePath(sysFile.getFilePath());
// 7. 保存到本地数据库
TbAiUploadFile uploadFile = new TbAiUploadFile();
uploadFile.setID(UUID.randomUUID().toString());
uploadFile.setKnowledgeId(knowledgeId);
uploadFile.setFileName(originalFilename);
uploadFile.setFilePath(tempFile.getAbsolutePath());
uploadFile.setFileSize(file.getSize());
uploadFile.setFileType(getFileExtension(originalFilename));
uploadFile.setDifyDocumentId(difyResponse.getId());
uploadFile.setDifyBatchId(difyResponse.getBatch());
uploadFile.setStatus(1); // 1=处理中
uploadFile.setChunkCount(0);
uploadFile.setCreateTime(new Date());
uploadFile.setUpdateTime(new Date());
uploadFile.setDeleted(false);
// 7. 上传到Dify
DocumentUploadRequest uploadRequest = new DocumentUploadRequest();
uploadRequest.setName(originalFilename);
int rows = uploadFileMapper.insertUploadFile(uploadFile);
if (rows > 0) {
log.info("文件上传成功: {} - {}", uploadFile.getID(), originalFilename);
// 8. 异步更新向量化状态
asyncUpdateVectorStatus(uploadFile.getID());
resultDomain.success("文件上传成功", uploadFile);
return resultDomain;
} else {
resultDomain.fail("保存文件记录失败");
return resultDomain;
}
if (!StringUtils.hasText(indexingTechnique)) {
indexingTechnique = knowledge.getDifyIndexingTechnique();
}
uploadRequest.setIndexingTechnique(indexingTechnique);
} finally {
// 清理临时文件
deleteTempFile(tempFile);
DocumentUploadResponse difyResponse = difyApiClient.uploadDocumentByFile(
knowledge.getDifyDatasetId(),
fileToUpload,
originalFilename,
uploadRequest,
difyConfig.getApiKey());
// 8. 保存到本地数据库
TbAiUploadFile uploadFile = new TbAiUploadFile();
uploadFile.setID(UUID.randomUUID().toString());
uploadFile.setUserID(currentUser.getID());
uploadFile.setKnowledgeId(knowledgeId);
uploadFile.setSysFileId(sysFile.getID()); // 关联系统文件ID
uploadFile.setFileName(originalFilename);
uploadFile.setFilePath(sysFile.getFilePath()); // 保存系统文件的相对路径
uploadFile.setFileSize(file.getSize());
uploadFile.setFileType(getFileExtension(originalFilename));
uploadFile.setDifyDocumentId(difyResponse.getId());
uploadFile.setDifyBatchId(difyResponse.getBatch());
uploadFile.setStatus(1); // 1=处理中
uploadFile.setChunkCount(0);
uploadFile.setCreateTime(new Date());
uploadFile.setUpdateTime(new Date());
uploadFile.setDeleted(false);
int rows = uploadFileMapper.insertUploadFile(uploadFile);
if (rows > 0) {
log.info("知识库文件上传成功: uploadFileId={}, sysFileId={}, fileName={}",
uploadFile.getID(), sysFile.getID(), originalFilename);
// 9. 异步更新向量化状态
asyncUpdateVectorStatus(uploadFile.getID());
resultDomain.success("文件上传成功", uploadFile);
return resultDomain;
} else {
resultDomain.fail("保存文件记录失败");
return resultDomain;
}
} catch (DifyException e) {
@@ -203,10 +326,8 @@ public class AiUploadFileServiceImpl implements AiUploadFileService {
List<String> failedFiles = new ArrayList<>();
for (MultipartFile file : files) {
ResultDomain<TbAiUploadFile> uploadResult = uploadToKnowledge(
knowledgeId, file, indexingTechnique
);
ResultDomain<TbAiUploadFile> uploadResult = uploadToKnowledge(knowledgeId, file, indexingTechnique);
if (uploadResult.isSuccess()) {
uploadedFiles.add(uploadResult.getData());
} else {
@@ -252,14 +373,13 @@ public class AiUploadFileServiceImpl implements AiUploadFileService {
}
// 3. 删除Dify中的文档
if (StringUtils.hasText(file.getDifyDocumentId()) &&
StringUtils.hasText(knowledge.getDifyDatasetId())) {
if (StringUtils.hasText(file.getDifyDocumentId()) &&
StringUtils.hasText(knowledge.getDifyDatasetId())) {
try {
difyApiClient.deleteDocument(
knowledge.getDifyDatasetId(),
file.getDifyDocumentId(),
difyConfig.getApiKey()
);
difyConfig.getApiKey());
log.info("Dify文档删除成功: {}", file.getDifyDocumentId());
} catch (DifyException e) {
log.error("删除Dify文档失败继续删除本地记录", e);
@@ -268,7 +388,7 @@ public class AiUploadFileServiceImpl implements AiUploadFileService {
// 4. 获取当前用户
TbSysUser currentUser = LoginUtil.getCurrentUser();
// 5. 逻辑删除本地记录
TbAiUploadFile deleteEntity = new TbAiUploadFile();
deleteEntity.setID(fileId);
@@ -347,7 +467,7 @@ public class AiUploadFileServiceImpl implements AiUploadFileService {
try {
// 查询列表
List<TbAiUploadFile> files = uploadFileMapper.selectUploadFilesPage(filter, pageParam);
// 查询总数
long total = uploadFileMapper.countUploadFiles(filter);
@@ -355,7 +475,7 @@ public class AiUploadFileServiceImpl implements AiUploadFileService {
PageParam resultPageParam = new PageParam(pageParam.getPageNumber(), pageParam.getPageSize());
resultPageParam.setTotalElements(total);
resultPageParam.setTotalPages((int) Math.ceil((double) total / pageParam.getPageSize()));
return new PageDomain<>(resultPageParam, files);
} catch (Exception e) {
@@ -393,19 +513,18 @@ public class AiUploadFileServiceImpl implements AiUploadFileService {
DocumentStatusResponse statusResponse = difyApiClient.getDocumentStatus(
knowledge.getDifyDatasetId(),
file.getDifyBatchId(),
difyConfig.getApiKey()
);
difyConfig.getApiKey());
// 4. 更新本地状态
TbAiUploadFile update = new TbAiUploadFile();
update.setID(fileId);
// 映射Dify状态到本地状态completed=2, processing=1, error=3
// DocumentStatusResponse返回的是文档列表取第一个
if (statusResponse.getData() != null && !statusResponse.getData().isEmpty()) {
DocumentStatusResponse.DocumentStatus docStatus = statusResponse.getData().get(0);
String indexingStatus = docStatus.getIndexingStatus();
if ("completed".equals(indexingStatus)) {
update.setStatus(2); // 已完成
} else if ("error".equals(indexingStatus)) {
@@ -413,12 +532,12 @@ public class AiUploadFileServiceImpl implements AiUploadFileService {
} else {
update.setStatus(1); // 处理中
}
if (docStatus.getCompletedSegments() != null) {
update.setChunkCount(docStatus.getCompletedSegments());
}
}
update.setUpdateTime(new Date());
uploadFileMapper.updateUploadFile(update);
@@ -449,7 +568,7 @@ public class AiUploadFileServiceImpl implements AiUploadFileService {
try {
// 查询知识库的所有文件
List<TbAiUploadFile> files = uploadFileMapper.selectFilesByKnowledgeId(knowledgeId);
if (files.isEmpty()) {
resultDomain.success("没有需要同步的文件", files);
return resultDomain;
@@ -488,10 +607,10 @@ public class AiUploadFileServiceImpl implements AiUploadFileService {
if (!StringUtils.hasText(filename)) {
return false;
}
String extension = getFileExtension(filename).toLowerCase();
String[] allowedTypes = difyConfig.getUpload().getAllowedTypes();
for (String type : allowedTypes) {
if (type.equalsIgnoreCase(extension)) {
return true;
@@ -507,7 +626,7 @@ public class AiUploadFileServiceImpl implements AiUploadFileService {
if (!StringUtils.hasText(filename)) {
return "";
}
int lastDot = filename.lastIndexOf('.');
if (lastDot > 0 && lastDot < filename.length() - 1) {
return filename.substring(lastDot + 1);
@@ -516,35 +635,45 @@ public class AiUploadFileServiceImpl implements AiUploadFileService {
}
/**
* 保存临时文件
* 获取文件类型(使用具体的文件扩展名)
* 图片类型返回 "image"其他类型返回具体扩展名pdf, docx, txt等
*/
private File saveTempFile(MultipartFile file) {
try {
String tempDir = System.getProperty("java.io.tmpdir");
String filename = UUID.randomUUID().toString() + "_" + file.getOriginalFilename();
Path tempPath = Paths.get(tempDir, filename);
Files.copy(file.getInputStream(), tempPath);
return tempPath.toFile();
} catch (IOException e) {
log.error("保存临时文件失败", e);
return null;
private String getFileType(String filename) {
if (!StringUtils.hasText(filename)) {
return "file";
}
}
/**
* 删除临时文件
*/
private void deleteTempFile(File file) {
if (file != null && file.exists()) {
try {
Files.delete(file.toPath());
log.debug("临时文件已删除: {}", file.getAbsolutePath());
} catch (IOException e) {
log.warn("删除临时文件失败: {}", file.getAbsolutePath(), e);
// 转换为大写以匹配数组中的类型
String extension = getFileExtension(filename).toUpperCase();
// 图片类型统一返回 "image"
String[] imageTypes = { "JPG", "JPEG", "PNG", "GIF", "WEBP", "SVG" };
String[] documentTypes = { "TXT", "MD", "MARKDOWN", "MDX", "PDF", "HTML", "XLSX", "XLS", "VTT", "PROPERTIES",
"DOC", "DOCX", "CSV", "EML", "MSG", "PPTX", "PPT", "XML", "EPUB" };
String[] audioTypes = { "MP3", "M4A", "WAV", "WEBM", "MPGA" };
String[] videoTypes = { "MP4", "MOV", "MPEG", "WEBM" };
for (String type : imageTypes) {
if (type.equals(extension)) {
return "image";
}
}
for (String type : documentTypes) {
if (type.equals(extension)) {
return "document";
}
}
for (String type : audioTypes) {
if (type.equals(extension)) {
return "audio";
}
}
for (String type : videoTypes) {
if (type.equals(extension)) {
return "video";
}
}
return "custom";
}
/**
@@ -555,7 +684,7 @@ public class AiUploadFileServiceImpl implements AiUploadFileService {
try {
// 等待3秒后开始检查状态
Thread.sleep(3000);
// 最多检查10次每次间隔3秒
for (int i = 0; i < 10; i++) {
ResultDomain<TbAiUploadFile> result = syncFileStatus(fileId);
@@ -574,5 +703,26 @@ public class AiUploadFileServiceImpl implements AiUploadFileService {
}
}, executorService);
}
}
    /**
     * Lists the upload-file records attached to a chat message.
     * Delegates to {@code selectFilesByMessageId} (soft-deleted rows excluded).
     *
     * NOTE(review): the declared generic is ResultDomain&lt;TbAiUploadFile&gt; but the
     * payload passed to success(...) is a List&lt;TbAiUploadFile&gt; — presumably the
     * interface signature forces this; confirm whether it should be
     * ResultDomain&lt;List&lt;TbAiUploadFile&gt;&gt; so callers get a typed list back.
     *
     * @param messageId message id whose files to fetch; must be non-blank
     * @return success with the file list, or failure with an error message
     */
    @Override
    public ResultDomain<TbAiUploadFile> listFilesByMessageId(String messageId) {
        ResultDomain<TbAiUploadFile> resultDomain = new ResultDomain<>();
        try {
            if (!StringUtils.hasText(messageId)) {
                resultDomain.fail("消息ID不能为空");
                return resultDomain;
            }
            List<TbAiUploadFile> files = uploadFileMapper.selectFilesByMessageId(messageId);
            log.info("查询消息文件列表成功: messageId={}, fileCount={}", messageId, files.size());
            resultDomain.success("查询成功", files);
            return resultDomain;
        } catch (Exception e) {
            log.error("查询消息文件列表异常: messageId={}", messageId, e);
            resultDomain.fail("查询失败: " + e.getMessage());
            return resultDomain;
        }
    }
}

View File

@@ -8,12 +8,7 @@
<result column="name" property="name" jdbcType="VARCHAR"/>
<result column="avatar" property="avatar" jdbcType="VARCHAR"/>
<result column="description" property="description" jdbcType="VARCHAR"/>
<result column="system_prompt" property="systemPrompt" jdbcType="LONGVARCHAR"/>
<result column="model_name" property="modelName" jdbcType="VARCHAR"/>
<result column="model_provider" property="modelProvider" jdbcType="VARCHAR"/>
<result column="temperature" property="temperature" jdbcType="DECIMAL"/>
<result column="max_tokens" property="maxTokens" jdbcType="INTEGER"/>
<result column="top_p" property="topP" jdbcType="DECIMAL"/>
<result column="connect_internet" property="connectInternet" jdbcType="INTEGER"/>
<result column="dify_app_id" property="difyAppId" jdbcType="VARCHAR"/>
<result column="dify_api_key" property="difyApiKey" jdbcType="VARCHAR"/>
<result column="status" property="status" jdbcType="INTEGER"/>
@@ -27,8 +22,7 @@
<!-- 基础字段 -->
<sql id="Base_Column_List">
id, name, avatar, description, system_prompt, model_name, model_provider,
temperature, max_tokens, top_p, dify_app_id, dify_api_key, status,
id, name, avatar, description, connect_internet, dify_app_id, dify_api_key, status,
creator, updater, create_time, update_time, delete_time, deleted
</sql>
@@ -39,12 +33,6 @@
<if test="name != null and name != ''">
AND name LIKE CONCAT('%', #{name}, '%')
</if>
<if test="modelName != null and modelName != ''">
AND model_name = #{modelName}
</if>
<if test="modelProvider != null and modelProvider != ''">
AND model_provider = #{modelProvider}
</if>
<if test="status != null">
AND status = #{status}
</if>
@@ -54,12 +42,10 @@
<!-- 插入智能体配置 -->
<insert id="insertAgentConfig" parameterType="org.xyzh.common.dto.ai.TbAiAgentConfig">
INSERT INTO tb_ai_agent_config (
id, name, avatar, description, system_prompt, model_name, model_provider,
temperature, max_tokens, top_p, dify_app_id, dify_api_key, status,
id, name, avatar, description, connect_internet, dify_app_id, dify_api_key, status,
creator, updater, create_time, update_time, deleted
) VALUES (
#{id}, #{name}, #{avatar}, #{description}, #{systemPrompt}, #{modelName}, #{modelProvider},
#{temperature}, #{maxTokens}, #{topP}, #{difyAppId}, #{difyApiKey}, #{status},
#{id}, #{name}, #{avatar}, #{description}, #{connectInternet}, #{difyAppId}, #{difyApiKey}, #{status},
#{creator}, #{updater}, #{createTime}, #{updateTime}, #{deleted}
)
</insert>
@@ -71,12 +57,7 @@
<if test="name != null and name != ''">name = #{name},</if>
<if test="avatar != null">avatar = #{avatar},</if>
<if test="description != null">description = #{description},</if>
<if test="systemPrompt != null">system_prompt = #{systemPrompt},</if>
<if test="modelName != null">model_name = #{modelName},</if>
<if test="modelProvider != null">model_provider = #{modelProvider},</if>
<if test="temperature != null">temperature = #{temperature},</if>
<if test="maxTokens != null">max_tokens = #{maxTokens},</if>
<if test="topP != null">top_p = #{topP},</if>
<if test="connectInternet != null">connect_internet = #{connectInternet},</if>
<if test="difyAppId != null">dify_app_id = #{difyAppId},</if>
<if test="difyApiKey != null">dify_api_key = #{difyApiKey},</if>
<if test="status != null">status = #{status},</if>
@@ -116,9 +97,6 @@
<if test="filter.status != null">
AND status = #{filter.status}
</if>
<if test="filter.modelProvider != null and filter.modelProvider != ''">
AND model_provider = #{filter.modelProvider}
</if>
</if>
ORDER BY create_time DESC
</select>
@@ -136,9 +114,6 @@
<if test="filter.status != null">
AND status = #{filter.status}
</if>
<if test="filter.modelProvider != null and filter.modelProvider != ''">
AND model_provider = #{filter.modelProvider}
</if>
</if>
ORDER BY create_time DESC
LIMIT #{pageParam.offset}, #{pageParam.pageSize}
@@ -156,9 +131,6 @@
<if test="filter.status != null">
AND status = #{filter.status}
</if>
<if test="filter.modelProvider != null and filter.modelProvider != ''">
AND model_provider = #{filter.modelProvider}
</if>
</if>
</select>

View File

@@ -8,6 +8,8 @@
<result column="user_id" property="userID" jdbcType="VARCHAR"/>
<result column="knowledge_id" property="knowledgeId" jdbcType="VARCHAR"/>
<result column="conversation_id" property="conversationID" jdbcType="VARCHAR"/>
<result column="message_id" property="messageID" jdbcType="VARCHAR"/>
<result column="sys_file_id" property="sysFileId" jdbcType="VARCHAR"/>
<result column="file_name" property="fileName" jdbcType="VARCHAR"/>
<result column="file_path" property="filePath" jdbcType="VARCHAR"/>
<result column="file_size" property="fileSize" jdbcType="BIGINT"/>
@@ -16,12 +18,10 @@
<result column="extracted_text" property="extractedText" jdbcType="LONGVARCHAR"/>
<result column="dify_document_id" property="difyDocumentId" jdbcType="VARCHAR"/>
<result column="dify_batch_id" property="difyBatchId" jdbcType="VARCHAR"/>
<result column="vector_status" property="vectorStatus" jdbcType="INTEGER"/>
<result column="dify_upload_file_id" property="difyUploadFileId" jdbcType="VARCHAR"/>
<result column="chunk_count" property="chunkCount" jdbcType="INTEGER"/>
<result column="status" property="status" jdbcType="INTEGER"/>
<result column="error_message" property="errorMessage" jdbcType="VARCHAR"/>
<result column="creator" property="creator" jdbcType="VARCHAR"/>
<result column="updater" property="updater" jdbcType="VARCHAR"/>
<result column="create_time" property="createTime" jdbcType="TIMESTAMP"/>
<result column="update_time" property="updateTime" jdbcType="TIMESTAMP"/>
<result column="delete_time" property="deleteTime" jdbcType="TIMESTAMP"/>
@@ -30,9 +30,9 @@
<!-- 基础字段 -->
<sql id="Base_Column_List">
id, user_id, knowledge_id, conversation_id, file_name, file_path, file_size,
file_type, mime_type, extracted_text, dify_document_id, dify_batch_id,
vector_status, chunk_count, status, error_message, creator, updater,
id, user_id, knowledge_id, conversation_id, message_id, sys_file_id, file_name, file_path, file_size,
file_type, mime_type, extracted_text, dify_document_id, dify_batch_id, dify_upload_file_id,
chunk_count, status, error_message,
create_time, update_time, delete_time, deleted
</sql>
@@ -56,9 +56,6 @@
<if test="filter.fileType != null and filter.fileType != ''">
AND file_type = #{filter.fileType}
</if>
<if test="filter.vectorStatus != null">
AND vector_status = #{filter.vectorStatus}
</if>
<if test="filter.status != null">
AND status = #{filter.status}
</if>
@@ -69,14 +66,14 @@
<!-- insertUploadFile插入文件记录 -->
<insert id="insertUploadFile" parameterType="org.xyzh.common.dto.ai.TbAiUploadFile">
INSERT INTO tb_ai_upload_file (
id, user_id, knowledge_id, conversation_id, file_name, file_path, file_size,
file_type, mime_type, extracted_text, dify_document_id, dify_batch_id,
vector_status, chunk_count, status, error_message, creator, updater,
id, user_id, knowledge_id, conversation_id, message_id, sys_file_id, file_name, file_path, file_size,
file_type, mime_type, extracted_text, dify_document_id, dify_batch_id, dify_upload_file_id,
chunk_count, status, error_message,
create_time, update_time, deleted
) VALUES (
#{ID}, #{userID}, #{knowledgeId}, #{conversationID}, #{fileName}, #{filePath}, #{fileSize},
#{fileType}, #{mimeType}, #{extractedText}, #{difyDocumentId}, #{difyBatchId},
#{vectorStatus}, #{chunkCount}, #{status}, #{errorMessage}, #{creator}, #{updater},
#{ID}, #{userID}, #{knowledgeId}, #{conversationID}, #{messageID}, #{sysFileId}, #{fileName}, #{filePath}, #{fileSize},
#{fileType}, #{mimeType}, #{extractedText}, #{difyDocumentId}, #{difyBatchId}, #{difyUploadFileId},
#{chunkCount}, #{status}, #{errorMessage},
#{createTime}, #{updateTime}, #{deleted}
)
</insert>
@@ -88,6 +85,8 @@
<if test="userID != null">user_id = #{userID},</if>
<if test="knowledgeId != null">knowledge_id = #{knowledgeId},</if>
<if test="conversationID != null">conversation_id = #{conversationID},</if>
<if test="messageID != null">message_id = #{messageID},</if>
<if test="sysFileId != null">sys_file_id = #{sysFileId},</if>
<if test="fileName != null">file_name = #{fileName},</if>
<if test="filePath != null">file_path = #{filePath},</if>
<if test="fileSize != null">file_size = #{fileSize},</if>
@@ -96,11 +95,10 @@
<if test="extractedText != null">extracted_text = #{extractedText},</if>
<if test="difyDocumentId != null">dify_document_id = #{difyDocumentId},</if>
<if test="difyBatchId != null">dify_batch_id = #{difyBatchId},</if>
<if test="vectorStatus != null">vector_status = #{vectorStatus},</if>
<if test="difyUploadFileId != null">dify_upload_file_id = #{difyUploadFileId},</if>
<if test="chunkCount != null">chunk_count = #{chunkCount},</if>
<if test="status != null">status = #{status},</if>
<if test="errorMessage != null">error_message = #{errorMessage},</if>
<if test="updater != null">updater = #{updater},</if>
<if test="updateTime != null">update_time = #{updateTime},</if>
</set>
WHERE id = #{ID} AND deleted = 0
@@ -110,8 +108,7 @@
<update id="deleteUploadFile" parameterType="org.xyzh.common.dto.ai.TbAiUploadFile">
UPDATE tb_ai_upload_file
SET deleted = 1,
delete_time = NOW(),
updater = #{updater}
delete_time = NOW()
WHERE id = #{ID} AND deleted = 0
</update>
@@ -186,4 +183,31 @@
ORDER BY create_time DESC
</select>
    <!-- selectFilesByMessageId: list the files attached to a chat message.
         Soft-deleted rows (deleted = 1) are excluded; ordered by upload time
         ascending so the UI can render files in the order they were sent. -->
    <select id="selectFilesByMessageId" resultMap="BaseResultMap">
        SELECT
        <include refid="Base_Column_List"/>
        FROM tb_ai_upload_file
        WHERE message_id = #{messageId}
        AND deleted = 0
        ORDER BY create_time ASC
    </select>
    <!-- batchInsertUploadFiles: bulk-insert message file records in one statement.
         NOTE(review): parameterType is java.util.List while the foreach collection
         is "files" - this only resolves if the mapper interface annotates the
         parameter with @Param("files"); verify, otherwise use collection="list". -->
    <insert id="batchInsertUploadFiles" parameterType="java.util.List">
        INSERT INTO tb_ai_upload_file (
        id, user_id, knowledge_id, conversation_id, message_id, sys_file_id, file_name, file_path, file_size,
        file_type, mime_type, dify_document_id, dify_batch_id, dify_upload_file_id,
        chunk_count, status, create_time, update_time, deleted
        ) VALUES
        <foreach collection="files" item="file" separator=",">
            (
            #{file.ID}, #{file.userID}, #{file.knowledgeId}, #{file.conversationID}, #{file.messageID},
            #{file.sysFileId}, #{file.fileName}, #{file.filePath}, #{file.fileSize},
            #{file.fileType}, #{file.mimeType}, #{file.difyDocumentId}, #{file.difyBatchId}, #{file.difyUploadFileId},
            #{file.chunkCount}, #{file.status}, #{file.createTime}, #{file.updateTime}, #{file.deleted}
            )
        </foreach>
    </insert>
</mapper>