schoolNews/schoolNewsServ/ai/src/main/resources/mapper/AiUploadFileMapper.xml
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="org.xyzh.ai.mapper.AiUploadFileMapper">
<!-- Base result map -->
<resultMap id="BaseResultMap" type="org.xyzh.common.dto.ai.TbAiUploadFile">
<id column="id" property="id" jdbcType="VARCHAR"/>
<result column="user_id" property="userID" jdbcType="VARCHAR"/>
<result column="knowledge_id" property="knowledgeId" jdbcType="VARCHAR"/>
<result column="conversation_id" property="conversationID" jdbcType="VARCHAR"/>
<result column="message_id" property="messageID" jdbcType="VARCHAR"/>
<result column="sys_file_id" property="sysFileId" jdbcType="VARCHAR"/>
<result column="file_name" property="fileName" jdbcType="VARCHAR"/>
<result column="file_path" property="filePath" jdbcType="VARCHAR"/>
<result column="file_size" property="fileSize" jdbcType="BIGINT"/>
<result column="file_type" property="fileType" jdbcType="VARCHAR"/>
<result column="mime_type" property="mimeType" jdbcType="VARCHAR"/>
<result column="extracted_text" property="extractedText" jdbcType="LONGVARCHAR"/>
<result column="dify_document_id" property="difyDocumentId" jdbcType="VARCHAR"/>
<result column="dify_batch_id" property="difyBatchId" jdbcType="VARCHAR"/>
<result column="dify_upload_file_id" property="difyUploadFileId" jdbcType="VARCHAR"/>
<result column="chunk_count" property="chunkCount" jdbcType="INTEGER"/>
<result column="status" property="status" jdbcType="INTEGER"/>
<result column="error_message" property="errorMessage" jdbcType="VARCHAR"/>
<result column="enabled" property="enabled" jdbcType="BOOLEAN"/>
<result column="display_status" property="displayStatus" jdbcType="VARCHAR"/>
<result column="create_time" property="createTime" jdbcType="TIMESTAMP"/>
<result column="update_time" property="updateTime" jdbcType="TIMESTAMP"/>
<result column="delete_time" property="deleteTime" jdbcType="TIMESTAMP"/>
<result column="deleted" property="deleted" jdbcType="BOOLEAN"/>
</resultMap>
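<!-- NOTE: the property names above must match the TbAiUploadFile bean exactly; the mapping
     mixes "ID" and "Id" suffixes (userID/conversationID/messageID vs knowledgeId/sysFileId),
     so the DTO is assumed to use the same casing as declared here. -->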
<!-- Base column list -->
<sql id="Base_Column_List">
id, user_id, knowledge_id, conversation_id, message_id, sys_file_id, file_name, file_path, file_size,
file_type, mime_type, extracted_text, dify_document_id, dify_batch_id, dify_upload_file_id,
chunk_count, status, error_message, enabled, display_status,
create_time, update_time, delete_time, deleted
</sql>
<!-- Shared filter conditions -->
<sql id="Filter_Clause">
<where>
deleted = 0
<if test="filter != null">
<if test="filter.userID != null and filter.userID != ''">
AND user_id = #{filter.userID}
</if>
<if test="filter.knowledgeId != null and filter.knowledgeId != ''">
AND knowledge_id = #{filter.knowledgeId}
</if>
<if test="filter.conversationID != null and filter.conversationID != ''">
AND conversation_id = #{filter.conversationID}
</if>
<if test="filter.fileName != null and filter.fileName != ''">
AND file_name LIKE CONCAT('%', #{filter.fileName}, '%')
</if>
<if test="filter.fileType != null and filter.fileType != ''">
AND file_type = #{filter.fileType}
</if>
<if test="filter.status != null">
AND status = #{filter.status}
</if>
</if>
</where>
</sql>
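<!-- NOTE: Filter_Clause reads properties as filter.xxx, so statements that include it are
     assumed to receive the filter object under the name "filter" on the Java side
     (e.g. an @Param("filter") argument on the mapper method). -->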
<!-- insertUploadFile: insert a single file record -->
<insert id="insertUploadFile" parameterType="org.xyzh.common.dto.ai.TbAiUploadFile">
INSERT INTO tb_ai_upload_file (
id, user_id, knowledge_id, conversation_id, message_id, sys_file_id, file_name, file_path, file_size,
file_type, mime_type, extracted_text, dify_document_id, dify_batch_id, dify_upload_file_id,
chunk_count, status, error_message, enabled, display_status,
create_time, update_time, deleted
) VALUES (
#{id}, #{userID}, #{knowledgeId}, #{conversationID}, #{messageID}, #{sysFileId}, #{fileName}, #{filePath}, #{fileSize},
#{fileType}, #{mimeType}, #{extractedText}, #{difyDocumentId}, #{difyBatchId}, #{difyUploadFileId},
#{chunkCount}, #{status}, #{errorMessage}, #{enabled}, #{displayStatus},
#{createTime}, #{updateTime}, #{deleted}
)
</insert>
<!-- updateUploadFile: update a file record -->
<update id="updateUploadFile" parameterType="org.xyzh.common.dto.ai.TbAiUploadFile">
UPDATE tb_ai_upload_file
<set>
<if test="userID != null">user_id = #{userID},</if>
<if test="knowledgeId != null">knowledge_id = #{knowledgeId},</if>
<if test="conversationID != null">conversation_id = #{conversationID},</if>
<if test="messageID != null">message_id = #{messageID},</if>
<if test="sysFileId != null">sys_file_id = #{sysFileId},</if>
<if test="fileName != null">file_name = #{fileName},</if>
<if test="filePath != null">file_path = #{filePath},</if>
<if test="fileSize != null">file_size = #{fileSize},</if>
<if test="fileType != null">file_type = #{fileType},</if>
<if test="mimeType != null">mime_type = #{mimeType},</if>
<if test="extractedText != null">extracted_text = #{extractedText},</if>
<if test="difyDocumentId != null">dify_document_id = #{difyDocumentId},</if>
<if test="difyBatchId != null">dify_batch_id = #{difyBatchId},</if>
<if test="difyUploadFileId != null">dify_upload_file_id = #{difyUploadFileId},</if>
<if test="chunkCount != null">chunk_count = #{chunkCount},</if>
<if test="status != null">status = #{status},</if>
<if test="errorMessage != null">error_message = #{errorMessage},</if>
<if test="enabled != null">enabled = #{enabled},</if>
<if test="displayStatus != null">display_status = #{displayStatus},</if>
<if test="updateTime != null">update_time = #{updateTime},</if>
</set>
WHERE id = #{id} AND deleted = 0
</update>
<!-- deleteUploadFile: logically (soft) delete a file record -->
<update id="deleteUploadFile" parameterType="org.xyzh.common.dto.ai.TbAiUploadFile">
UPDATE tb_ai_upload_file
SET deleted = 1,
delete_time = NOW()
WHERE id = #{id} AND deleted = 0
</update>
<!-- selectUploadFileById: query a file by ID -->
<select id="selectUploadFileById" resultMap="BaseResultMap">
SELECT
<include refid="Base_Column_List"/>
FROM tb_ai_upload_file
WHERE id = #{fileId} AND deleted = 0
</select>
<!-- selectAllUploadFiles: query all files -->
<select id="selectAllUploadFiles" resultMap="BaseResultMap">
SELECT
<include refid="Base_Column_List"/>
FROM tb_ai_upload_file
<include refid="Filter_Clause"/>
ORDER BY create_time DESC
</select>
<!-- selectFilesByKnowledgeId: list files by knowledge base ID -->
<select id="selectFilesByKnowledgeId" resultMap="BaseResultMap">
SELECT
<include refid="Base_Column_List"/>
FROM tb_ai_upload_file
WHERE knowledge_id = #{knowledgeId}
AND deleted = 0
ORDER BY create_time DESC
</select>
<!-- selectUploadFilesPage: paginated file query -->
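<!-- NOTE: the LIMIT #{pageParam.offset}, #{pageParam.pageSize} clause below is MySQL-style;
     pageParam is assumed to be bound under the name "pageParam" (e.g. @Param("pageParam"))
     and to carry a precomputed offset rather than a page number. -->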
<select id="selectUploadFilesPage" resultMap="BaseResultMap">
SELECT
<include refid="Base_Column_List"/>
FROM tb_ai_upload_file
<include refid="Filter_Clause"/>
ORDER BY create_time DESC
LIMIT #{pageParam.offset}, #{pageParam.pageSize}
</select>
<!-- countUploadFiles: count total matching files -->
<select id="countUploadFiles" resultType="java.lang.Long">
SELECT COUNT(1)
FROM tb_ai_upload_file
<include refid="Filter_Clause"/>
</select>
<!-- selectAiUploadFiles: original method kept for backward compatibility -->
<select id="selectAiUploadFiles" resultMap="BaseResultMap">
SELECT
<include refid="Base_Column_List"/>
FROM tb_ai_upload_file
WHERE deleted = 0
<if test="userID != null and userID != ''">
AND user_id = #{userID}
</if>
<if test="knowledgeId != null and knowledgeId != ''">
AND knowledge_id = #{knowledgeId}
</if>
<if test="conversationID != null and conversationID != ''">
AND conversation_id = #{conversationID}
</if>
<if test="fileName != null and fileName != ''">
AND file_name LIKE CONCAT('%', #{fileName}, '%')
</if>
<if test="fileType != null and fileType != ''">
AND file_type = #{fileType}
</if>
<if test="status != null">
AND status = #{status}
</if>
ORDER BY create_time DESC
</select>
<!-- selectFilesByMessageId: list files attached to a message ID -->
<select id="selectFilesByMessageId" resultMap="BaseResultMap">
SELECT
<include refid="Base_Column_List"/>
FROM tb_ai_upload_file
WHERE message_id = #{messageId}
AND deleted = 0
ORDER BY create_time ASC
</select>
<!-- selectFileByDifyDocumentId: query a file by Dify document ID -->
<select id="selectFileByDifyDocumentId" resultMap="BaseResultMap">
SELECT
<include refid="Base_Column_List"/>
FROM tb_ai_upload_file
WHERE dify_document_id = #{difyDocumentId}
AND deleted = 0
LIMIT 1
</select>
<!-- batchInsertUploadFiles: batch insert file records -->
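<!-- NOTE: the column list below omits extracted_text and error_message; those columns can
     be filled in afterwards via updateUploadFile. -->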
<insert id="batchInsertUploadFiles" parameterType="java.util.List">
INSERT INTO tb_ai_upload_file (
id, user_id, knowledge_id, conversation_id, message_id, sys_file_id, file_name, file_path, file_size,
file_type, mime_type, dify_document_id, dify_batch_id, dify_upload_file_id,
chunk_count, status, enabled, display_status, create_time, update_time, deleted
) VALUES
<foreach collection="files" item="file" separator=",">
(
#{file.id}, #{file.userID}, #{file.knowledgeId}, #{file.conversationID}, #{file.messageID},
#{file.sysFileId}, #{file.fileName}, #{file.filePath}, #{file.fileSize},
#{file.fileType}, #{file.mimeType}, #{file.difyDocumentId}, #{file.difyBatchId}, #{file.difyUploadFileId},
#{file.chunkCount}, #{file.status}, #{file.enabled}, #{file.displayStatus}, #{file.createTime}, #{file.updateTime}, #{file.deleted}
)
</foreach>
</insert>
<!-- chunkSync: adjust chunk_count by a signed delta (#{action}) for a Dify document -->
<update id="chunkSync">
UPDATE tb_ai_upload_file
SET chunk_count = chunk_count + #{action}
WHERE dify_document_id = #{difyDocumentId}
AND deleted = 0
</update>
</mapper>
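
For orientation, the sketch below shows one way the companion Java interface org.xyzh.ai.mapper.AiUploadFileMapper could be declared so that its method names and @Param names line up with the statement ids and #{...} placeholders in this XML. It is an assumption-laden reconstruction, not the project's actual source: the filter argument is modelled as a TbAiUploadFile carrying only the fields to match on, and PageParam is a made-up placeholder for whatever paging object the project really uses.

package org.xyzh.ai.mapper;

import java.util.List;

import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.xyzh.common.dto.ai.TbAiUploadFile;

@Mapper
public interface AiUploadFileMapper {

    // Single-record CRUD: the DTO itself is the parameter object.
    int insertUploadFile(TbAiUploadFile file);
    int updateUploadFile(TbAiUploadFile file);
    int deleteUploadFile(TbAiUploadFile file);

    // Single-key lookups: @Param names must match the #{...} placeholders in the XML.
    TbAiUploadFile selectUploadFileById(@Param("fileId") String fileId);
    TbAiUploadFile selectFileByDifyDocumentId(@Param("difyDocumentId") String difyDocumentId);
    List<TbAiUploadFile> selectFilesByKnowledgeId(@Param("knowledgeId") String knowledgeId);
    List<TbAiUploadFile> selectFilesByMessageId(@Param("messageId") String messageId);

    // Filtered / paged queries: "filter" and "pageParam" are the names referenced by
    // Filter_Clause and the LIMIT clause. The filter is modelled here as a TbAiUploadFile
    // holding only the fields to match on (an assumption; the project may use its own type).
    List<TbAiUploadFile> selectAllUploadFiles(@Param("filter") TbAiUploadFile filter);
    List<TbAiUploadFile> selectUploadFilesPage(@Param("filter") TbAiUploadFile filter,
                                               @Param("pageParam") PageParam pageParam);
    Long countUploadFiles(@Param("filter") TbAiUploadFile filter);

    // Legacy query that reads bare properties (userID, knowledgeId, ...) straight off the DTO.
    List<TbAiUploadFile> selectAiUploadFiles(TbAiUploadFile query);

    // Batch insert: "files" must match the foreach collection name.
    int batchInsertUploadFiles(@Param("files") List<TbAiUploadFile> files);

    // Adjust chunk_count by a signed delta (+n / -n) for the given Dify document.
    int chunkSync(@Param("difyDocumentId") String difyDocumentId, @Param("action") int action);

    // Placeholder paging type: offset is expected to be precomputed as (page - 1) * pageSize.
    class PageParam {
        private long offset;
        private int pageSize;

        public long getOffset() { return offset; }
        public void setOffset(long offset) { this.offset = offset; }
        public int getPageSize() { return pageSize; }
        public void setPageSize(int pageSize) { this.pageSize = pageSize; }
    }
}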