Implement sensitive-word detection; send an email on failure
@@ -0,0 +1,46 @@
package org.xyzh.sensitive.controller;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.xyzh.api.sensitive.SensitiveService;
import org.xyzh.common.core.domain.ResultDomain;
import org.xyzh.common.core.page.PageRequest;
import org.xyzh.common.dto.sensitive.TbSensitive;

@RestController
@RequestMapping("/sensitive")
public class SensitiveController {

    @Autowired
    private SensitiveService sensitiveService;

    @PostMapping("/page")
    public ResultDomain<TbSensitive> page(@RequestBody PageRequest<TbSensitive> pageRequest){
        return sensitiveService.page(pageRequest);
    }

    @PostMapping
    public ResultDomain<Boolean> addSensitiveWord(@RequestBody TbSensitive tbSensitive){
        return sensitiveService.addSensitiveWord(tbSensitive);
    }

    @DeleteMapping
    public ResultDomain<Boolean> deleteSensitiveWord(@RequestBody TbSensitive tbSensitive){
        return sensitiveService.deleteSensitiveWord(tbSensitive);
    }

    @PutMapping
    public ResultDomain<Boolean> changeSensitiveWordType(@RequestBody TbSensitive tbSensitive){
        return sensitiveService.changeSensitiveWordType(tbSensitive);
    }

    @PostMapping("/judge")
    public ResultDomain<String> judgeSensitive(@RequestBody String text){
        return sensitiveService.judgeSensitive(text);
    }
}
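For reviewers who want to exercise the new route quickly, here is a minimal, hypothetical smoke test against `POST /sensitive/judge` as declared in the controller above. The host, port, and sample text are assumptions and are not part of this commit.

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

// Hypothetical smoke test for POST /sensitive/judge; host and port are assumptions.
public class JudgeSensitiveSmokeTest {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/sensitive/judge"))
                // The controller binds the raw request body to its String parameter.
                .header("Content-Type", "text/plain")
                .POST(HttpRequest.BodyPublishers.ofString("text to be checked for sensitive words"))
                .build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        // Expected payload: a serialized ResultDomain<String>, marked as failed when matches are found.
        System.out.println(response.body());
    }
}
```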
@@ -3,6 +3,8 @@ package org.xyzh.sensitive.mapper;

import java.util.List;

import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.xyzh.common.core.page.PageParam;
import org.xyzh.common.dto.sensitive.TbSensitive;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;

@@ -14,5 +16,11 @@ public interface SensitiveMapper extends BaseMapper<TbSensitive> {

    public Integer addSensitiveWord(TbSensitive tbSensitive);

    public Integer changeWordType(TbSensitive tbSensitive);

    public Integer deleteSensitiveWord(TbSensitive tbSensitive);

    public List<TbSensitive> selectTbSensitivePage(@Param("filter") TbSensitive filter, @Param("pageParam") PageParam pageParam);

    public Integer countByFilter(@Param("filter") TbSensitive filter);
}
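To make the intended wiring of the two paging methods concrete, here is a hedged caller sketch. The setters used on `PageParam` and the `setType` setter on the filter are assumptions inferred from `#{pageParam.offset}`, `#{pageParam.pageSize}`, and the `type` column in the XML mapper later in this diff; only `setTotalElements` is actually visible in this commit (in `SensitiveServiceImpl.page()`).

```java
import java.util.List;

import org.xyzh.common.core.page.PageParam;
import org.xyzh.common.dto.sensitive.TbSensitive;
import org.xyzh.sensitive.mapper.SensitiveMapper;

// Hedged caller sketch; setter names on PageParam and TbSensitive are assumptions.
public class SensitiveMapperUsageSketch {
    private final SensitiveMapper sensitiveMapper;

    public SensitiveMapperUsageSketch(SensitiveMapper sensitiveMapper) {
        this.sensitiveMapper = sensitiveMapper;
    }

    public List<TbSensitive> firstPageOfDenyWords() {
        TbSensitive filter = new TbSensitive();
        filter.setType("deny");            // assumed setter, mirrors the 'type' column
        PageParam pageParam = new PageParam();
        pageParam.setOffset(0);            // assumed; consumed by LIMIT #{pageParam.offset}, #{pageParam.pageSize}
        pageParam.setPageSize(20);         // assumed; mirrors #{pageParam.pageSize}
        int total = sensitiveMapper.countByFilter(filter);  // total rows matching the filter
        pageParam.setTotalElements(total);                  // setter seen in SensitiveServiceImpl.page()
        return sensitiveMapper.selectTbSensitivePage(filter, pageParam);
    }
}
```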
@@ -4,6 +4,8 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.xyzh.api.sensitive.SensitiveService;
import org.xyzh.common.core.domain.ResultDomain;
import org.xyzh.common.core.page.PageDomain;
import org.xyzh.common.core.page.PageRequest;
import org.xyzh.common.dto.sensitive.TbSensitive;
import org.xyzh.common.redis.service.RedisService;
import org.xyzh.sensitive.constants.SensitiveRedisContants;

@@ -19,12 +21,22 @@ public class SensitiveServiceImpl implements SensitiveService{
    @Autowired
    private SensitiveMapper sensitiveMapper;

    @Autowired
    private SensitiveWordHelper sensitiveWordHelper;

    @Autowired
    private RedisService redisService;

    @Override
    public ResultDomain<TbSensitive> page(PageRequest<TbSensitive> pageRequest) {
        ResultDomain<TbSensitive> resultDomain = new ResultDomain<>();
        List<TbSensitive> tbSensitiveList = sensitiveMapper.selectTbSensitivePage(pageRequest.getFilter(), pageRequest.getPageParam());
        int count = sensitiveMapper.countByFilter(pageRequest.getFilter());
        PageDomain<TbSensitive> pageDomain = new PageDomain<>();
        pageRequest.getPageParam().setTotalElements(count);
        pageDomain.setPageParam(pageRequest.getPageParam());
        pageDomain.setDataList(tbSensitiveList);
        resultDomain.success("查询成功", pageDomain);
        return resultDomain;
    }

    @Override
    public ResultDomain<Boolean> addSensitiveWord(TbSensitive tbSensitive) {
        ResultDomain<Boolean> resultDomain = new ResultDomain<>();
@@ -59,6 +71,44 @@ public class SensitiveServiceImpl implements SensitiveService{
        return resultDomain;
    }

    @Override
    public ResultDomain<Boolean> changeSensitiveWordType(TbSensitive tbSensitive) {
        ResultDomain<Boolean> resultDomain = new ResultDomain<>();
        String word = tbSensitive.getWord();
        String newType = tbSensitive.getType();

        // Target Redis key
        String targetRedisKey = "allow".equals(newType) ? SensitiveRedisContants.SENSITIVE_WORD_ALLOW : SensitiveRedisContants.SENSITIVE_WORD_DENY;

        // Check whether the word already exists under the target type
        if (redisService.sMembers(targetRedisKey).contains(word)) {
            resultDomain.fail("该敏感词在目标类型中已存在");
            return resultDomain;
        }

        // Lock
        synchronized (this) {
            // Re-check after acquiring the lock
            if (redisService.sMembers(targetRedisKey).contains(word)) {
                resultDomain.fail("该敏感词在目标类型中已存在");
                return resultDomain;
            }

            // Apply the database update
            int i = sensitiveMapper.changeWordType(tbSensitive);
            if (i > 0) {
                // Sync to Redis: remove from the old type, add to the new type
                String oldRedisKey = "allow".equals(newType) ? SensitiveRedisContants.SENSITIVE_WORD_DENY : SensitiveRedisContants.SENSITIVE_WORD_ALLOW;
                redisService.sRemove(oldRedisKey, word);
                redisService.sAdd(targetRedisKey, word);
                resultDomain.success("修改敏感词类型成功", true);
            } else {
                resultDomain.fail("修改敏感词类型失败");
            }
        }
        return resultDomain;
    }

    @Override
    public ResultDomain<Boolean> deleteSensitiveWord(TbSensitive tbSensitive) {
        ResultDomain<Boolean> resultDomain = new ResultDomain<>();
@@ -94,11 +144,15 @@ public class SensitiveServiceImpl implements SensitiveService{
    }

    @Override
-   public ResultDomain<Boolean> judgeSensitive(String text) {
-       ResultDomain<Boolean> resultDomain = new ResultDomain<>();
-
-       boolean containsSensitive = sensitiveWordHelper.contains(text);
-       resultDomain.success("敏感词检测完成", containsSensitive);
+   public ResultDomain<String> judgeSensitive(String text) {
+       ResultDomain<String> resultDomain = new ResultDomain<>();
+       List<String> sensitiveWords = SensitiveWordHelper.findAll(text);
+       // boolean containsSensitive = sensitiveWordHelper.contains(text);
+       if (sensitiveWords.size() > 0) {
+           resultDomain.fail(text, sensitiveWords);
+       }else {
+           resultDomain.success("不包含敏感词", text);
+       }

        return resultDomain;
    }
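The new `judgeSensitive` calls `SensitiveWordHelper.findAll(text)` statically even though an instance is still autowired. If this helper is the `com.github.houbb` sensitive-word utility (an assumption; the import sits outside this hunk), the static call behaves roughly as in this standalone sketch. The sample text and the import path should be verified against the project's actual dependency.

```java
import java.util.List;

// Assumed dependency: com.github.houbb:sensitive-word. Verify the import path against
// the helper class actually used by this project before relying on this sketch.
import com.github.houbb.sensitive.word.core.SensitiveWordHelper;

public class FindAllSketch {
    public static void main(String[] args) {
        String text = "sample user-submitted text";
        // findAll returns every matched sensitive word; an empty list means no hits.
        List<String> hits = SensitiveWordHelper.findAll(text);
        if (hits.isEmpty()) {
            System.out.println("No sensitive words found");
        } else {
            System.out.println("Sensitive words: " + hits);
        }
    }
}
```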
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="org.xyzh.sensitive.mapper.SensitiveMapper">
-   <resultMap id="BaseMap" resultType="org.xyzh.common.dto.sensitive.TbSensitive">
+   <resultMap id="BaseMap" type="org.xyzh.common.dto.sensitive.TbSensitive">
        <id column="id" property="id"/>
        <result column="word" property="word"/>
        <result column="type" property="type"/>
@@ -14,20 +14,65 @@
    <select id="selectAll" resultMap="BaseMap">
        SELECT
        <include refid="Base_Column_List"/>
-       FROM tb_sensitive
+       FROM tb_sensitive_word
    </select>

    <insert id="addSensitiveWord" parameterType="org.xyzh.common.dto.sensitive.TbSensitive" useGeneratedKeys="true" keyProperty="id" keyColumn="id">
-       INSERT INTO tb_sensitive (word, type)
+       INSERT INTO tb_sensitive_word (word, type)
        VALUES (#{word}, #{type})
    </insert>

    <!-- updateSensitiveWord -->
    <update id="changeWordType">
        UPDATE tb_sensitive_word
        SET type = #{type}
        WHERE id = #{id}
    </update>

    <delete id="deleteSensitiveWord" parameterType="org.xyzh.common.dto.sensitive.TbSensitive">
-       DELETE FROM tb_sensitive
+       DELETE FROM tb_sensitive_word
        WHERE word = #{word}
        <if test="type != null and type != ''">
            AND type = #{type}
        </if>
    </delete>

    <!-- selectTbSensitivePage -->
    <select id="selectTbSensitivePage" resultMap="BaseMap">
        SELECT
        <include refid="Base_Column_List"/>
        FROM tb_sensitive_word
        WHERE 1=1
        <if test="filter.id != null and filter.id != ''">
            AND id = #{filter.id}
        </if>
        <if test="filter.word != null and filter.word != ''">
            AND word LIKE CONCAT('%', #{filter.word}, '%')
        </if>
        <if test="filter.type != null and filter.type != ''">
            AND type = #{filter.type}
        </if>
        LIMIT #{pageParam.offset}, #{pageParam.pageSize}
    </select>

    <!-- countByFilter -->
    <select id="countByFilter" resultType="int">
        SELECT COUNT(*)
        FROM tb_sensitive_word
        WHERE 1=1
        <if test="filter.id != null and filter.id != ''">
            AND id = #{filter.id}
        </if>
        <if test="filter.word != null and filter.word != ''">
            AND word LIKE CONCAT('%', #{filter.word}, '%')
        </if>
        <if test="filter.type != null and filter.type != ''">
            AND type = #{filter.type}
        </if>
    </select>

</mapper>
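`selectTbSensitivePage` expects `offset` and `pageSize` to be precomputed by the caller. A minimal sketch of the arithmetic presumably involved follows; the 1-based `pageNo` variable is an assumption, since only `offset` and `pageSize` appear in this diff.

```java
// Hypothetical offset calculation for "LIMIT #{pageParam.offset}, #{pageParam.pageSize}".
public class OffsetSketch {
    public static void main(String[] args) {
        int pageNo = 3;                        // assumed 1-based page index from the client
        int pageSize = 20;                     // rows per page
        int offset = (pageNo - 1) * pageSize;  // 40 -> rows 41..60 of the result set
        System.out.println("LIMIT " + offset + ", " + pageSize);
    }
}
```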
@@ -1,12 +0,0 @@
生日快乐
曹操
幸运
幸运儿
17年前
1条
1梯两户
1比1
年检
幸存
恶搞
游戏机
File diff suppressed because it is too large
@@ -1,12 +0,0 @@
fuck
duck
shit
chicken
fowl
sex
sexy
prostitute
whore
harlot
hooker
gender
File diff suppressed because it is too large