SensitivewordEngine.java — sensitive-word filtering utility class
package keyFilter;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
/**
 * Sensitive-word filtering engine.
 *
 * <p>Matches text against a dictionary organised as a trie of nested {@code Map}s:
 * each node maps a {@link Character} to its child node and carries an
 * {@code "isEnd"} entry whose value {@code "1"} marks the end of a complete word.
 * The trie is produced by {@code SensitiveWordInit}.
 *
 * @author wuke.hwk
 * @since 2018/11/20
 */
public class SensitivewordEngine
{
    /**
     * Sensitive-word dictionary (trie root). Must be assigned before querying;
     * all query methods treat {@code null} as an empty dictionary.
     */
    public static Map sensitiveWordMap = null;

    /** Match type: stop at the first (shortest) complete word found at a position. */
    public static int minMatchTYpe = 1; // name (including typo) kept for API compatibility

    /** Match type: prefer the longest complete word found at a position. */
    public static int maxMatchType = 2;

    /**
     * Number of entries at the trie root (distinct first characters of words,
     * plus the root's {@code "isEnd"} marker if present).
     *
     * @return 0 when the dictionary has not been initialised
     */
    public static int getWordSize()
    {
        if (SensitivewordEngine.sensitiveWordMap == null)
        {
            return 0;
        }
        return SensitivewordEngine.sensitiveWordMap.size();
    }

    /**
     * Checks whether the text contains at least one sensitive word.
     *
     * @param txt text to scan
     * @param matchType {@link #minMatchTYpe} or {@link #maxMatchType}
     * @return {@code true} if any sensitive word occurs in {@code txt}
     */
    public static boolean isContaintSensitiveWord(String txt, int matchType)
    {
        for (int i = 0; i < txt.length(); i++)
        {
            if (checkSensitiveWord(txt, i, matchType) > 0)
            {
                // One hit is enough — no need to scan the rest of the text.
                return true;
            }
        }
        return false;
    }

    /**
     * Collects every sensitive word occurring in the text.
     *
     * @param txt text to scan
     * @param matchType {@link #minMatchTYpe} or {@link #maxMatchType}
     * @return the distinct sensitive words found (empty set when none)
     */
    public static Set<String> getSensitiveWord(String txt, int matchType)
    {
        Set<String> sensitiveWordList = new HashSet<String>();
        for (int i = 0; i < txt.length(); i++)
        {
            int length = checkSensitiveWord(txt, i, matchType);
            if (length > 0)
            {
                sensitiveWordList.add(txt.substring(i, i + length));
                // Resume scanning right after the matched word.
                i = i + length - 1;
            }
        }
        return sensitiveWordList;
    }

    /**
     * Replaces every sensitive word in the text with a mask built from
     * {@code replaceChar} repeated to the word's length.
     *
     * @param txt text to filter
     * @param matchType {@link #minMatchTYpe} or {@link #maxMatchType}
     * @param replaceChar mask unit, e.g. {@code "*"}
     * @return the masked text
     */
    public static String replaceSensitiveWord(String txt, int matchType, String replaceChar)
    {
        String resultTxt = txt;
        for (String word : getSensitiveWord(txt, matchType))
        {
            // String.replace performs a LITERAL replacement. The previous
            // replaceAll treated the word as a regex, so dictionary entries
            // containing metacharacters ("+", "*", ".", ...) corrupted the
            // output or threw PatternSyntaxException.
            resultTxt = resultTxt.replace(word, getReplaceChars(replaceChar, word.length()));
        }
        return resultTxt;
    }

    /**
     * Builds the mask string: {@code replaceChar} repeated {@code length} times.
     *
     * @param replaceChar mask unit
     * @param length number of repetitions (at least 1)
     * @return the mask string
     */
    private static String getReplaceChars(String replaceChar, int length)
    {
        // StringBuilder avoids the O(n^2) cost of String += in a loop.
        StringBuilder resultReplace = new StringBuilder(replaceChar);
        for (int i = 1; i < length; i++)
        {
            resultReplace.append(replaceChar);
        }
        return resultReplace.toString();
    }

    /**
     * Length of the sensitive word starting exactly at {@code beginIndex},
     * or 0 when no complete word starts there.
     *
     * @param txt text to scan
     * @param beginIndex position in {@code txt} where matching starts
     * @param matchType {@link #minMatchTYpe} stops at the first complete word;
     *                  {@link #maxMatchType} keeps walking for a longer one
     * @return the matched word length, or 0
     */
    public static int checkSensitiveWord(String txt, int beginIndex, int matchType)
    {
        Map nowMap = SensitivewordEngine.sensitiveWordMap;
        if (nowMap == null)
        {
            // Dictionary never initialised — treat as empty instead of NPE.
            return 0;
        }
        // Characters walked so far along the trie path.
        int walked = 0;
        // Length of the last COMPLETE word seen on the path. Returning this —
        // not the raw walk length — fixes the bug where a partial continuation
        // (dictionary {"ab","abcd"}, text "abc") reported length 3 instead of 2
        // and made getSensitiveWord extract a non-word substring.
        int matchedLength = 0;
        for (int i = beginIndex; i < txt.length(); i++)
        {
            nowMap = (Map) nowMap.get(txt.charAt(i));
            if (nowMap == null)
            {
                // Path left the trie: no longer word is possible from here.
                break;
            }
            walked++;
            if ("1".equals(nowMap.get("isEnd")))
            {
                matchedLength = walked;
                if (SensitivewordEngine.minMatchTYpe == matchType)
                {
                    // Shortest-match mode: stop at the first complete word.
                    break;
                }
            }
        }
        return matchedLength;
    }
}
SensitiveWordInit.java — sensitive-word dictionary (trie) initialization
package keyFilter;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Builds the sensitive-word dictionary consumed by {@code SensitivewordEngine}:
 * a character trie of nested {@code HashMap}s where each node maps a
 * {@link Character} to its child node and carries an {@code "isEnd"} entry
 * ({@code "1"} = a complete word ends at this node).
 *
 * @author wuke.hwk
 * @since 2018/11/20
 */
public class SensitiveWordInit
{
    /** The most recently built dictionary (trie root). */
    public HashMap sensitiveWordMap;

    /**
     * Builds the trie from the given word list.
     *
     * @param sensitiveWords words to index; a {@code null} list is treated as
     *                       empty, {@code null} entries are skipped, and each
     *                       word is trimmed before insertion
     * @return the dictionary map (also retained in {@link #sensitiveWordMap})
     */
    public Map initKeyWord(List<String> sensitiveWords)
    {
        // De-duplicate and trim before building. The previous version wrapped
        // this in catch(Exception)+printStackTrace, which silently swallowed
        // an NPE on a null list; explicit null handling replaces that.
        Set<String> keyWordSet = new HashSet<String>();
        if (sensitiveWords != null)
        {
            for (String s : sensitiveWords)
            {
                if (s != null)
                {
                    keyWordSet.add(s.trim());
                }
            }
        }
        addSensitiveWordToHashMap(keyWordSet);
        return sensitiveWordMap;
    }

    /**
     * Inserts every word into the trie, one character per level.
     *
     * @param keyWordSet de-duplicated words to insert
     */
    @SuppressWarnings({"rawtypes", "unchecked"})
    private void addSensitiveWordToHashMap(Set<String> keyWordSet)
    {
        // Size hint limits rehashing for the (typically large) root level.
        sensitiveWordMap = new HashMap(keyWordSet.size());
        for (String key : keyWordSet)
        {
            // Walk/extend the trie from the root for each word. nowMap always
            // aliases a node already reachable from sensitiveWordMap, so the
            // insertions below mutate the shared dictionary in place.
            Map nowMap = sensitiveWordMap;
            for (int i = 0; i < key.length(); i++)
            {
                char keyChar = key.charAt(i);
                Object wordMap = nowMap.get(keyChar);
                if (wordMap != null)
                {
                    // Prefix already present — descend into the existing node.
                    nowMap = (Map) wordMap;
                }
                else
                {
                    // New branch: create a node marked as "not a word end" yet.
                    Map<String, Object> newNode = new HashMap<String, Object>();
                    newNode.put("isEnd", "0");
                    nowMap.put(keyChar, newNode);
                    nowMap = newNode;
                }
                if (i == key.length() - 1)
                {
                    // Last character of the word — mark this node as terminal.
                    nowMap.put("isEnd", "1");
                }
            }
            // NOTE: the per-character System.out.println of the whole dictionary
            // that used to live here was debug leftover; printing the full map
            // for every inserted character made large dictionaries unusable.
        }
    }
}
Test.java — test / benchmark driver
package keyFilter;
import com.alibaba.fastjson.JSON;
import textFilter.WordFilter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Ad-hoc benchmark/demo: builds a large dictionary, then compares the
 * trie-based engine against a plain {@code List.contains} lookup and
 * {@code textFilter.WordFilter}, printing the timing of each.
 *
 * @author wuke.hwk
 * @since 2018/11/19
 */
public class Test {
    public static void main(String[] args) {
        sensitiveWordFiltering("中国");
        sensitiveWordFiltering("大");
    }

    /**
     * Runs the three filters over {@code text} and prints their timings.
     *
     * @param text the input to scan
     * @return the sensitive words found by the trie-based engine
     */
    public static Set<String> sensitiveWordFiltering(String text)
    {
        SensitiveWordInit sensitiveWordInit = new SensitiveWordInit();
        // Dictionary: 100000 random five-letter lowercase words...
        List<String> sensitiveWords = new ArrayList<String>();
        for (int n = 0; n < 100000; n++) {
            String candidate = "";
            for (int pos = 0; pos < 5; pos++) {
                candidate = candidate + (char) (Math.random() * 26 + 'a');
            }
            sensitiveWords.add(candidate);
        }
        // ...plus a handful of fixed entries exercised by main().
        sensitiveWords.add("acc");
        sensitiveWords.add("w何北");
        sensitiveWords.add("中国");
        sensitiveWords.add("大");
        sensitiveWords.add("王在");
        // Build the trie and hand it to the engine.
        SensitivewordEngine.sensitiveWordMap = sensitiveWordInit.initKeyWord(sensitiveWords);
        // Match type 2 (maxMatchType): collect every sensitive word in the text.
        long startTime = System.currentTimeMillis();
        Set<String> set = SensitivewordEngine.getSensitiveWord(text, 2);
        long endTime = System.currentTimeMillis();
        System.out.println("filterTime:" + (endTime - startTime) + "|result:[" + JSON.toJSONString(set) + "]");
        // Baseline: naive linear scan of the word list.
        startTime = System.currentTimeMillis();
        boolean contained = sensitiveWords.contains(text);
        endTime = System.currentTimeMillis();
        System.out.println("contentTime:" + (endTime - startTime) + "|result:[" + contained + "]");
        // Third contender: the textFilter.WordFilter implementation.
        WordFilter.init();
        startTime = System.currentTimeMillis();
        String filtered = WordFilter.doFilter(text);
        endTime = System.currentTimeMillis();
        System.out.println("wordFilter:" + (endTime - startTime) + "|result:[" + JSON.toJSONString(filtered) + "]");
        return set;
    }
}