Spring Boot Integration with Redis for Search History, Hot Search, and Sensitive Word Filtering
This article demonstrates how to integrate Spring Boot with Redis to implement personal search‑history storage, a hot‑search ranking feature, and a DFA‑based sensitive‑word filtering mechanism, providing complete Maven dependencies, configuration, utility classes, service logic, and controller endpoints.
This guide explains how to use Spring Boot together with Redis to build three related features: personal search‑history recording, a hot‑search ranking system, and a sensitive‑word filter based on the DFA algorithm.
Maven dependencies required for the project are:
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<!-- Redis data starter -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-redis</artifactId>
<version>2.7.0</version>
</dependency>
<!-- Apache Commons Lang3 -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.12.0</version>
</dependency>

The application.yml file configures the Redis connection:
spring:
redis:
# database index
database: 0
host: 192.168.31.28
port: 6379
password: 123456
lettuce:
pool:
# max connections
max-active: 8
# max wait time (negative means unlimited)
max-wait: -1
# max idle
max-idle: 8
# min idle
min-idle: 0
timeout: 10000

Sensitive‑word filtering is implemented with a DFA (deterministic finite automaton). The SensitiveWordInit class reads a word list (e.g., resources/static/word.txt) and builds a nested Map structure where each character points to the next node and an isEnd flag marks the end of a word.
/**
 * @author shawn
 * @version 1.0
 * @ClassName SensitiveWordInit
 * Description: Initialize the sensitive-word map using DFA.
 *
 * Reads one sensitive word per line from resources/static/word.txt and builds
 * a nested Map where each character maps to its child node; the "isEnd" key
 * ("1"/"0") marks whether a complete word terminates at that node.
 */
@Configuration
@SuppressWarnings({"rawtypes", "unchecked"})
public class SensitiveWordInit {

    // Charset of the word file; explicit UTF-8 avoids platform-default surprises.
    private static final String ENCODING = "UTF-8";

    /**
     * Entry point: read the word file and build the DFA map.
     *
     * @return nested map representing the DFA of all sensitive words
     * @throws IOException if the word file cannot be read
     */
    public Map initKeyWord() throws IOException {
        Set<String> wordSet = readSensitiveWordFile();
        return addSensitiveWordToHashMap(wordSet);
    }

    /**
     * Read the word list from static/word.txt on the classpath, one word per line.
     *
     * Uses try-with-resources so the reader chain is closed even when reading
     * fails; the original closed the streams only on the success path and
     * swallowed exceptions (which could also return a null set).
     */
    private Set<String> readSensitiveWordFile() throws IOException {
        Set<String> wordSet = new HashSet<>();
        ClassPathResource classPathResource = new ClassPathResource("static/word.txt");
        try (InputStream inputStream = classPathResource.getInputStream();
             BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, ENCODING))) {
            String txt;
            while ((txt = br.readLine()) != null) {
                wordSet.add(txt);
            }
        }
        return wordSet;
    }

    /**
     * Build the DFA: walk each word character by character, creating child
     * nodes as needed; mark the node of the final character with isEnd = "1".
     */
    private Map addSensitiveWordToHashMap(Set<String> wordSet) {
        Map wordMap = new HashMap(wordSet.size());
        for (String word : wordSet) {
            Map nowMap = wordMap;
            for (int i = 0; i < word.length(); i++) {
                char keyChar = word.charAt(i);
                Object tempMap = nowMap.get(keyChar);
                if (tempMap != null) {
                    // Prefix already present: descend into the existing node.
                    nowMap = (Map) tempMap;
                } else {
                    // New branch: create the node, not yet a word end.
                    Map newMap = new HashMap<>();
                    newMap.put("isEnd", "0");
                    nowMap.put(keyChar, newMap);
                    nowMap = newMap;
                }
                if (i == word.length() - 1) {
                    nowMap.put("isEnd", "1");
                }
            }
        }
        return wordMap;
    }
}
The SensitiveFilter component uses the map to detect, extract, and replace sensitive words. It supports both minimum and maximum match rules and allows a custom placeholder (default "**" ).
/**
 * @author shawn
 * @ClassName SensitiveFilter
 * Description: DFA-based sensitive-word filter.
 *
 * Walks the nested map built by {@link SensitiveWordInit} to detect, extract,
 * and replace sensitive words in a text.
 */
@Component
public class SensitiveFilter {

    /** DFA map: character -> child node, with "isEnd" marking word ends. */
    private Map sensitiveWordMap = null;

    /** Stop at the first (shortest) complete word from a given position. */
    public static int minMatchType = 1;
    /** Continue and take the longest complete word from a given position. */
    public static int maxMatchType = 2;
    /** Default replacement, inserted once per matched character. */
    public static String placeHolder = "**";

    private static SensitiveFilter instance = null;

    private SensitiveFilter() throws IOException {
        sensitiveWordMap = new SensitiveWordInit().initKeyWord();
    }

    /**
     * Lazily create the singleton. Synchronized so two concurrent first
     * callers cannot build two instances (the original null check was not
     * thread-safe).
     */
    public static synchronized SensitiveFilter getInstance() throws IOException {
        if (instance == null) {
            instance = new SensitiveFilter();
        }
        return instance;
    }

    /**
     * Collect every sensitive word found in the text.
     *
     * @param txt       text to scan
     * @param matchType minMatchType or maxMatchType
     * @return distinct sensitive words present in the text
     */
    public Set<String> getSensitiveWord(String txt, int matchType) {
        Set<String> set = new HashSet<>();
        for (int i = 0; i < txt.length(); i++) {
            int length = CheckSensitiveWord(txt, i, matchType);
            if (length > 0) {
                set.add(txt.substring(i, i + length));
                // Jump past the matched word to avoid re-matching inside it.
                i = i + length - 1;
            }
        }
        return set;
    }

    /** Replace with the default placeholder using minimum matching. */
    public String replaceSensitiveWord(String txt) {
        return replaceSensitiveWord(txt, minMatchType, placeHolder);
    }

    /**
     * Replace each sensitive word with {@code replaceChar} repeated once per
     * character of the word.
     *
     * Fix: uses {@code String.replace} (literal) instead of {@code replaceAll};
     * the original fed the word to the regex engine, which breaks on words
     * containing metacharacters such as '*', '(' or '['.
     */
    public String replaceSensitiveWord(String txt, int matchType, String replaceChar) {
        String resultTxt = txt;
        Set<String> set = getSensitiveWord(txt, matchType);
        for (String word : set) {
            String replaceString = getReplaceChars(replaceChar, word.length());
            resultTxt = resultTxt.replace(word, replaceString);
        }
        return resultTxt;
    }

    /** Build the mask: {@code replaceChar} repeated {@code length} times. */
    private String getReplaceChars(String replaceChar, int length) {
        StringBuilder sb = new StringBuilder(replaceChar);
        for (int i = 1; i < length; i++) {
            sb.append(replaceChar);
        }
        return sb.toString();
    }

    /**
     * Walk the DFA starting at {@code beginIndex}; return the length of the
     * sensitive word found there, or 0 when none.
     *
     * Fix: the original returned the raw walked depth, which over-counted in
     * max-match mode when a longer word shared the prefix but did not fully
     * match (e.g. words {"ab","abcd"} against text "abc" returned 3). The
     * length of the last COMPLETE word is now recorded instead.
     *
     * NOTE(review): {@code matchFlag < 2} means single-character words never
     * match — confirm this minimum length is intended.
     *
     * @param txt        text to scan
     * @param beginIndex starting offset in txt
     * @param matchType  minMatchType stops at the first complete word;
     *                   maxMatchType continues to the longest one
     */
    public int CheckSensitiveWord(String txt, int beginIndex, int matchType) {
        boolean flag = false;
        int matchFlag = 0; // length of the last complete word found
        int depth = 0;     // characters walked along the DFA path
        Map nowMap = sensitiveWordMap;
        for (int i = beginIndex; i < txt.length(); i++) {
            char word = txt.charAt(i);
            nowMap = (Map) nowMap.get(word);
            if (nowMap == null) {
                break; // path leaves the DFA: no further match possible
            }
            depth++;
            if ("1".equals(nowMap.get("isEnd"))) {
                flag = true;
                matchFlag = depth;
                if (minMatchType == matchType) {
                    break;
                }
            }
        }
        if ((maxMatchType == matchType || minMatchType == matchType) && (matchFlag < 2 || !flag)) {
            matchFlag = 0;
        }
        return matchFlag;
    }
}
A small utility class RedisKeyUtils centralises the naming of Redis keys for search history, hot‑search sets, and timestamp storage.
/**
 * Centralises the naming of the Redis keys used by the search feature so
 * every key format lives in one place.
 */
public final class RedisKeyUtils {

    private static final String SPLIT = ":";
    private static final String SEARCH = "search";
    private static final String SEARCH_HISTORY = "search-history";
    private static final String HOT_SEARCH = "hot-search";
    private static final String SEARCH_TIME = "search-time";

    /** Utility class: prevent instantiation. */
    private RedisKeyUtils() {
    }

    /** Key of a user's personal search history: {@code search:search-history:<userId>}. */
    public static String getSearchHistoryKey(String userId) {
        return SEARCH + SPLIT + SEARCH_HISTORY + SPLIT + userId;
    }

    /** Key of the global hot-search set: {@code search:hot-search}. */
    public static String getHotSearchKey() {
        return SEARCH + SPLIT + HOT_SEARCH;
    }

    /** Key of a keyword's search timestamp: {@code search:search-time:<searchKey>}. */
    public static String getSearchTimeKey(String searchKey) {
        return SEARCH + SPLIT + SEARCH_TIME + SPLIT + searchKey;
    }
}
The RedisService class implements the core business logic: adding/deleting/querying a user’s search history, retrieving the top hot‑search terms (limited to the most recent month), and incrementing the score of a hot‑search keyword.
/**
 * Service layer for the search feature, backed by Redis via
 * StringRedisTemplate: per-user search history, hot-search ranking,
 * and keyword score increments (method bodies omitted in this listing).
 */
@Service("redisService")
public class RedisService {
    private Logger logger = LoggerFactory.getLogger(RedisService.class);
    // Upper index for the hot-search range; presumably yields the top 10
    // entries via a 0..9 Redis range — TODO confirm in getHotList.
    private static final Integer HOT_SEARCH_NUMBER = 9;
    // 30 days expressed in seconds; limits hot-search terms to the most recent month.
    private static final Long HOT_SEARCH_TIME = 30L * 24 * 60 * 60;
    @Resource
    private StringRedisTemplate redisSearchTemplate;
    // methods: addSearchHistoryByUserId, delSearchHistoryByUserId, getSearchHistoryByUserId,
    // getHotList, incrementScoreByUserId, incrementScore (omitted for brevity)
}Finally, a SearchHistoryController exposes REST endpoints for the front‑end to add a search record, delete it, fetch a user’s history, and obtain the hot‑search list.
/**
 * REST endpoints for the front-end: record a search, delete a history entry,
 * fetch a user's history, and read the hot-search ranking.
 */
@RestController
public class SearchHistoryController {

    @Autowired
    RedisService redisService;

    /**
     * Record a search: append it to the user's history and bump the keyword's
     * hot-search score.
     * NOTE(review): this mutates state, so POST would be more appropriate than
     * GET — kept as GET to avoid breaking existing front-end callers.
     */
    @GetMapping("/add")
    public String addSearchHistoryByUserId(String userId, String searchKey) {
        redisService.addSearchHistoryByUserId(userId, searchKey);
        redisService.incrementScore(searchKey);
        return null;
    }

    /** Delete one entry from the user's search history; returns the Redis removal count. */
    @GetMapping("/del")
    public Long delSearchHistoryByUserId(String userId, String searchKey) {
        return redisService.delSearchHistoryByUserId(userId, searchKey);
    }

    /**
     * Fetch the user's search history. The {@code <String>} type argument was
     * lost in the original listing and is restored here — presumably the
     * entries are plain keyword strings; verify against RedisService.
     */
    @GetMapping("/getUser")
    public List<String> getSearchHistoryByUserId(String userId) {
        return redisService.getSearchHistoryByUserId(userId);
    }

    /** Read the hot-search list for the given search key (generic restored as above). */
    @GetMapping("/getHot")
    public List<String> getHotList(String searchKey) {
        return redisService.getHotList(searchKey);
    }
}
Architect's Guide
Dedicated to sharing programmer-architect skills—Java backend, system, microservice, and distributed architectures—to help you become a senior architect.
How this landed with the community
Was this worth your time?
0 Comments
Thoughtful readers leave field notes, pushback, and hard-won operational detail here.