Redis is more than a caching tool: it is a high-performance data structure server. Below I walk through how Redis is used in real business scenarios, from the basics to advanced features.
I. Business Applications of the Basic Data Structures
1. String Scenarios
Caching user sessions
java
// Store user info with an expiration time
public void cacheUserSession(String userId, User user, int expireSeconds) {
String key = "session:" + userId;
String value = JSON.toJSONString(user);
// Set the value and expiration in one call
redisTemplate.opsForValue().set(key, value, expireSeconds, TimeUnit.SECONDS);
}
// Fetch the user session (renews the TTL on access)
public User getUserSession(String userId) {
String key = "session:" + userId;
String json = redisTemplate.opsForValue().get(key);
if (json != null) {
// Renew the TTL
redisTemplate.expire(key, 30, TimeUnit.MINUTES);
return JSON.parseObject(json, User.class);
}
return null;
}
Distributed lock (basic version)
java
public class SimpleDistributedLock {
private RedisTemplate<String, String> redisTemplate;
public boolean tryLock(String lockKey, String requestId, int expireSeconds) {
return Boolean.TRUE.equals(redisTemplate.opsForValue()
.setIfAbsent(lockKey, requestId, expireSeconds, TimeUnit.SECONDS));
}
public boolean unlock(String lockKey, String requestId) {
String script =
"if redis.call('get', KEYS[1]) == ARGV[1] then " +
" return redis.call('del', KEYS[1]) " +
"else " +
" return 0 " +
"end";
Long result = redisTemplate.execute(
new DefaultRedisScript<>(script, Long.class),
Collections.singletonList(lockKey),
requestId
);
return result != null && result > 0;
}
}
2. Hash Scenarios
Shopping cart implementation
java
@Component
public class ShoppingCartService {
@Autowired
private RedisTemplate<String, String> redisTemplate;
// Add an item to the cart
public void addToCart(String userId, String productId, int quantity) {
String key = "cart:" + userId;
redisTemplate.opsForHash().increment(key, productId, quantity);
// Expire the cart after 7 days
redisTemplate.expire(key, 7, TimeUnit.DAYS);
}
// Get item quantities from the cart
public Map<String, Integer> getCart(String userId) {
String key = "cart:" + userId;
Map<Object, Object> entries = redisTemplate.opsForHash().entries(key);
return entries.entrySet().stream()
.collect(Collectors.toMap(
e -> (String) e.getKey(),
e -> Integer.parseInt((String) e.getValue())
));
}
// Remove several items from the cart
public void removeItems(String userId, List<String> productIds) {
String key = "cart:" + userId;
Object[] array = productIds.toArray();
redisTemplate.opsForHash().delete(key, array);
}
}
Object caching (user detail)
java
public class UserCacheService {
public void cacheUserDetail(User user) {
String key = "user:detail:" + user.getId();
Map<String, String> hash = new HashMap<>();
hash.put("name", user.getName());
hash.put("email", user.getEmail());
hash.put("phone", user.getPhone());
hash.put("avatar", user.getAvatar());
hash.put("updatedAt", String.valueOf(System.currentTimeMillis()));
redisTemplate.opsForHash().putAll(key, hash);
redisTemplate.expire(key, 1, TimeUnit.HOURS);
}
// Partially update user fields
public void updateUserField(String userId, String field, String value) {
String key = "user:detail:" + userId;
redisTemplate.opsForHash().put(key, field, value);
redisTemplate.opsForHash().put(key, "updatedAt", String.valueOf(System.currentTimeMillis()));
}
}
3. List Scenarios
Message queue (simple version)
java
public class SimpleMessageQueue {
@Autowired
private RedisTemplate<String, String> redisTemplate;
// Push a message onto the queue
public void sendMessage(String queue, String message) {
redisTemplate.opsForList().leftPush(queue, message);
// Cap the queue length to avoid unbounded memory growth
redisTemplate.opsForList().trim(queue, 0, 9999);
}
// Consume a message (blocking pop)
public String consumeMessage(String queue, long timeout) {
return redisTemplate.opsForList().rightPop(queue, timeout, TimeUnit.SECONDS);
}
// Send messages in bulk
public void batchSend(String queue, List<String> messages) {
redisTemplate.executePipelined((RedisCallback<Object>) connection -> {
for (String message : messages) {
connection.lPush(queue.getBytes(), message.getBytes());
}
return null;
});
}
}
Recently viewed items
java
public class BrowseHistoryService {
private static final int MAX_HISTORY = 50;
public void addBrowseHistory(String userId, String productId) {
String key = "browse:history:" + userId;
// Remove any existing entry first, then push to the front
redisTemplate.execute(new RedisCallback<Object>() {
@Override
public Object doInRedis(RedisConnection connection) {
connection.lRem(key.getBytes(), 0, productId.getBytes());
connection.lPush(key.getBytes(), productId.getBytes());
connection.lTrim(key.getBytes(), 0, MAX_HISTORY - 1);
connection.expire(key.getBytes(), 30L * 24 * 60 * 60); // 30 days
return null;
}
});
}
public List<String> getBrowseHistory(String userId, int count) {
String key = "browse:history:" + userId;
return redisTemplate.opsForList().range(key, 0, count - 1);
}
}
4. Set Scenarios
User tagging system
java
public class UserTagService {
// Tag a user
public void tagUser(String userId, String tag) {
String key = "user:tags:" + userId;
redisTemplate.opsForSet().add(key, tag);
}
// Tag a user with multiple tags
public void tagUser(String userId, Set<String> tags) {
String key = "user:tags:" + userId;
redisTemplate.opsForSet().add(key, tags.toArray(new String[0]));
}
// Tags shared by two users
public Set<String> getCommonTags(String userId1, String userId2) {
String key1 = "user:tags:" + userId1;
String key2 = "user:tags:" + userId2;
return redisTemplate.opsForSet().intersect(key1, key2);
}
// All users carrying a given tag
public Set<String> getUsersByTag(String tag) {
String key = "tag:users:" + tag;
return redisTemplate.opsForSet().members(key);
// Note: this reverse index must be maintained whenever a user is tagged, e.g.
// redisTemplate.opsForSet().add("tag:users:" + tag, userId);
}
}
Lottery system
java
public class LotteryService {
// Add a participant
public void addParticipant(String lotteryId, String userId) {
String key = "lottery:participants:" + lotteryId;
redisTemplate.opsForSet().add(key, userId);
}
// Randomly draw winners
public List<String> drawWinners(String lotteryId, int winnerCount) {
String key = "lottery:participants:" + lotteryId;
// Option 1: random members (may contain duplicates)
// redisTemplate.opsForSet().randomMembers(key, winnerCount);
// Option 2: distinct random members (returns a Set, so copy into a List)
Set<String> winners = redisTemplate.opsForSet().distinctRandomMembers(key, winnerCount);
return winners != null ? new ArrayList<>(winners) : Collections.emptyList();
}
// Check whether a user has already entered
public boolean hasParticipated(String lotteryId, String userId) {
String key = "lottery:participants:" + lotteryId;
return Boolean.TRUE.equals(redisTemplate.opsForSet().isMember(key, userId));
}
}
5. Sorted Set Scenarios
Real-time leaderboard
java
@Slf4j
@Service
public class RealTimeRankingService {
// Set a user's score
public void updateScore(String rankingKey, String userId, double score) {
redisTemplate.opsForZSet().add(rankingKey, userId, score);
// Keep only the top 1000 entries: trim everything below rank -1001 (the lowest scores)
redisTemplate.opsForZSet().removeRange(rankingKey, 0, -1001);
}
// Increment a user's score
public void incrementScore(String rankingKey, String userId, double delta) {
redisTemplate.opsForZSet().incrementScore(rankingKey, userId, delta);
}
// Get a user's rank (0-based)
public Long getUserRank(String rankingKey, String userId, boolean desc) {
if (desc) {
// Descending rank (highest score first)
return redisTemplate.opsForZSet().reverseRank(rankingKey, userId);
} else {
return redisTemplate.opsForZSet().rank(rankingKey, userId);
}
}
// Top N of the leaderboard
public List<RankingItem> getTopN(String rankingKey, int n) {
Set<ZSetOperations.TypedTuple<String>> topSet =
redisTemplate.opsForZSet().reverseRangeWithScores(rankingKey, 0, n - 1);
return Optional.ofNullable(topSet)
.orElse(Collections.emptySet())
.stream()
.map(tuple -> new RankingItem(tuple.getValue(), tuple.getScore()))
.collect(Collectors.toList());
}
// Paged leaderboard query
public List<RankingItem> getRankingRange(String rankingKey, int start, int end, boolean desc) {
Set<ZSetOperations.TypedTuple<String>> rangeSet;
if (desc) {
rangeSet = redisTemplate.opsForZSet().reverseRangeWithScores(rankingKey, start, end);
} else {
rangeSet = redisTemplate.opsForZSet().rangeWithScores(rankingKey, start, end);
}
return Optional.ofNullable(rangeSet)
.orElse(Collections.emptySet())
.stream()
.map(tuple -> new RankingItem(tuple.getValue(), tuple.getScore()))
.collect(Collectors.toList());
}
// Entries around a user's own rank (social feature)
public List<RankingItem> getNearbyRanking(String rankingKey, String userId, int range) {
Long rank = getUserRank(rankingKey, userId, true);
if (rank == null) return Collections.emptyList();
long start = Math.max(0, rank - range);
long end = rank + range;
return getRankingRange(rankingKey, (int)start, (int)end, true);
}
@Data
@AllArgsConstructor
public static class RankingItem {
private String userId;
private Double score;
}
}
Delay queue
java
public class DelayQueueService {
// Add a delayed task
public void addDelayTask(String queueKey, String taskId, String data, long delaySeconds) {
double score = System.currentTimeMillis() + delaySeconds * 1000;
redisTemplate.opsForZSet().add(queueKey, taskId + ":" + data, score);
}
// Process due tasks
public void processDelayTasks(String queueKey) {
// Fetch every task whose score is <= now
Set<ZSetOperations.TypedTuple<String>> tasks =
redisTemplate.opsForZSet().rangeByScoreWithScores(queueKey, 0, System.currentTimeMillis());
if (tasks == null || tasks.isEmpty()) return;
for (ZSetOperations.TypedTuple<String> tuple : tasks) {
String value = tuple.getValue();
String[] parts = value.split(":", 2);
String taskId = parts[0];
String data = parts.length > 1 ? parts[1] : "";
try {
// Handle the task
processTask(taskId, data);
// Remove it from the queue
redisTemplate.opsForZSet().remove(queueKey, value);
} catch (Exception e) {
log.error("Failed to process delayed task: {}", taskId, e);
// Retry or move to a dead-letter queue
}
}
}
// Distributed variant (prevents several instances from processing the same tasks)
public void processDelayTasksDistributed(String queueKey, String instanceId) {
String lockKey = "lock:" + queueKey;
// Take a distributed lock
if (tryLock(lockKey, instanceId, 30)) {
try {
processDelayTasks(queueKey);
} finally {
unlock(lockKey, instanceId);
}
}
}
}
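One caveat with the polling loop above: every instance that calls processDelayTasks reads the same due tasks before any of them is removed, which is why the distributed variant falls back to a lock. As an alternative, here is a minimal sketch (my addition, not from the original; the member layout "taskId:data" and key are assumed to match DelayQueueService) that claims due tasks atomically with a Lua script, so several pollers can run concurrently without locking:
java
public class AtomicDelayQueuePoller {

    // Pops up to ARGV[2] members whose score <= ARGV[1] and returns them,
    // so each due task is handed to exactly one poller.
    private static final String CLAIM_DUE_TASKS_SCRIPT =
            "local due = redis.call('zrangebyscore', KEYS[1], 0, ARGV[1], 'LIMIT', 0, ARGV[2]) " +
            "if #due > 0 then redis.call('zrem', KEYS[1], unpack(due)) end " +
            "return due";

    private final RedisTemplate<String, String> redisTemplate;

    public AtomicDelayQueuePoller(RedisTemplate<String, String> redisTemplate) {
        this.redisTemplate = redisTemplate;
    }

    // Returns the claimed "taskId:data" members; the caller processes them afterwards
    @SuppressWarnings("unchecked")
    public List<String> claimDueTasks(String queueKey, int batchSize) {
        List<String> claimed = redisTemplate.execute(
                new DefaultRedisScript<>(CLAIM_DUE_TASKS_SCRIPT, List.class),
                Collections.singletonList(queueKey),
                String.valueOf(System.currentTimeMillis()),
                String.valueOf(batchSize));
        return claimed != null ? claimed : Collections.emptyList();
    }
}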
II. Advanced Data Structures and Features
1. BitMap Applications
User check-in system
java
public class CheckInSystem {
// Check in for the given date
public boolean checkIn(String userId, LocalDate date) {
String key = getCheckInKey(userId, date.getYear(), date.getMonthValue());
int dayOfMonth = date.getDayOfMonth();
// Set the check-in bit; SETBIT returns the previous bit, so true means the user had already checked in
return Boolean.TRUE.equals(
redisTemplate.opsForValue().setBit(key, dayOfMonth - 1, true)
);
}
// Whether the user checked in on that date
public boolean isCheckedIn(String userId, LocalDate date) {
String key = getCheckInKey(userId, date.getYear(), date.getMonthValue());
int dayOfMonth = date.getDayOfMonth();
return Boolean.TRUE.equals(
redisTemplate.opsForValue().getBit(key, dayOfMonth - 1)
);
}
// Consecutive check-in days ending at endDate
public int getContinuousCheckInDays(String userId, LocalDate endDate) {
int days = 0;
LocalDate current = endDate;
while (days < 31) { // check at most 31 days back
if (isCheckedIn(userId, current)) {
days++;
current = current.minusDays(1);
} else {
break;
}
}
return days;
}
// Count check-in days in a month
public int getMonthlyCheckInCount(String userId, int year, int month) {
String key = getCheckInKey(userId, year, month);
// BITCOUNT counts the set bits
Long count = redisTemplate.execute((RedisCallback<Long>) connection ->
connection.bitCount(key.getBytes())
);
return count != null ? count.intValue() : 0;
}
// Which days of the month have a check-in
public List<Integer> getMonthlyCheckInDetails(String userId, int year, int month) {
String key = getCheckInKey(userId, year, month);
List<Integer> checkedDays = new ArrayList<>();
// Read the raw bitmap bytes
byte[] bitmap = redisTemplate.execute((RedisCallback<byte[]>) connection ->
connection.get(key.getBytes())
);
if (bitmap != null) {
for (int i = 0; i < bitmap.length * 8; i++) {
int byteIndex = i / 8;
int bitIndex = i % 8;
if ((bitmap[byteIndex] & (0x80 >> bitIndex)) != 0) { // SETBIT offset 0 is the MSB of each byte
checkedDays.add(i + 1); // days are 1-based
}
}
}
return checkedDays;
}
private String getCheckInKey(String userId, int year, int month) {
return String.format("checkin:%s:%d-%02d", userId, year, month);
}
}
Active user statistics
java
public class ActiveUserStatistics {
// Record a user as active today
public void recordActive(String userId, LocalDate date) {
String key = getDailyActiveKey(date);
// Map the user into a 100M-bit space (floorMod avoids a negative index; hash collisions make this approximate)
int userIdHash = Math.floorMod(userId.hashCode(), 100_000_000);
redisTemplate.opsForValue().setBit(key, userIdHash, true);
redisTemplate.expire(key, 30, TimeUnit.DAYS); // keep 30 days
}
// Daily active user count
public long getDailyActiveCount(LocalDate date) {
String key = getDailyActiveKey(date);
Long count = redisTemplate.execute((RedisCallback<Long>) connection ->
connection.bitCount(key.getBytes())
);
return count != null ? count : 0;
}
// Weekly active users (BITOP OR across 7 daily bitmaps)
public long getWeeklyActiveCount(LocalDate endDate) {
LocalDate startDate = endDate.minusDays(6);
List<byte[]> keys = new ArrayList<>();
// Collect the 7 daily keys
for (LocalDate date = startDate; !date.isAfter(endDate); date = date.plusDays(1)) {
String key = getDailyActiveKey(date);
keys.add(key.getBytes());
}
// Run BITOP OR into a temporary key
String destKey = "weekly:active:" + endDate.toString();
Long result = redisTemplate.execute((RedisCallback<Long>) connection ->
connection.bitOp(RedisStringCommands.BitOperation.OR,
destKey.getBytes(),
keys.toArray(new byte[0][])
)
);
if (result != null) {
// Count bits in the merged bitmap
Long count = redisTemplate.execute((RedisCallback<Long>) conn ->
conn.bitCount(destKey.getBytes())
);
// Drop the temporary key
redisTemplate.delete(destKey);
return count != null ? count : 0;
}
return 0;
}
// Retained users between two days (BITOP AND)
public long getRetentionCount(LocalDate date1, LocalDate date2) {
String key1 = getDailyActiveKey(date1);
String key2 = getDailyActiveKey(date2);
String destKey = "retention:" + date1 + ":" + date2;
// Run BITOP AND into a temporary key
Long result = redisTemplate.execute((RedisCallback<Long>) connection ->
connection.bitOp(RedisStringCommands.BitOperation.AND,
destKey.getBytes(),
key1.getBytes(),
key2.getBytes()
)
);
if (result != null) {
Long count = redisTemplate.execute((RedisCallback<Long>) conn ->
conn.bitCount(destKey.getBytes())
);
redisTemplate.delete(destKey);
return count != null ? count : 0;
}
return 0;
}
private String getDailyActiveKey(LocalDate date) {
return "active:daily:" + date.toString();
}
}
2. HyperLogLog Applications
UV statistics (unique visitors)
java
public class UVStatisticsService {
// Record a visit (daily UV)
public void recordVisit(String pageId, String userId, LocalDate date) {
String key = getDailyUVKey(pageId, date);
redisTemplate.opsForHyperLogLog().add(key, userId);
// Expire the counter
redisTemplate.expire(key, 31, TimeUnit.DAYS);
}
// Daily UV
public Long getDailyUV(String pageId, LocalDate date) {
String key = getDailyUVKey(pageId, date);
return redisTemplate.opsForHyperLogLog().size(key);
}
// Weekly UV (merge 7 daily counters)
public Long getWeeklyUV(String pageId, LocalDate endDate) {
LocalDate startDate = endDate.minusDays(6);
List<String> keys = new ArrayList<>();
for (LocalDate date = startDate; !date.isAfter(endDate); date = date.plusDays(1)) {
keys.add(getDailyUVKey(pageId, date));
}
String destKey = "weekly:uv:" + pageId + ":" + endDate;
// PFMERGE into a temporary key
Long result = redisTemplate.opsForHyperLogLog().union(destKey, keys.toArray(new String[0]));
if (result != null) {
Long count = redisTemplate.opsForHyperLogLog().size(destKey);
redisTemplate.delete(destKey);
return count;
}
return 0L;
}
// Near-real-time UV (hour-level sliding window)
public Long getHourlyUV(String pageId) {
String hourKey = getHourlyUVKey(pageId, LocalDateTime.now());
String prevHourKey = getHourlyUVKey(pageId, LocalDateTime.now().minusHours(1));
// Merge the last two hourly counters (sliding window)
String destKey = "sliding:uv:" + pageId;
redisTemplate.opsForHyperLogLog().union(destKey, hourKey, prevHourKey);
Long count = redisTemplate.opsForHyperLogLog().size(destKey);
redisTemplate.delete(destKey);
return count;
}
private String getDailyUVKey(String pageId, LocalDate date) {
return String.format("uv:daily:%s:%s", pageId, date.toString());
}
private String getHourlyUVKey(String pageId, LocalDateTime datetime) {
return String.format("uv:hourly:%s:%s", pageId,
datetime.format(DateTimeFormatter.ofPattern("yyyyMMddHH")));
}
}
3. Geo Applications
Nearby people / nearby merchants
java
@Slf4j
@Service
public class NearbyService {
@Autowired
private RedisTemplate<String, String> redisTemplate;
// Add a location
public void addLocation(String userId, double longitude, double latitude) {
String key = "user:locations";
redisTemplate.opsForGeo().add(key, new Point(longitude, latitude), userId);
}
// Update a location
public void updateLocation(String userId, double longitude, double latitude) {
addLocation(userId, longitude, latitude); // GEOADD simply overwrites an existing member
}
// Find nearby users
public List<NearbyUser> findNearbyUsers(String userId, double radiusKm, int limit) {
String key = "user:locations";
// Look up the current user's coordinates first
List<Point> points = redisTemplate.opsForGeo().position(key, userId);
if (points == null || points.isEmpty()) {
return Collections.emptyList();
}
Point currentPoint = points.get(0);
// Radius search around that point
Circle within = new Circle(currentPoint, new Distance(radiusKm, Metrics.KILOMETERS));
GeoResults<RedisGeoCommands.GeoLocation<String>> results =
redisTemplate.opsForGeo().radius(key, within,
RedisGeoCommands.GeoRadiusCommandArgs.newGeoRadiusArgs()
.includeDistance()
.sortAscending()
.limit(limit));
return results.getContent().stream()
.filter(geoResult -> !geoResult.getContent().getName().equals(userId))
.map(geoResult -> {
RedisGeoCommands.GeoLocation<String> location = geoResult.getContent();
Distance distance = geoResult.getDistance();
return new NearbyUser(
location.getName(),
location.getPoint().getX(),
location.getPoint().getY(),
distance.getValue()
);
})
.collect(Collectors.toList());
}
// Distance between two users
public Double getDistance(String userId1, String userId2) {
String key = "user:locations";
Distance distance = redisTemplate.opsForGeo().distance(key, userId1, userId2, Metrics.KILOMETERS);
return distance != null ? distance.getValue() : null;
}
// Geohash string (e.g. for map display on the frontend)
public String getGeoHash(String userId) {
String key = "user:locations";
List<String> hash = redisTemplate.opsForGeo().hash(key, userId);
return hash != null && !hash.isEmpty() ? hash.get(0) : null;
}
// Bulk-import merchant locations
public void batchAddMerchants(List<Merchant> merchants) {
String key = "merchant:locations";
Map<String, Point> merchantLocations = merchants.stream()
.collect(Collectors.toMap(
Merchant::getId,
m -> new Point(m.getLongitude(), m.getLatitude())
));
redisTemplate.opsForGeo().add(key, merchantLocations);
}
// Find nearby merchants (optionally filtered by category)
public List<Merchant> findNearbyMerchants(double longitude, double latitude,
double radiusKm, String category) {
String key = "merchant:locations";
Point center = new Point(longitude, latitude);
Circle within = new Circle(center, new Distance(radiusKm, Metrics.KILOMETERS));
// Fetch everything within the radius first
GeoResults<RedisGeoCommands.GeoLocation<String>> results =
redisTemplate.opsForGeo().radius(key, within);
// No filter requested: return directly
if (category == null) {
return convertToMerchants(results);
}
// Filter by category
List<String> merchantIds = results.getContent().stream()
.map(geoResult -> geoResult.getContent().getName())
.collect(Collectors.toList());
// Look up categories in bulk (pipelined to cut round trips)
List<Object> categories = redisTemplate.executePipelined((RedisCallback<Object>) connection -> {
for (String merchantId : merchantIds) {
connection.hGet(("merchant:" + merchantId).getBytes(), "category".getBytes());
}
return null;
});
// Keep merchants whose category matches
List<Merchant> filteredMerchants = new ArrayList<>();
for (int i = 0; i < merchantIds.size(); i++) {
String merchantCategory = (String) categories.get(i);
if (category.equals(merchantCategory)) {
// Load the merchant details
Merchant merchant = getMerchant(merchantIds.get(i));
if (merchant != null) {
filteredMerchants.add(merchant);
}
}
}
return filteredMerchants;
}
@Data
@AllArgsConstructor
public static class NearbyUser {
private String userId;
private double longitude;
private double latitude;
private double distanceKm;
}
@Data
public static class Merchant {
private String id;
private String name;
private double longitude;
private double latitude;
private String category;
}
}
4. Stream Applications
Message queue (with consumer groups)
java
@Component
public class RedisStreamMQ {
// Produce a message
public String produce(String streamKey, Map<String, String> message) {
MapRecord<String, String, String> record =
StreamRecords.newRecord()
.in(streamKey)
.ofMap(message);
return redisTemplate.opsForStream().add(record).getValue();
}
// Create a consumer group
public void createConsumerGroup(String streamKey, String groupName) {
try {
redisTemplate.opsForStream().createGroup(streamKey, groupName);
} catch (RedisSystemException e) {
if (!e.getMessage().contains("BUSYGROUP")) {
throw e;
}
// Group already exists; ignore
}
}
// Consume messages (standalone consumer, no group)
public List<MapRecord<String, String, String>> consume(String streamKey, String consumerName, int batchSize) {
// Resume from the last processed ID
String lastId = getLastProcessedId(consumerName);
if (lastId == null) {
lastId = "0-0"; // start from the beginning
}
StreamReadOptions options = StreamReadOptions.empty().count(batchSize);
// XREAD takes a stream/offset pair, not a bare ReadOffset
List<MapRecord<String, String, String>> records =
redisTemplate.<String, String>opsForStream().read(options, StreamOffset.create(streamKey, ReadOffset.from(lastId)));
if (records != null && !records.isEmpty()) {
// Remember the last processed ID
MapRecord<String, String, String> lastRecord = records.get(records.size() - 1);
saveLastProcessedId(consumerName, lastRecord.getId().getValue());
}
return records;
}
// Consume messages (consumer-group mode)
public List<MapRecord<String, String, String>> consumeGroup(
String streamKey, String groupName, String consumerName, int batchSize) {
Consumer consumer = Consumer.from(groupName, consumerName);
StreamReadOptions options = StreamReadOptions.empty().count(batchSize);
List<MapRecord<String, String, String>> records =
redisTemplate.<String, String>opsForStream().read(consumer, options,
StreamOffset.create(streamKey, ReadOffset.lastConsumed()));
// Acknowledge (ACK); in real code this should happen only after the record is processed successfully
if (records != null) {
for (MapRecord<String, String, String> record : records) {
redisTemplate.opsForStream().acknowledge(streamKey, groupName, record.getId());
}
}
return records;
}
// Dead-letter handling
public void processDeadLetter(String streamKey, String groupName,
String deadLetterStream, int retryCount) {
// Inspect pending (delivered but unacknowledged) messages
PendingMessages pendingMessages = redisTemplate.opsForStream()
.pending(streamKey, groupName, Range.unbounded(), 100);
for (PendingMessage pending : pendingMessages) {
// Fetch the message body by its ID
String messageId = pending.getId().getValue();
List<MapRecord<String, String, String>> messages = redisTemplate.<String, String>opsForStream()
.range(streamKey, Range.closed(messageId, messageId));
if (!messages.isEmpty()) {
MapRecord<String, String, String> message = messages.get(0);
// Track how many times this message has been retried
String retryKey = "retry:" + pending.getId();
Long retry = redisTemplate.opsForValue().increment(retryKey);
redisTemplate.expire(retryKey, 1, TimeUnit.HOURS);
if (retry != null && retry > retryCount) {
// 超过重试次数,移到死信队列
produce(deadLetterStream, message.getValue());
// ACK so it no longer shows up as pending on the original stream
redisTemplate.opsForStream().acknowledge(streamKey, groupName, pending.getId());
}
}
}
}
}
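Polling with read(...) works, but Spring Data Redis also ships a StreamMessageListenerContainer that polls in the background and dispatches records to a listener. Below is a hedged wiring sketch for the consumer-group case; the stream name "order-stream", group "order-group", and consumer name are placeholder assumptions, the group is assumed to have been created already (e.g. via createConsumerGroup above), and real code would acknowledge only after the handler succeeds.
java
@Configuration
public class StreamListenerConfig {

    // A background container that reads new messages for group "order-group"
    // from stream "order-stream" and hands each record to the listener.
    @Bean
    public StreamMessageListenerContainer<String, MapRecord<String, String, String>> streamContainer(
            RedisConnectionFactory connectionFactory, StringRedisTemplate redisTemplate) {

        StreamMessageListenerContainer.StreamMessageListenerContainerOptions<String, MapRecord<String, String, String>> options =
                StreamMessageListenerContainer.StreamMessageListenerContainerOptions.builder()
                        .pollTimeout(Duration.ofSeconds(1))
                        .build();

        StreamMessageListenerContainer<String, MapRecord<String, String, String>> container =
                StreamMessageListenerContainer.create(connectionFactory, options);

        container.receive(
                Consumer.from("order-group", "consumer-1"),
                StreamOffset.create("order-stream", ReadOffset.lastConsumed()),
                record -> {
                    // Process the record here, then acknowledge it explicitly
                    redisTemplate.opsForStream().acknowledge("order-stream", "order-group", record.getId());
                });

        container.start();
        return container;
    }
}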
III. Complex Business Scenarios
1. E-commerce Flash-Sale (Seckill) System
java
@Slf4j
@Service
public class SeckillService {
// Initialize the flash-sale stock
public boolean initSeckillStock(String seckillId, int totalStock) {
String stockKey = "seckill:stock:" + seckillId;
String boughtKey = "seckill:bought:" + seckillId;
// Lua keeps the exists-then-set check atomic
String script =
"if redis.call('exists', KEYS[1]) == 0 then " +
" redis.call('set', KEYS[1], ARGV[1]) " +
" return 1 " +
"else " +
" return 0 " +
"end";
Long result = redisTemplate.execute(
new DefaultRedisScript<>(script, Long.class),
Collections.singletonList(stockKey),
String.valueOf(totalStock)
);
return result != null && result == 1;
}
// Core flash-sale flow
public SeckillResult seckill(String seckillId, String userId) {
String stockKey = "seckill:stock:" + seckillId;
String boughtKey = "seckill:bought:" + seckillId;
String lockKey = "seckill:lock:" + seckillId + ":" + userId;
// Anti-abuse: reject users who already bought (buyers are recorded in the bought set via SADD)
if (Boolean.TRUE.equals(redisTemplate.opsForSet().isMember(boughtKey, userId))) {
return SeckillResult.alreadyBought();
}
// Per-user lock to throttle duplicate requests
if (!tryLock(lockKey, userId, 5)) {
return SeckillResult.retryLater();
}
try {
// Decrement stock and record the buyer atomically via Lua
String script =
"local stock = tonumber(redis.call('get', KEYS[1])) " +
"if stock and stock > 0 then " +
" redis.call('decr', KEYS[1]) " +
" redis.call('sadd', KEYS[2], ARGV[1]) " +
" return 1 " +
"else " +
" return 0 " +
"end";
Long success = redisTemplate.execute(
new DefaultRedisScript<>(script, Long.class),
Arrays.asList(stockKey, boughtKey),
userId
);
if (success != null && success == 1) {
// Success: record the purchase
recordPurchase(seckillId, userId);
return SeckillResult.success();
} else {
return SeckillResult.soldOut();
}
} finally {
unlock(lockKey, userId);
}
}
// Query the flash-sale status
public SeckillStatus getSeckillStatus(String seckillId, String userId) {
String boughtKey = "seckill:bought:" + seckillId;
String stockKey = "seckill:stock:" + seckillId;
SeckillStatus status = new SeckillStatus();
// Did this user get one?
Boolean isBought = redisTemplate.opsForSet().isMember(boughtKey, userId);
status.setBought(Boolean.TRUE.equals(isBought));
// Remaining stock
String stock = redisTemplate.opsForValue().get(stockKey);
status.setRemainingStock(stock != null ? Integer.parseInt(stock) : 0);
// Purchase-order rank
if (status.isBought()) {
Long rank = redisTemplate.opsForZSet().rank(
"seckill:order:" + seckillId, userId);
status.setRank(rank != null ? rank + 1 : null);
}
return status;
}
@Data
public static class SeckillResult {
private boolean success;
private String message;
private String orderId;
public static SeckillResult success() {
SeckillResult result = new SeckillResult();
result.setSuccess(true);
result.setMessage("Purchase successful");
result.setOrderId(UUID.randomUUID().toString());
return result;
}
public static SeckillResult soldOut() {
SeckillResult result = new SeckillResult();
result.setSuccess(false);
result.setMessage("Sold out");
return result;
}
public static SeckillResult alreadyBought() {
SeckillResult result = new SeckillResult();
result.setSuccess(false);
result.setMessage("You have already purchased this item");
return result;
}
public static SeckillResult retryLater() {
SeckillResult result = new SeckillResult();
result.setSuccess(false);
result.setMessage("Too many requests, please try again later");
return result;
}
}
}
2. Distributed Session Sharing
java
@Configuration
public class RedisSessionConfig {
@Bean
public RedisSerializer<Object> springSessionDefaultRedisSerializer() {
return new GenericJackson2JsonRedisSerializer();
}
// Configure the session repository
@Bean
public RedisIndexedSessionRepository sessionRepository(
RedisOperations<String, Object> sessionRedisOperations) {
RedisIndexedSessionRepository repository = new RedisIndexedSessionRepository(sessionRedisOperations);
// Session serialization
RedisSerializer<Object> serializer = new GenericJackson2JsonRedisSerializer();
repository.setDefaultSerializer(serializer);
// Session timeout
repository.setDefaultMaxInactiveInterval(1800); // 30 minutes
return repository;
}
}
java
@Service
public class SessionService {
// Custom session attribute management
public void setSessionAttribute(String sessionId, String key, Object value) {
String sessionKey = "spring:session:sessions:" + sessionId;
redisTemplate.opsForHash().put(sessionKey, key, value);
// Refresh the session expiry
String attrKey = "spring:session:sessions:expires:" + sessionId;
redisTemplate.expire(attrKey, 30, TimeUnit.MINUTES);
}
// Session statistics across the cluster (KEYS is used here for brevity; SCAN is preferable in production)
public SessionStats getSessionStats() {
Set<String> keys = redisTemplate.keys("spring:session:sessions:*");
SessionStats stats = new SessionStats();
stats.setTotalSessions(keys != null ? keys.size() : 0);
// Count sessions that still have a TTL
long activeSessions = Optional.ofNullable(keys).orElse(Collections.emptySet())
.stream()
.filter(key -> !key.contains("expires"))
.filter(key -> {
Long ttl = redisTemplate.getExpire(key);
return ttl != null && ttl > 0;
})
.count();
stats.setActiveSessions(activeSessions);
return stats;
}
}
3. Real-Time Statistics and Monitoring
java
@Service
public class RealtimeMetricsService {
// Record request metrics
public void recordRequest(String api, long duration, boolean success) {
String minuteKey = getMinuteKey();
// Batch the writes through a pipeline
redisTemplate.executePipelined((RedisCallback<Object>) connection -> {
// Total request count
connection.hIncrBy((minuteKey + ":total").getBytes(), api.getBytes(), 1);
// Success / error counters
String statusKey = success ? "success" : "error";
connection.hIncrBy((minuteKey + ":" + statusKey).getBytes(), api.getBytes(), 1);
// Latest response time per API (the zset keeps one score per API member)
connection.zAdd((minuteKey + ":duration").getBytes(), duration, api.getBytes());
// Keep at most the last 60 minutes of data
connection.expire((minuteKey + ":total").getBytes(), 3600);
connection.expire((minuteKey + ":success").getBytes(), 3600);
connection.expire((minuteKey + ":error").getBytes(), 3600);
connection.expire((minuteKey + ":duration").getBytes(), 3600);
return null;
});
}
// Aggregate metrics over the last N minutes
public Map<String, Object> getRealtimeMetrics(String api, int lastMinutes) {
Map<String, Object> metrics = new HashMap<>();
List<String> minuteKeys = getLastMinuteKeys(lastMinutes);
long total = 0;
long success = 0;
long error = 0;
List<Long> durations = new ArrayList<>();
for (String minuteKey : minuteKeys) {
// Totals
Object totalObj = redisTemplate.opsForHash().get(minuteKey + ":total", api);
if (totalObj != null) total += Long.parseLong(totalObj.toString());
// Successes
Object successObj = redisTemplate.opsForHash().get(minuteKey + ":success", api);
if (successObj != null) success += Long.parseLong(successObj.toString());
// Errors
Object errorObj = redisTemplate.opsForHash().get(minuteKey + ":error", api);
if (errorObj != null) error += Long.parseLong(errorObj.toString());
// Response times
Set<ZSetOperations.TypedTuple<Object>> durationsSet =
redisTemplate.opsForZSet().rangeWithScores(minuteKey + ":duration", 0, -1);
if (durationsSet != null) {
for (ZSetOperations.TypedTuple<Object> tuple : durationsSet) {
if (api.equals(tuple.getValue())) {
durations.add(tuple.getScore().longValue());
}
}
}
}
metrics.put("total", total);
metrics.put("success", success);
metrics.put("error", error);
metrics.put("successRate", total > 0 ? (double) success / total * 100 : 0);
// Latency statistics
if (!durations.isEmpty()) {
metrics.put("avgDuration", durations.stream().mapToLong(Long::longValue).average().orElse(0));
metrics.put("p95Duration", calculatePercentile(durations, 95));
metrics.put("p99Duration", calculatePercentile(durations, 99));
metrics.put("maxDuration", durations.stream().max(Long::compare).orElse(0L));
}
return metrics;
}
// Alert checks
public List<Alert> checkAlerts() {
List<Alert> alerts = new ArrayList<>();
String currentMinuteKey = getMinuteKey();
// Error-rate check: iterate the per-API totals recorded in the current minute's hash
Map<Object, Object> totals = redisTemplate.opsForHash().entries(currentMinuteKey + ":total");
for (Map.Entry<Object, Object> entry : totals.entrySet()) {
String api = entry.getKey().toString();
long total = Long.parseLong(entry.getValue().toString());
Object errorObj = redisTemplate.opsForHash().get(currentMinuteKey + ":error", api);
long error = errorObj != null ? Long.parseLong(errorObj.toString()) : 0;
if (total > 10) { // need at least 10 requests before alerting
double errorRate = (double) error / total;
if (errorRate > 0.1) { // error rate above 10%
alerts.add(new Alert(api, "ERROR_RATE_HIGH",
String.format("API %s error rate %.1f%%", api, errorRate * 100)));
}
}
}
return alerts;
}
private String getMinuteKey() {
return "metrics:" + System.currentTimeMillis() / (60 * 1000);
}
}
IV. Performance Optimization and Best Practices
1. Batch Operations with Pipeline
java
public class RedisPipelineExample {
public Map<String, User> batchGetUsers(List<String> userIds) {
List<Object> results = redisTemplate.executePipelined((RedisCallback<Object>) connection -> {
for (String userId : userIds) {
String key = "user:" + userId;
connection.get(key.getBytes());
}
return null;
});
Map<String, User> users = new HashMap<>();
for (int i = 0; i < userIds.size(); i++) {
Object result = results.get(i);
if (result != null) {
users.put(userIds.get(i), JSON.parseObject((String) result, User.class));
}
}
return users;
}
// Batch SET with an expiration
public void batchSetWithExpire(Map<String, String> keyValues, long expireSeconds) {
redisTemplate.executePipelined((RedisCallback<Object>) connection -> {
for (Map.Entry<String, String> entry : keyValues.entrySet()) {
connection.setEx(
entry.getKey().getBytes(),
expireSeconds,
entry.getValue().getBytes()
);
}
return null;
});
}
}
2. Atomicity with Lua Scripts
java
public class RedisLuaScripts {
// Stock-decrement script
private static final String DECR_STOCK_SCRIPT =
"local stock = tonumber(redis.call('get', KEYS[1])) " +
"if not stock then return -1 end " +
"if stock <= 0 then return 0 end " +
"redis.call('decr', KEYS[1]) " +
"return 1";
// Rate-limiting script (fixed window)
private static final String RATE_LIMIT_SCRIPT =
"local key = KEYS[1] " +
"local limit = tonumber(ARGV[1]) " +
"local window = tonumber(ARGV[2]) " +
"local current = redis.call('incr', key) " +
"if current == 1 then " +
" redis.call('expire', key, window) " +
"end " +
"if current > limit then " +
" return 0 " +
"else " +
" return 1 " +
"end";
// Distributed-lock script
private static final String DISTRIBUTED_LOCK_SCRIPT =
"if redis.call('setnx', KEYS[1], ARGV[1]) == 1 then " +
" redis.call('pexpire', KEYS[1], ARGV[2]) " +
" return 1 " +
"else " +
" return 0 " +
"end";
private final DefaultRedisScript<Long> decrStockScript;
private final DefaultRedisScript<Long> rateLimitScript;
private final DefaultRedisScript<Long> lockScript;
public RedisLuaScripts() {
decrStockScript = new DefaultRedisScript<>();
decrStockScript.setScriptText(DECR_STOCK_SCRIPT);
decrStockScript.setResultType(Long.class);
rateLimitScript = new DefaultRedisScript<>();
rateLimitScript.setScriptText(RATE_LIMIT_SCRIPT);
rateLimitScript.setResultType(Long.class);
lockScript = new DefaultRedisScript<>();
lockScript.setScriptText(DISTRIBUTED_LOCK_SCRIPT);
lockScript.setResultType(Long.class);
}
public boolean tryAcquireLock(String lockKey, String requestId, long expireMillis) {
Long result = redisTemplate.execute(
lockScript,
Collections.singletonList(lockKey),
requestId,
String.valueOf(expireMillis)
);
return result != null && result == 1;
}
}
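The class above only exposes the lock script through a helper. As a usage sketch (my addition; the key layout is an assumption), the rate-limiting script can be wrapped the same way, for example as another method on RedisLuaScripts that caps an API to a fixed number of calls per caller per window:
java
// Fixed-window rate limiting on top of RATE_LIMIT_SCRIPT.
// Returns true if the call is allowed, false once the caller exceeds `limit`
// calls within `windowSeconds`.
public boolean allowRequest(String api, String callerId, int limit, int windowSeconds) {
    String key = "rate:limit:" + api + ":" + callerId; // assumed key layout
    Long result = redisTemplate.execute(
            rateLimitScript,
            Collections.singletonList(key),
            String.valueOf(limit),
            String.valueOf(windowSeconds));
    return result != null && result == 1;
}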
3. Redis Cluster Production Configuration
yaml
spring:
  redis:
    cluster:
      nodes:
        - 192.168.1.101:6379
        - 192.168.1.102:6379
        - 192.168.1.103:6379
      max-redirects: 3
    lettuce:
      pool:
        max-active: 8
        max-idle: 8
        min-idle: 0
        max-wait: -1ms
      cluster:
        refresh:
          adaptive: true
          period: 2000ms
V. Monitoring and Operations
1. Redis Metrics Monitoring
java
@Component
public class RedisMetricsMonitor {
@Autowired
private RedisConnectionFactory connectionFactory;
public RedisMetrics getMetrics() {
RedisMetrics metrics = new RedisMetrics();
try (RedisConnection connection = connectionFactory.getConnection()) {
// INFO snapshot
Properties info = connection.info();
metrics.setUsedMemory(info.getProperty("used_memory"));
metrics.setTotalCommandsProcessed(info.getProperty("total_commands_processed"));
metrics.setConnectedClients(info.getProperty("connected_clients"));
metrics.setBlockedClients(info.getProperty("blocked_clients"));
metrics.setKeyspaceHits(info.getProperty("keyspace_hits"));
metrics.setKeyspaceMisses(info.getProperty("keyspace_misses"));
// Hit rate
long hits = Long.parseLong(metrics.getKeyspaceHits());
long misses = Long.parseLong(metrics.getKeyspaceMisses());
double hitRate = hits + misses > 0 ?
(double) hits / (hits + misses) * 100 : 0;
metrics.setHitRate(hitRate);
}
return metrics;
}
// Slow-log monitoring.
// RedisConnection does not expose a dedicated SLOWLOG method, so the raw command is sent
// through the generic execute(...) escape hatch; each reply entry is a nested list of
// [id, unix timestamp, duration in microseconds, [command, args...], ...].
public List<SlowQuery> getSlowQueries() {
List<SlowQuery> slowQueries = new ArrayList<>();
try (RedisConnection connection = connectionFactory.getConnection()) {
Object reply = connection.execute("SLOWLOG", "GET".getBytes(StandardCharsets.UTF_8));
if (reply instanceof List) {
for (Object item : (List<?>) reply) {
List<?> entry = (List<?>) item;
SlowQuery query = new SlowQuery();
query.setTimestamp(String.valueOf(entry.get(1)));
query.setExecutionTime(String.valueOf(entry.get(2)));
query.setCommand(String.valueOf(entry.get(3)));
slowQueries.add(query);
}
}
}
return slowQueries;
}
@Data
public static class RedisMetrics {
private String usedMemory;
private String totalCommandsProcessed;
private String connectedClients;
private String blockedClients;
private String keyspaceHits;
private String keyspaceMisses;
private double hitRate;
}
}
2. Connection Pool and Health Monitoring
java
@Configuration
public class RedisHealthConfig {
@Bean
public HealthIndicator redisHealthIndicator(RedisConnectionFactory connectionFactory) {
return () -> {
try {
RedisConnection connection = connectionFactory.getConnection();
connection.ping();
connection.close();
return Health.up().build();
} catch (Exception e) {
return Health.down(e).build();
}
};
}
// Connection metrics.
// LettuceConnectionFactory does not expose Lettuce pool internals directly, so as a
// simple gauge we report whether a connection can be obtained and answers PING.
@Bean
public MeterBinder lettuceMetrics(RedisConnectionFactory connectionFactory) {
return meterRegistry -> {
if (connectionFactory instanceof LettuceConnectionFactory) {
LettuceConnectionFactory lettuceFactory = (LettuceConnectionFactory) connectionFactory;
Gauge.builder("redis.connection.up", lettuceFactory, f -> {
try (RedisConnection connection = f.getConnection()) {
return "PONG".equalsIgnoreCase(connection.ping()) ? 1 : 0;
} catch (Exception e) {
return 0;
}
})
.register(meterRegistry);
}
};
}
}
VI. Summary and Best Practices
1. Choosing a Data Structure
| Business scenario | Recommended structure | Notes |
|---|---|---|
| Object caching | String / Hash | Hash suits partial updates |
| Leaderboards | Sorted Set | Range queries and ranking |
| Message queues | List / Stream | Stream supports consumer groups |
| Friend relations | Set | Set algebra (intersection, union, difference) |
| Geolocation | Geo | Purpose-built geo commands |
| UV counting | HyperLogLog | Extremely memory-efficient (approximate) |
| Check-ins | BitMap | Efficient bit operations |
2. Performance Tips
- Set sensible expirations: keep memory from growing without bound.
- Use pipelines: batch operations to cut network round trips.
- Use Lua scripts: keep multi-step operations atomic.
- Avoid big keys: keep a single key's value under roughly 10 KB.
- Tune the connection pool to your traffic (see the sketch after this list).
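For the connection-pool point, here is a minimal sketch of an explicit Lettuce pool configuration with Spring Data Redis; the host, port, timeout, and pool sizes are placeholder assumptions, and commons-pool2 must be on the classpath:
java
@Configuration
public class RedisPoolConfig {

    @Bean
    public LettuceConnectionFactory redisConnectionFactory() {
        // Placeholder connection details
        RedisStandaloneConfiguration server = new RedisStandaloneConfiguration("127.0.0.1", 6379);

        // Pool sizing: tune these numbers to your traffic profile
        GenericObjectPoolConfig<Object> poolConfig = new GenericObjectPoolConfig<>();
        poolConfig.setMaxTotal(16);
        poolConfig.setMaxIdle(8);
        poolConfig.setMinIdle(2);
        poolConfig.setMaxWaitMillis(500);

        LettuceClientConfiguration clientConfig = LettucePoolingClientConfiguration.builder()
                .poolConfig(poolConfig)
                .commandTimeout(Duration.ofSeconds(2))
                .build();

        return new LettuceConnectionFactory(server, clientConfig);
    }
}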
3. High Availability
- Master-replica replication: read/write splitting to scale reads.
- Sentinel: automatic failover (see the connection sketch after this list).
- Cluster: data sharding and horizontal scaling.
- Persistence: combine RDB and AOF.
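For the Sentinel option, a minimal connection sketch (master name, hosts, and ports are placeholders):
java
@Configuration
public class RedisSentinelConnectionConfig {

    @Bean
    public LettuceConnectionFactory sentinelConnectionFactory() {
        // Placeholder master name and sentinel addresses
        RedisSentinelConfiguration sentinelConfig = new RedisSentinelConfiguration()
                .master("mymaster")
                .sentinel("192.168.1.101", 26379)
                .sentinel("192.168.1.102", 26379)
                .sentinel("192.168.1.103", 26379);
        return new LettuceConnectionFactory(sentinelConfig);
    }
}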
4. Wrap-up
Redis is useful across a wide range of real-world workloads, from plain caching to real-time computation. The keys are to:
- Understand the characteristics of each data structure.
- Match the data structure to the business scenario.
- Balance data consistency against performance.
- Build solid monitoring and alerting.
Used well, Redis can dramatically improve system performance and user experience while taking significant load off the database.