Redis Stack 完整语法知识点及使用指南
一、RedisJSON
1.1 基本概念
RedisJSON 是 Redis 的 JSON 数据类型模块,支持存储、操作 JSON 文档。
1.2 核心命令及使用
JSON.SET - 设置JSON值
bash
# 语法: JSON.SET key path value [NX | XX]
# 设置简单JSON对象
JSON.SET user:1001 . '{"name":"张三","age":25,"city":"北京"}'
# 设置嵌套JSON
JSON.SET user:1001 .address '{"province":"北京","district":"朝阳区"}'
# 使用NX选项(仅当key不存在时设置)
JSON.SET user:1002 . '{"name":"李四"}' NX
# 使用XX选项(仅当key存在时设置)
JSON.SET user:1001 .age 26 XX
JSON.GET - 获取JSON值
bash
# 语法: JSON.GET key [INDENT indent] [NEWLINE newline] [SPACE space] [path ...]
# 获取整个JSON
JSON.GET user:1001
# 获取指定路径
JSON.GET user:1001 .name
JSON.GET user:1001 .address.province
# 格式化输出
JSON.GET user:1001 INDENT " " NEWLINE "\n" SPACE " "
JSON.ARRAPPEND - 数组追加
bash
# 语法: JSON.ARRAPPEND key path value [value ...]
# 初始化数组
JSON.SET user:1001 .hobbies '[]'
# 追加元素
JSON.ARRAPPEND user:1001 .hobbies "阅读"
JSON.ARRAPPEND user:1001 .hobbies "游泳" "编程"
JSON.OBJKEYS - 获取所有键
bash
# 语法: JSON.OBJKEYS key [path]
JSON.OBJKEYS user:1001 .
# 返回: ["name","age","city","address","hobbies"]
JSON.NUMINCRBY - 数值增加
bash
# 语法: JSON.NUMINCRBY key path value
JSON.NUMINCRBY user:1001 .age 1 # 年龄加1
JSON.NUMINCRBY user:1001 .age -2 # 年龄减2
1.3 Spring Boot 整合 RedisJSON
Maven依赖
xml
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-redis</artifactId>
</dependency>
<dependency>
<groupId>com.redis.om</groupId>
<artifactId>redis-om-spring</artifactId>
<version>0.8.3</version>
</dependency>
配置类
java
@Configuration
public class RedisConfig {

    /**
     * Lettuce connection factory pointing at a standalone Redis instance
     * on localhost:6379.
     */
    @Bean
    public RedisConnectionFactory redisConnectionFactory() {
        RedisStandaloneConfiguration standalone = new RedisStandaloneConfiguration();
        standalone.setHostName("localhost");
        standalone.setPort(6379);
        return new LettuceConnectionFactory(standalone);
    }

    /**
     * RedisTemplate with String keys and JSON-serialized values, applied to
     * both plain and hash operations.
     */
    @Bean
    public RedisTemplate<String, Object> redisTemplate() {
        StringRedisSerializer keySerializer = new StringRedisSerializer();
        GenericJackson2JsonRedisSerializer valueSerializer = new GenericJackson2JsonRedisSerializer();

        RedisTemplate<String, Object> template = new RedisTemplate<>();
        template.setConnectionFactory(redisConnectionFactory());
        template.setKeySerializer(keySerializer);
        template.setHashKeySerializer(keySerializer);
        template.setValueSerializer(valueSerializer);
        template.setHashValueSerializer(valueSerializer);
        return template;
    }
}
实体类
java
// User entity persisted as a RedisJSON document through Redis OM.
@Data
@NoArgsConstructor
@AllArgsConstructor
@Document(collection = "user") // Redis OM annotation
public class User {
// Document id; used to build the Redis key.
@Id
private String id;
// @Indexed fields participate in the search index Redis OM maintains.
@Indexed
private String name;
@Indexed
private Integer age;
@Indexed
private String city;
// Nested objects are stored inline inside the JSON document.
private Address address;
private List<String> hobbies;
// Embedded address value object.
@Data
public static class Address {
private String province;
private String district;
private String detail;
}
}
Repository 实现
java
@Repository
public class UserJsonRepository {

    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    private static final String KEY_PREFIX = "user:";

    // Root path of a RedisJSON document.
    private static final String ROOT_PATH = ".";

    /**
     * Saves a user as a RedisJSON document under key "user:{id}".
     */
    public void save(User user) {
        String key = KEY_PREFIX + user.getId();
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            // Explicit charset: keys and values may contain Chinese text.
            byte[] keyBytes = key.getBytes(StandardCharsets.UTF_8);
            byte[] path = ROOT_PATH.getBytes(StandardCharsets.UTF_8);
            byte[] value = JSON.toJSONString(user).getBytes(StandardCharsets.UTF_8);
            connection.execute("JSON.SET", keyBytes, path, value);
            return null;
        });
    }

    /**
     * Loads a user by id; returns null when the key does not exist.
     */
    public User findById(String id) {
        String key = KEY_PREFIX + id;
        Object raw = redisTemplate.execute((RedisCallback<Object>) connection ->
                connection.execute("JSON.GET",
                        key.getBytes(StandardCharsets.UTF_8),
                        ROOT_PATH.getBytes(StandardCharsets.UTF_8)));
        if (raw == null) {
            return null;
        }
        // JSON.GET replies with a bulk string which the driver surfaces as
        // byte[]; the previous direct (String) cast would throw
        // ClassCastException. Decode defensively.
        String jsonStr = raw instanceof byte[]
                ? new String((byte[]) raw, StandardCharsets.UTF_8)
                : raw.toString();
        return JSON.parseObject(jsonStr, User.class);
    }

    /**
     * Atomically adjusts the user's age by {@code increment} (may be
     * negative) via JSON.NUMINCRBY.
     */
    public void updateAge(String id, Integer increment) {
        String key = KEY_PREFIX + id;
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            connection.execute("JSON.NUMINCRBY",
                    key.getBytes(StandardCharsets.UTF_8),
                    ".age".getBytes(StandardCharsets.UTF_8),
                    increment.toString().getBytes(StandardCharsets.UTF_8));
            return null;
        });
    }

    /**
     * Appends one or more hobbies to the user's hobbies array.
     */
    public void addHobby(String id, String... hobbies) {
        String key = KEY_PREFIX + id;
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            // Every command argument must be a byte[]; the old Object[] mixed
            // raw Strings in, which does not match execute()'s byte[]...
            // signature. JSON.ARRAPPEND values must also be valid JSON, so
            // plain strings are serialized (quoted) before sending.
            byte[][] args = new byte[2 + hobbies.length][];
            args[0] = key.getBytes(StandardCharsets.UTF_8);
            args[1] = ".hobbies".getBytes(StandardCharsets.UTF_8);
            for (int i = 0; i < hobbies.length; i++) {
                args[2 + i] = JSON.toJSONString(hobbies[i]).getBytes(StandardCharsets.UTF_8);
            }
            return connection.execute("JSON.ARRAPPEND", args);
        });
    }
}
二、RediSearch
2.1 基本概念
RediSearch 是 Redis 的全文搜索引擎模块,支持索引、搜索和聚合。
2.2 核心命令及使用
FT.CREATE - 创建索引
bash
# 语法: FT.CREATE index [ON HASH|JSON] [PREFIX count prefix ...] SCHEMA field type [SORTABLE] [NOSTEM] ...
# 创建简单索引
FT.CREATE idx:users ON HASH PREFIX 1 user: SCHEMA name TEXT SORTABLE age NUMERIC city TAG
# 创建 JSON 索引
FT.CREATE idx:json_users ON JSON SCHEMA $.name AS name TEXT $.age AS age NUMERIC $.city AS city TAG
# 带分词的索引
FT.CREATE idx:articles ON HASH SCHEMA title TEXT WEIGHT 5.0 content TEXT body TEXT
FT.SEARCH - 搜索文档
bash
# 语法: FT.SEARCH index query [LIMIT offset num] [RETURN num field ...]
# 基本搜索
FT.SEARCH idx:users "张三"
# 带条件搜索(年龄大于20)
FT.SEARCH idx:users "@age:[20 +inf]"
# 组合搜索(城市为北京且年龄小于30)
FT.SEARCH idx:users "@city:{北京} @age:[-inf 30]"
# 分页搜索
FT.SEARCH idx:users "李" LIMIT 0 10
# 指定返回字段
FT.SEARCH idx:users "王" RETURN 3 name age city
FT.AGGREGATE - 聚合查询
bash
# 语法: FT.AGGREGATE index query [GROUPBY ...] [SORTBY ...] [LIMIT ...]
# 按城市分组统计人数
FT.AGGREGATE idx:users "*" GROUPBY 1 @city REDUCE COUNT 0 AS count
# 按年龄范围分组
FT.AGGREGATE idx:users "*"
APPLY "floor(@age/10)*10" AS age_group
GROUPBY 1 @age_group REDUCE COUNT 0 AS count
SORTBY 2 @age_group ASC
FT.ADD - 添加文档(Hash)
bash
# 语法: FT.ADD index docId score [NOSAVE] [REPLACE] [PAYLOAD ...] FIELDS field value ...
# 注意: FT.ADD 在 RediSearch 2.x 中已废弃,推荐直接用 HSET 写入带索引前缀的 Hash,由索引自动收录
FT.ADD idx:users user:1001 1.0 REPLACE FIELDS name "张三" age 25 city "北京"
FT.ADD idx:users user:1002 1.0 FIELDS name "李四" age 30 city "上海"
2.3 RediSearch 整合 RedisJSON
实体类定义
java
// Product entity stored as a RedisJSON document and indexed by Redis OM.
@Data
@Document // Redis OM annotation
public class Product {
// Document id (primary key).
@Id
private String id;
@Searchable // full-text searchable field
private String name;
// @Indexed fields support exact/range filtering in searches.
@Indexed
private Double price;
@Indexed
private String category;
@Indexed
private List<String> tags;
@Indexed
private LocalDateTime createTime;
// Free-form attributes; not indexed, stored as part of the JSON document.
private Map<String, Object> attributes;
}
Repository 实现
java
@Repository
public class ProductSearchRepository {

    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    private static final String INDEX_NAME = "idx:products";
    private static final String KEY_PREFIX = "product:";

    /**
     * Creates the RediSearch index over RedisJSON documents whose keys start
     * with "product:". Safe to call repeatedly: the "index already exists"
     * error is caught and logged.
     */
    public void createIndex() {
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            try {
                connection.execute("FT.CREATE",
                        utf8(INDEX_NAME),
                        utf8("ON"), utf8("JSON"),
                        utf8("PREFIX"), utf8("1"), utf8(KEY_PREFIX),
                        utf8("SCHEMA"),
                        utf8("$.name"), utf8("AS"), utf8("name"), utf8("TEXT"),
                        utf8("$.price"), utf8("AS"), utf8("price"), utf8("NUMERIC"),
                        utf8("$.category"), utf8("AS"), utf8("category"), utf8("TAG"),
                        utf8("$.tags.*"), utf8("AS"), utf8("tags"), utf8("TAG"));
            } catch (Exception e) {
                // FT.CREATE errors when the index already exists; treat as benign.
                System.out.println("Index may already exist: " + e.getMessage());
            }
            return null;
        });
    }

    /**
     * Stores a product as a RedisJSON document at "product:{id}".
     */
    public void save(Product product) {
        String key = KEY_PREFIX + product.getId();
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            connection.execute("JSON.SET",
                    utf8(key),
                    utf8("."),
                    utf8(JSON.toJSONString(product)));
            return null;
        });
    }

    /**
     * Runs FT.SEARCH with paging and a fixed projection (name, price,
     * category). Reply decoding is still a stub, as in the original.
     */
    public List<Product> search(String query, int offset, int limit) {
        return redisTemplate.execute((RedisCallback<List<Product>>) connection -> {
            Object result = connection.execute("FT.SEARCH",
                    utf8(INDEX_NAME),
                    utf8(query),
                    utf8("LIMIT"),
                    utf8(String.valueOf(offset)),
                    utf8(String.valueOf(limit)),
                    utf8("RETURN"), utf8("3"),
                    utf8("name"), utf8("price"), utf8("category"));
            List<Product> products = new ArrayList<>();
            if (result instanceof List) {
                // Reply layout: [total, docId1, fields1, docId2, fields2, ...]
                // TODO: decode field/value pairs into Product instances.
            }
            return products;
        });
    }

    /**
     * Builds a query from optional keyword / price-range / category filters.
     * With no filters at all the query falls back to "*" (match everything);
     * the previous empty-string query was a RediSearch syntax error.
     */
    public List<Product> searchWithConditions(String keyword,
                                              Double minPrice,
                                              Double maxPrice,
                                              String category) {
        StringBuilder query = new StringBuilder();
        if (keyword != null && !keyword.isEmpty()) {
            query.append("@name:").append(keyword);
        }
        if (minPrice != null || maxPrice != null) {
            if (query.length() > 0) {
                query.append(" ");
            }
            // Open-ended bounds default to -inf / +inf.
            query.append("@price:[")
                    .append(minPrice != null ? minPrice : "-inf")
                    .append(" ")
                    .append(maxPrice != null ? maxPrice : "+inf")
                    .append("]");
        }
        if (category != null && !category.isEmpty()) {
            if (query.length() > 0) {
                query.append(" ");
            }
            query.append("@category:{").append(category).append("}");
        }
        String finalQuery = query.length() > 0 ? query.toString() : "*";
        return search(finalQuery, 0, 10);
    }

    /**
     * Groups all documents by category and counts them (FT.AGGREGATE).
     * Reply decoding is still a stub, as in the original.
     */
    public Map<String, Long> aggregateByCategory() {
        return redisTemplate.execute((RedisCallback<Map<String, Long>>) connection -> {
            Object result = connection.execute("FT.AGGREGATE",
                    utf8(INDEX_NAME),
                    utf8("*"),
                    utf8("GROUPBY"), utf8("1"), utf8("@category"),
                    utf8("REDUCE"), utf8("COUNT"), utf8("0"),
                    utf8("AS"), utf8("count"));
            Map<String, Long> categoryCount = new HashMap<>();
            // TODO: decode [category, count] rows from the reply in `result`.
            return categoryCount;
        });
    }

    // Encodes a command argument explicitly as UTF-8 (values may be Chinese).
    private static byte[] utf8(String s) {
        return s.getBytes(StandardCharsets.UTF_8);
    }
}
2.4 RediSearch 中文检索
创建支持中文的索引
bash
# 需要安装中文分词插件
FT.CREATE idx:chinese_docs ON HASH SCHEMA title TEXT WEIGHT 3.0 content TEXT
# 添加中文文档
FT.ADD idx:chinese_docs doc:1 1.0 FIELDS title "Redis中文检索" content "Redis支持中文全文检索,使用jieba分词"
FT.ADD idx:chinese_docs doc:2 1.0 FIELDS title "Spring Boot整合" content "Spring Boot可以方便地整合Redis Stack"
中文搜索示例
java
@Component
public class ChineseSearchService {

    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    /**
     * Creates the index used for Chinese full-text search. The added
     * "LANGUAGE chinese" option makes RediSearch tokenize documents with its
     * Chinese tokenizer at index time; the original index omitted it, so
     * documents were segmented with default rules and Chinese queries could
     * miss even though the search side passed LANGUAGE.
     */
    public void createChineseIndex() {
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            try {
                connection.execute("FT.CREATE",
                        utf8("idx:chinese"),
                        utf8("ON"), utf8("HASH"),
                        utf8("LANGUAGE"), utf8("chinese"),
                        utf8("SCHEMA"),
                        utf8("title"), utf8("TEXT"), utf8("WEIGHT"), utf8("3.0"),
                        utf8("content"), utf8("TEXT"));
            } catch (Exception e) {
                // Typically "Index already exists" on re-run.
                System.out.println("Index creation error: " + e.getMessage());
            }
            return null;
        });
    }

    /**
     * Searches with LANGUAGE chinese so the query string is segmented with
     * the same tokenizer used at index time. Reply decoding is still a stub,
     * as in the original.
     */
    public List<Document> searchChinese(String keyword) {
        return redisTemplate.execute((RedisCallback<List<Document>>) connection -> {
            Object result = connection.execute("FT.SEARCH",
                    utf8("idx:chinese"),
                    utf8(keyword),
                    utf8("LANGUAGE"), utf8("chinese"),
                    utf8("LIMIT"), utf8("0"), utf8("10"));
            List<Document> documents = new ArrayList<>();
            if (result instanceof List) {
                // Reply layout: total count followed by alternating doc id /
                // field-value arrays. TODO: decode into Document objects.
            }
            return documents;
        });
    }

    // Explicit UTF-8 encoding — essential for Chinese keywords.
    private static byte[] utf8(String s) {
        return s.getBytes(StandardCharsets.UTF_8);
    }
}
三、RedisBloom
3.1 布隆过滤器 (Bloom Filter)
基本概念
布隆过滤器用于判断一个元素是否存在于集合中,可能会有误判,但不会漏判。
核心命令
bash
# BF.RESERVE - 创建布隆过滤器
# 语法: BF.RESERVE key error_rate capacity [EXPANSION expansion] [NONSCALING]
BF.RESERVE bf:user 0.01 10000
# BF.ADD - 添加单个元素
BF.ADD bf:user "user:1001"
# BF.MADD - 批量添加
BF.MADD bf:user "user:1002" "user:1003" "user:1004"
# BF.EXISTS - 检查元素是否存在
BF.EXISTS bf:user "user:1001" # 返回 1 (可能存在)
BF.EXISTS bf:user "user:9999" # 返回 0 (一定不存在)
# BF.MEXISTS - 批量检查
BF.MEXISTS bf:user "user:1001" "user:1002" "user:9999"
Java 实现
java
@Component
public class BloomFilterService {

    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    /**
     * Creates a bloom filter (BF.RESERVE).
     *
     * @param key       filter name
     * @param errorRate desired false-positive rate (0.01 = 1%)
     * @param capacity  expected number of elements
     */
    public void createFilter(String key, double errorRate, long capacity) {
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            connection.execute("BF.RESERVE",
                    utf8(key),
                    utf8(String.valueOf(errorRate)),
                    utf8(String.valueOf(capacity)));
            return null;
        });
    }

    /** Adds a single element (BF.ADD). */
    public void add(String key, String element) {
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            connection.execute("BF.ADD", utf8(key), utf8(element));
            return null;
        });
    }

    /** Adds several elements in one BF.MADD round trip. */
    public void addAll(String key, String... elements) {
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            connection.execute("BF.MADD", buildArgs(key, elements));
            return null;
        });
    }

    /**
     * Returns true when the element is POSSIBLY present. Bloom filters can
     * report false positives but never false negatives, so false means
     * "definitely absent".
     */
    public boolean exists(String key, String element) {
        Boolean result = redisTemplate.execute((RedisCallback<Boolean>) connection -> {
            Object value = connection.execute("BF.EXISTS", utf8(key), utf8(element));
            // Integer reply: 1 = possibly present, 0 = definitely absent.
            // Type-guard instead of blind-casting: the old `(Long) value == 1`
            // would NPE on a null reply.
            return value instanceof Long && (Long) value == 1;
        });
        return Boolean.TRUE.equals(result);
    }

    /** Batch membership check (BF.MEXISTS); one flag per input element. */
    public List<Boolean> existsAll(String key, String... elements) {
        return redisTemplate.execute((RedisCallback<List<Boolean>>) connection -> {
            List<Long> results = (List<Long>) connection.execute("BF.MEXISTS", buildArgs(key, elements));
            if (results == null) {
                // Defensive: never NPE the stream pipeline on a null reply.
                return Collections.emptyList();
            }
            return results.stream().map(r -> r == 1).collect(Collectors.toList());
        });
    }

    /**
     * Builds a byte[][] argument vector of key + elements. The previous
     * Object[] did not match execute()'s byte[]... signature.
     */
    private static byte[][] buildArgs(String key, String... elements) {
        byte[][] args = new byte[elements.length + 1][];
        args[0] = utf8(key);
        for (int i = 0; i < elements.length; i++) {
            args[i + 1] = utf8(elements[i]);
        }
        return args;
    }

    private static byte[] utf8(String s) {
        return s.getBytes(StandardCharsets.UTF_8);
    }
}
// Usage example
@Service
public class UserService {

    @Autowired
    private BloomFilterService bloomFilter;

    private static final String BLOOM_FILTER_KEY = "bf:users";

    /** Sizes the username bloom filter at startup (1% error, 100k capacity). */
    @PostConstruct
    public void init() {
        bloomFilter.createFilter(BLOOM_FILTER_KEY, 0.01, 100000);
    }

    /**
     * Returns true when the username is free. The bloom filter answers
     * "definitely unused" without touching the database; only a possible hit
     * falls through to the authoritative lookup.
     */
    public boolean isUsernameAvailable(String username) {
        boolean possiblyTaken = bloomFilter.exists(BLOOM_FILTER_KEY, username);
        if (!possiblyTaken) {
            // No false negatives: absent from the filter means truly unused.
            return true;
        }
        // Possible false positive — confirm against the database.
        return !checkDatabase(username);
    }

    /** Placeholder for the real persistence lookup. */
    private boolean checkDatabase(String username) {
        return false;
    }
}
3.2 布谷鸟过滤器 (Cuckoo Filter)
基本概念
布谷鸟过滤器是布隆过滤器的改进版,支持删除操作,空间利用率更高。
核心命令
bash
# CF.RESERVE - 创建布谷鸟过滤器
# 语法: CF.RESERVE key capacity [BUCKETSIZE bucketsize] [MAXITERATIONS maxiterations]
CF.RESERVE cf:user 10000
# CF.ADD - 添加元素
CF.ADD cf:user "user:1001"
# CF.ADDNX - 仅当不存在时添加
CF.ADDNX cf:user "user:1002"
# CF.DEL - 删除元素
CF.DEL cf:user "user:1001"
# CF.EXISTS - 检查是否存在
CF.EXISTS cf:user "user:1001"
# CF.COUNT - 统计元素出现次数
CF.COUNT cf:user "user:1001"
Java 实现
java
@Component
public class CuckooFilterService {

    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    /**
     * Creates a cuckoo filter sized for {@code capacity} items (CF.RESERVE).
     */
    public void createFilter(String key, long capacity) {
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            connection.execute("CF.RESERVE",
                    key.getBytes(StandardCharsets.UTF_8),
                    String.valueOf(capacity).getBytes(StandardCharsets.UTF_8));
            return null;
        });
    }

    /** Adds an element; true when the server confirms insertion (CF.ADD). */
    public boolean add(String key, String element) {
        return runFlagCommand("CF.ADD", key, element);
    }

    /** Adds only if absent; true when newly inserted (CF.ADDNX). */
    public boolean addIfNotExists(String key, String element) {
        return runFlagCommand("CF.ADDNX", key, element);
    }

    /** Removes one copy of the element; true when removed (CF.DEL). */
    public boolean delete(String key, String element) {
        return runFlagCommand("CF.DEL", key, element);
    }

    /** True when the element is possibly present (CF.EXISTS). */
    public boolean exists(String key, String element) {
        return runFlagCommand("CF.EXISTS", key, element);
    }

    /**
     * Runs a CF.* command taking (key, element) and returning an integer
     * flag; true when the reply is 1. Extracted because the four public
     * methods were identical except for the command name.
     */
    private boolean runFlagCommand(String command, String key, String element) {
        Long result = redisTemplate.execute((RedisCallback<Long>) connection ->
                (Long) connection.execute(command,
                        key.getBytes(StandardCharsets.UTF_8),
                        element.getBytes(StandardCharsets.UTF_8)));
        return result != null && result == 1;
    }
}
3.3 Top-K 过滤器
基本概念
Top-K 用于找出出现频率最高的 K 个元素。
核心命令
bash
# TOPK.RESERVE - 创建 Top-K 过滤器
# 语法: TOPK.RESERVE key topk [width depth decay]
TOPK.RESERVE topk:words 10 2000 7 0.9
# TOPK.ADD - 添加元素
TOPK.ADD topk:words "redis" "bloom" "redis" "search"
# TOPK.INCRBY - 增加计数
TOPK.INCRBY topk:words "redis" 5
# TOPK.QUERY - 查询元素是否在 Top-K 中
TOPK.QUERY topk:words "redis" "mysql"
# TOPK.LIST - 获取 Top-K 列表
TOPK.LIST topk:words
Java 实现
java
@Component
public class TopKService {

    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    /** Creates a Top-K sketch tracking the {@code topk} most frequent items. */
    public void createTopK(String key, int topk) {
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            connection.execute("TOPK.RESERVE",
                    utf8(key),
                    utf8(String.valueOf(topk)));
            return null;
        });
    }

    /** Adds elements, counting each occurrence once (TOPK.ADD). */
    public void add(String key, String... elements) {
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            connection.execute("TOPK.ADD", buildArgs(key, elements));
            return null;
        });
    }

    /** Increments one element's count by the given amount (TOPK.INCRBY). */
    public void incrementBy(String key, String element, int increment) {
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            connection.execute("TOPK.INCRBY",
                    utf8(key),
                    utf8(element),
                    utf8(String.valueOf(increment)));
            return null;
        });
    }

    /** One flag per element: is it currently in the Top-K set? (TOPK.QUERY) */
    public List<Boolean> query(String key, String... elements) {
        return redisTemplate.execute((RedisCallback<List<Boolean>>) connection -> {
            List<Long> results = (List<Long>) connection.execute("TOPK.QUERY", buildArgs(key, elements));
            if (results == null) {
                // Defensive: a null reply must not NPE the stream pipeline.
                return Collections.emptyList();
            }
            return results.stream().map(r -> r == 1).collect(Collectors.toList());
        });
    }

    /** Current Top-K members (TOPK.LIST). */
    public List<String> getTopK(String key) {
        return redisTemplate.execute((RedisCallback<List<String>>) connection -> {
            List<byte[]> results = (List<byte[]>) connection.execute("TOPK.LIST", utf8(key));
            if (results == null) {
                return Collections.emptyList();
            }
            return results.stream()
                    .map(b -> new String(b, StandardCharsets.UTF_8))
                    .collect(Collectors.toList());
        });
    }

    /**
     * key + elements as a byte[][]; the previous Object[] did not match
     * execute()'s byte[]... signature.
     */
    private static byte[][] buildArgs(String key, String... elements) {
        byte[][] args = new byte[elements.length + 1][];
        args[0] = utf8(key);
        for (int i = 0; i < elements.length; i++) {
            args[i + 1] = utf8(elements[i]);
        }
        return args;
    }

    private static byte[] utf8(String s) {
        return s.getBytes(StandardCharsets.UTF_8);
    }
}
3.4 T-Digest (百分位数估算)
基本概念
T-Digest 用于计算百分位数,适合大数据量的流式计算。
核心命令
bash
# TDIGEST.CREATE - 创建 T-Digest
# 语法: TDIGEST.CREATE key [COMPRESSION compression]
TDIGEST.CREATE tdigest:latency COMPRESSION 100
# TDIGEST.ADD - 添加观测值
TDIGEST.ADD tdigest:latency 10.5 23.7 15.2 18.9
# TDIGEST.MERGE - 合并多个 T-Digest
TDIGEST.MERGE tdigest:merged 2 tdigest:latency1 tdigest:latency2
# TDIGEST.QUANTILE - 查询分位数
TDIGEST.QUANTILE tdigest:latency 0.5 0.95 0.99
# TDIGEST.CDF - 查询累计分布函数
TDIGEST.CDF tdigest:latency 15 20 30
# TDIGEST.MIN - 获取最小值
TDIGEST.MIN tdigest:latency
# TDIGEST.MAX - 获取最大值
TDIGEST.MAX tdigest:latency
Java 实现
java
@Component
public class TDigestService {

    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    /**
     * Creates a t-digest sketch; higher compression trades memory for
     * accuracy (TDIGEST.CREATE).
     */
    public void createDigest(String key, int compression) {
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            connection.execute("TDIGEST.CREATE",
                    utf8(key),
                    utf8("COMPRESSION"),
                    utf8(String.valueOf(compression)));
            return null;
        });
    }

    /** Adds raw observations (TDIGEST.ADD). */
    public void add(String key, double... values) {
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            // byte[][] (not Object[]) to match execute()'s byte[]... signature.
            byte[][] params = new byte[values.length + 1][];
            params[0] = utf8(key);
            for (int i = 0; i < values.length; i++) {
                params[i + 1] = utf8(String.valueOf(values[i]));
            }
            connection.execute("TDIGEST.ADD", params);
            return null;
        });
    }

    /**
     * Estimated values at the requested quantiles (0.0–1.0), in input order
     * (TDIGEST.QUANTILE). Returns an empty list on a null reply instead of
     * throwing NPE as the original did.
     */
    public List<Double> quantile(String key, double... quantiles) {
        return redisTemplate.execute((RedisCallback<List<Double>>) connection -> {
            byte[][] params = new byte[quantiles.length + 1][];
            params[0] = utf8(key);
            for (int i = 0; i < quantiles.length; i++) {
                params[i + 1] = utf8(String.valueOf(quantiles[i]));
            }
            List<byte[]> results = (List<byte[]>) connection.execute("TDIGEST.QUANTILE", params);
            if (results == null) {
                return Collections.emptyList();
            }
            return results.stream()
                    .map(b -> Double.parseDouble(new String(b, StandardCharsets.UTF_8)))
                    .collect(Collectors.toList());
        });
    }

    /** Smallest observation, or NaN when the sketch is empty/missing. */
    public double min(String key) {
        return scalar("TDIGEST.MIN", key);
    }

    /** Largest observation, or NaN when the sketch is empty/missing. */
    public double max(String key) {
        return scalar("TDIGEST.MAX", key);
    }

    /**
     * Runs a single-key command whose reply is one numeric bulk string.
     * Shared by min() and max(), which were previously duplicated.
     */
    private double scalar(String command, String key) {
        String result = redisTemplate.execute((RedisCallback<String>) connection -> {
            byte[] value = (byte[]) connection.execute(command, utf8(key));
            return value != null ? new String(value, StandardCharsets.UTF_8) : null;
        });
        return result != null ? Double.parseDouble(result) : Double.NaN;
    }

    private static byte[] utf8(String s) {
        return s.getBytes(StandardCharsets.UTF_8);
    }
}
3.5 Count-Min Sketch (频率估计)
基本概念
Count-Min Sketch 是一种概率性数据结构,用于估算元素的出现频率。
核心命令
bash
# CMS.INITBYDIM - 按维度初始化
# 语法: CMS.INITBYDIM key width depth
CMS.INITBYDIM cms:wordcount 2000 10
# CMS.INITBYPROB - 按误差率初始化
# 语法: CMS.INITBYPROB key error probability
CMS.INITBYPROB cms:wordcount 0.001 0.01
# CMS.INCRBY - 增加计数
CMS.INCRBY cms:wordcount "redis" 5
CMS.INCRBY cms:wordcount "bloom" 3 "search" 2
# CMS.QUERY - 查询计数
CMS.QUERY cms:wordcount "redis" "bloom"
# CMS.MERGE - 合并
CMS.MERGE cms:merged 2 cms:wordcount1 cms:wordcount2
Java 实现
java
@Component
public class CountMinSketchService {

    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    /**
     * Initializes a Count-Min Sketch from a target error bound and the
     * probability of exceeding it (CMS.INITBYPROB).
     */
    public void initByProb(String key, double error, double probability) {
        redisTemplate.execute((RedisCallback<Object>) conn -> {
            conn.execute("CMS.INITBYPROB",
                    key.getBytes(),
                    String.valueOf(error).getBytes(),
                    String.valueOf(probability).getBytes());
            return null;
        });
    }

    /**
     * Applies several counter increments in one CMS.INCRBY call; each map
     * entry contributes an (item, delta) argument pair.
     */
    public void incrementBy(String key, Map<String, Integer> increments) {
        redisTemplate.execute((RedisCallback<Object>) conn -> {
            List<byte[]> args = new ArrayList<>();
            args.add(key.getBytes());
            for (Map.Entry<String, Integer> entry : increments.entrySet()) {
                args.add(entry.getKey().getBytes());
                args.add(String.valueOf(entry.getValue()).getBytes());
            }
            conn.execute("CMS.INCRBY", args.toArray(new byte[0][]));
            return null;
        });
    }

    /**
     * Estimated occurrence counts for the given items, in input order
     * (CMS.QUERY). Count-Min estimates may over-count but never under-count.
     */
    public List<Long> query(String key, String... items) {
        return (List<Long>) redisTemplate.execute((RedisCallback<List<Long>>) conn -> {
            List<byte[]> args = new ArrayList<>();
            args.add(key.getBytes());
            for (String item : items) {
                args.add(item.getBytes());
            }
            return (List<Long>) conn.execute("CMS.QUERY", args.toArray(new byte[0][]));
        });
    }
}
3.6 RoaringBitmap (位图压缩)
基本概念
RoaringBitmap 是高效的压缩位图实现,适合存储整数集合。注意:下方示例中的 SETBIT/BITOP/BITCOUNT/BITPOS 是 Redis 原生位图命令,并非 RedisBloom 专有的 RoaringBitmap 命令。
核心命令
bash
# 创建位图并设置位
SETBIT rb:users 1001 1
SETBIT rb:users 1002 1
# 位运算操作
BITOP AND rb:active_users rb:users rb:active
BITOP OR rb:all_users rb:users1 rb:users2
BITOP XOR rb:diff_users rb:users1 rb:users2
# 统计位为1的数量
BITCOUNT rb:users
# 查找第一个为1的位
BITPOS rb:users 1
Java 实现
java
@Component
public class RoaringBitmapService {

    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    /** Marks {@code userId} as present in the bitmap (SETBIT offset=userId). */
    public void addUser(String bitmapKey, long userId) {
        redisTemplate.opsForValue().setBit(bitmapKey, userId, true);
    }

    /** Marks many user ids in one round trip using pipelining. */
    public void addUsers(String bitmapKey, List<Long> userIds) {
        redisTemplate.executePipelined((RedisCallback<Object>) connection -> {
            // Encode the key once instead of per iteration.
            byte[] key = bitmapKey.getBytes(StandardCharsets.UTF_8);
            for (Long userId : userIds) {
                connection.setBit(key, userId, true);
            }
            return null;
        });
    }

    /** True when the user's bit is set (GETBIT). */
    public boolean containsUser(String bitmapKey, long userId) {
        return Boolean.TRUE.equals(redisTemplate.opsForValue().getBit(bitmapKey, userId));
    }

    /** Number of set bits (BITCOUNT); 0 when the key is missing. */
    public long countUsers(String bitmapKey) {
        Long count = redisTemplate.execute((RedisCallback<Long>) connection ->
                connection.bitCount(bitmapKey.getBytes(StandardCharsets.UTF_8)));
        // execute() may return null; avoid the unboxing NPE the original had.
        return count != null ? count : 0L;
    }

    /** Count of users present in BOTH bitmaps (BITOP AND + BITCOUNT). */
    public long intersectCount(String bitmapKey1, String bitmapKey2) {
        return bitOpCount(RedisStringCommands.BitOperation.AND, "temp:intersect:",
                bitmapKey1, bitmapKey2);
    }

    /** Count of users present in EITHER bitmap (BITOP OR + BITCOUNT). */
    public long unionCount(String bitmapKey1, String bitmapKey2) {
        return bitOpCount(RedisStringCommands.BitOperation.OR, "temp:union:",
                bitmapKey1, bitmapKey2);
    }

    /**
     * Runs a two-key BITOP into a temporary key, counts the result and
     * always deletes the temporary key. A UUID suffix avoids the collision
     * the original currentTimeMillis() naming had when two calls landed in
     * the same millisecond.
     */
    private long bitOpCount(RedisStringCommands.BitOperation op, String tempPrefix,
                            String key1, String key2) {
        String tempKey = tempPrefix + UUID.randomUUID();
        try {
            redisTemplate.execute((RedisCallback<Object>) connection -> {
                connection.bitOp(op,
                        tempKey.getBytes(StandardCharsets.UTF_8),
                        key1.getBytes(StandardCharsets.UTF_8),
                        key2.getBytes(StandardCharsets.UTF_8));
                return null;
            });
            Long count = redisTemplate.execute((RedisCallback<Long>) connection ->
                    connection.bitCount(tempKey.getBytes(StandardCharsets.UTF_8)));
            return count != null ? count : 0L;
        } finally {
            redisTemplate.delete(tempKey);
        }
    }
}
四、RedisTimeSeries
4.1 基本概念
RedisTimeSeries 是时序数据库模块,专门用于处理时间序列数据。
4.2 核心命令及使用
TS.CREATE - 创建时间序列
bash
# 语法: TS.CREATE key [RETENTION retention] [ENCODING encoding] [CHUNK_SIZE size] [LABELS label value...]
TS.CREATE ts:cpu:usage RETENTION 86400000 ENCODING COMPRESSED CHUNK_SIZE 128 LABELS host server1 region cn
# 创建带标签的时间序列
TS.CREATE ts:memory:usage LABELS host server1 type memory
TS.ADD - 添加数据点
bash
# 语法: TS.ADD key timestamp value [RETENTION retention] [ENCODING encoding] [CHUNK_SIZE size] [LABELS label value...]
# 添加当前时间戳
TS.ADD ts:cpu:usage * 45.5
# 添加指定时间戳
TS.ADD ts:cpu:usage 1640995200000 47.2
# 批量添加
TS.MADD ts:cpu:usage 1640995200000 45.2 ts:cpu:usage 1640995201000 46.8
TS.RANGE - 范围查询
bash
# 语法: TS.RANGE key fromTimestamp toTimestamp [FILTER filter...] [COUNT count]
# 查询最近1小时的数据
TS.RANGE ts:cpu:usage 1640991600000 1640995200000
# 限制返回数量
TS.RANGE ts:cpu:usage - + COUNT 100
# 聚合查询(按分钟平均)
TS.RANGE ts:cpu:usage - + AGGREGATION avg 60000
TS.GET - 获取最新数据
bash
# 语法: TS.GET key
TS.GET ts:cpu:usage
TS.INFO - 获取序列信息
bash
# 语法: TS.INFO key
TS.INFO ts:cpu:usage
4.3 Java 实现
java
@Component
public class TimeSeriesService {

    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    /**
     * Creates a time series with a retention window (milliseconds) and
     * optional labels (TS.CREATE).
     */
    public void createSeries(String key, long retentionMillis, Map<String, String> labels) {
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            List<byte[]> params = new ArrayList<>();
            params.add(utf8(key));
            params.add(utf8("RETENTION"));
            params.add(utf8(String.valueOf(retentionMillis)));
            params.add(utf8("ENCODING"));
            params.add(utf8("COMPRESSED"));
            if (labels != null && !labels.isEmpty()) {
                params.add(utf8("LABELS"));
                for (Map.Entry<String, String> entry : labels.entrySet()) {
                    params.add(utf8(entry.getKey()));
                    params.add(utf8(entry.getValue()));
                }
            }
            // toArray(new byte[0][]) keeps the varargs type byte[][]; a plain
            // toArray() yields Object[], which does not match execute().
            connection.execute("TS.CREATE", params.toArray(new byte[0][]));
            return null;
        });
    }

    /** Adds one sample at an explicit timestamp (TS.ADD). */
    public void add(String key, long timestamp, double value) {
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            connection.execute("TS.ADD",
                    utf8(key),
                    utf8(String.valueOf(timestamp)),
                    utf8(String.valueOf(value)));
            return null;
        });
    }

    /**
     * Adds many samples via TS.MADD. Each sample contributes the triplet
     * (key, timestamp, value); the original layout misplaced the key and
     * emitted a dangling trailing key argument.
     */
    public void addAll(String key, List<TimeSeriesPoint> points) {
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            byte[][] params = new byte[points.size() * 3][];
            for (int i = 0; i < points.size(); i++) {
                TimeSeriesPoint point = points.get(i);
                params[i * 3] = utf8(key);
                params[i * 3 + 1] = utf8(String.valueOf(point.getTimestamp()));
                params[i * 3 + 2] = utf8(String.valueOf(point.getValue()));
            }
            connection.execute("TS.MADD", params);
            return null;
        });
    }

    /**
     * Range query (TS.RANGE). When {@code aggType} is null or
     * {@code timeBucket} <= 0 the raw samples are returned without an
     * AGGREGATION clause — the original dereferenced aggType unconditionally
     * and threw NPE for that case (which MonitoringService relies on).
     */
    public List<TimeSeriesPoint> range(String key, long from, long to,
                                       AggregationType aggType, long timeBucket) {
        return redisTemplate.execute((RedisCallback<List<TimeSeriesPoint>>) connection -> {
            List<byte[]> params = new ArrayList<>();
            params.add(utf8(key));
            params.add(utf8(String.valueOf(from)));
            params.add(utf8(String.valueOf(to)));
            if (aggType != null && timeBucket > 0) {
                params.add(utf8("AGGREGATION"));
                params.add(utf8(aggType.name().toLowerCase()));
                params.add(utf8(String.valueOf(timeBucket)));
            }
            List<?> results = (List<?>) connection.execute("TS.RANGE",
                    params.toArray(new byte[0][]));
            return parsePoints(results);
        });
    }

    /** Latest sample (TS.GET), or null when the series is empty/missing. */
    public TimeSeriesPoint getLatest(String key) {
        return redisTemplate.execute((RedisCallback<TimeSeriesPoint>) connection -> {
            List<?> results = (List<?>) connection.execute("TS.GET", utf8(key));
            if (results != null && results.size() >= 2) {
                return new TimeSeriesPoint(toLong(results.get(0)), toDouble(results.get(1)));
            }
            return null;
        });
    }

    /**
     * Decodes a TS.RANGE reply. The documented shape is a list of
     * [timestamp, value] pairs; a flat alternating list is tolerated as a
     * fallback (the original assumed flat only).
     */
    private static List<TimeSeriesPoint> parsePoints(List<?> results) {
        List<TimeSeriesPoint> points = new ArrayList<>();
        if (results == null) {
            return points;
        }
        if (!results.isEmpty() && results.get(0) instanceof List) {
            for (Object element : results) {
                List<?> pair = (List<?>) element;
                if (pair.size() >= 2) {
                    points.add(new TimeSeriesPoint(toLong(pair.get(0)), toDouble(pair.get(1))));
                }
            }
        } else {
            for (int i = 0; i + 1 < results.size(); i += 2) {
                points.add(new TimeSeriesPoint(toLong(results.get(i)), toDouble(results.get(i + 1))));
            }
        }
        return points;
    }

    // Replies may surface as Long, byte[] or String depending on the driver;
    // decode tolerantly.
    private static long toLong(Object value) {
        if (value instanceof Long) {
            return (Long) value;
        }
        if (value instanceof byte[]) {
            return Long.parseLong(new String((byte[]) value, StandardCharsets.UTF_8));
        }
        return Long.parseLong(String.valueOf(value));
    }

    private static double toDouble(Object value) {
        if (value instanceof byte[]) {
            return Double.parseDouble(new String((byte[]) value, StandardCharsets.UTF_8));
        }
        return Double.parseDouble(String.valueOf(value));
    }

    private static byte[] utf8(String s) {
        return s.getBytes(StandardCharsets.UTF_8);
    }

    /** A single (timestamp, value) sample. */
    @Data
    @AllArgsConstructor
    public static class TimeSeriesPoint {
        private long timestamp;
        private double value;
    }

    /** Aggregations accepted by TS.RANGE's AGGREGATION clause. */
    public enum AggregationType {
        AVG, SUM, MIN, MAX, COUNT, FIRST, LAST
    }
}
// Usage example: a monitoring system built on TimeSeriesService.
@Service
public class MonitoringService {
@Autowired
private TimeSeriesService timeSeriesService;
// Series keys for the CPU and memory metrics.
private static final String CPU_SERIES = "ts:cpu:usage";
private static final String MEMORY_SERIES = "ts:memory:usage";
/**
 * Creates both monitored series at startup with a 7-day retention window
 * and descriptive labels.
 */
@PostConstruct
public void init() {
// Create the CPU monitoring series
Map<String, String> cpuLabels = new HashMap<>();
cpuLabels.put("metric", "cpu");
cpuLabels.put("unit", "percent");
timeSeriesService.createSeries(CPU_SERIES, 7 * 24 * 3600000L, cpuLabels);
// Create the memory monitoring series
Map<String, String> memoryLabels = new HashMap<>();
memoryLabels.put("metric", "memory");
memoryLabels.put("unit", "mb");
timeSeriesService.createSeries(MEMORY_SERIES, 7 * 24 * 3600000L, memoryLabels);
}
/**
 * Records one CPU usage sample at the current wall-clock time.
 */
public void recordCpuUsage(double usage) {
timeSeriesService.add(CPU_SERIES, System.currentTimeMillis(), usage);
}
/**
 * Per-minute average CPU usage over the last hour.
 */
public List<TimeSeriesService.TimeSeriesPoint> getHourlyCpuAvg() {
long now = System.currentTimeMillis();
long oneHourAgo = now - 3600000;
return timeSeriesService.range(CPU_SERIES, oneHourAgo, now,
TimeSeriesService.AggregationType.AVG, 60000);
}
/**
 * Samples from the last five minutes whose value exceeds the threshold.
 * NOTE(review): passes aggType = null and timeBucket = 0 — verify that
 * TimeSeriesService.range tolerates a null aggregation; as written there it
 * dereferences aggType unconditionally and would throw NPE.
 */
public List<TimeSeriesService.TimeSeriesPoint> detectAnomalies(double threshold) {
long now = System.currentTimeMillis();
long fiveMinutesAgo = now - 300000;
List<TimeSeriesService.TimeSeriesPoint> points =
timeSeriesService.range(CPU_SERIES, fiveMinutesAgo, now, null, 0);
return points.stream()
.filter(p -> p.getValue() > threshold)
.collect(Collectors.toList());
}
}
五、RedisGraph
5.1 基本概念
RedisGraph 是 Redis 的图数据库模块,使用 Cypher 查询语言。(注意:RedisGraph 已于 2023 年宣布停止维护,新项目建议评估其他图数据库方案。)
5.2 核心命令及使用
GRAPH.QUERY - 执行 Cypher 查询
bash
# 创建节点
GRAPH.QUERY social "CREATE (:Person {name: '张三', age: 25})"
GRAPH.QUERY social "CREATE (:Person {name: '李四', age: 28})"
GRAPH.QUERY social "CREATE (:Person {name: '王五', age: 30})"
# 创建关系
GRAPH.QUERY social "MATCH (a:Person {name: '张三'}), (b:Person {name: '李四'}) CREATE (a)-[:FRIEND]->(b)"
GRAPH.QUERY social "MATCH (a:Person {name: '张三'}), (b:Person {name: '王五'}) CREATE (a)-[:COLLEAGUE]->(b)"
# 查询
GRAPH.QUERY social "MATCH (p:Person) RETURN p.name, p.age"
# 条件查询
GRAPH.QUERY social "MATCH (p:Person) WHERE p.age > 25 RETURN p.name"
# 关系查询
GRAPH.QUERY social "MATCH (a:Person)-[:FRIEND]->(b:Person) RETURN a.name, b.name"
# 路径查询
GRAPH.QUERY social "MATCH path = (a:Person {name: '张三'})-[:FRIEND*1..2]->(b:Person) RETURN path"
5.3 Java 实现
java
@Component
public class GraphService {
@Autowired
private RedisTemplate<String, Object> redisTemplate;
private static final String GRAPH_NAME = "social";
/**
 * Runs a raw Cypher query against the "social" graph via GRAPH.QUERY and
 * decodes the reply with parseResult (defined elsewhere in this class, not
 * shown in this excerpt).
 */
public GraphResult executeQuery(String cypher) {
return (GraphResult) redisTemplate.execute((RedisCallback<GraphResult>) connection -> {
// NOTE(review): getBytes() uses the platform default charset; prefer an
// explicit UTF-8 charset since queries contain Chinese text.
Object result = connection.execute("GRAPH.QUERY",
GRAPH_NAME.getBytes(),
cypher.getBytes()
);
return parseResult(result);
});
}
/**
 * Creates a Person node with the given name, age and optional extra
 * properties (stored as strings).
 */
public void createPerson(String name, int age, Map<String, Object> properties) {
    // Single quotes in values are escaped so they cannot terminate the
    // Cypher string literal early (query-injection hardening; the original
    // concatenated raw input).
    StringBuilder cypher = new StringBuilder("CREATE (:Person {name: '")
            .append(name.replace("'", "\\'")).append("', age: ").append(age);
    if (properties != null) {
        for (Map.Entry<String, Object> entry : properties.entrySet()) {
            cypher.append(", ").append(entry.getKey()).append(": '")
                    .append(String.valueOf(entry.getValue()).replace("'", "\\'"))
                    .append("'");
        }
    }
    cypher.append("})");
    executeQuery(cypher.toString());
}
/**
 * Creates a typed relationship between two existing Person nodes, with
 * optional string properties on the relationship.
 */
public void createRelationship(String person1, String person2,
                               String relationshipType, Map<String, Object> properties) {
    // Escape single quotes so values cannot break out of the Cypher string
    // literal (query-injection hardening; the original concatenated raw input).
    StringBuilder cypher = new StringBuilder()
            .append("MATCH (a:Person {name: '")
            .append(person1.replace("'", "\\'")).append("'}), ")
            .append("(b:Person {name: '")
            .append(person2.replace("'", "\\'")).append("'}) ")
            .append("CREATE (a)-[r:").append(relationshipType).append(" {");
    if (properties != null) {
        boolean first = true;
        for (Map.Entry<String, Object> entry : properties.entrySet()) {
            if (!first) {
                cypher.append(", ");
            }
            cypher.append(entry.getKey()).append(": '")
                    .append(String.valueOf(entry.getValue()).replace("'", "\\'"))
                    .append("'");
            first = false;
        }
    }
    cypher.append("}]->(b)");
    executeQuery(cypher.toString());
}
/**
* 查找朋友的朋友
*/
public List<Person> findFriendsOfFriends(String name, int depth) {
String cypher = String.format(
"MATCH (p:Person {name: '%s'})-[:FRIEND*1..%d]->(friend) " +
"RETURN DISTINCT friend.name, friend.age", name, depth
);
GraphResult result = executeQuery(cypher);
return parsePersons(result);
}
/**
* 查找共同朋友
*/
public List<Person> findMutualFriends(String person1, String person2) {
String cypher = String.format(
"MATCH (a:Person {name: '%s'})-[:FRIEND]->(common)<-[:FRIEND]-(b:Person {name: '%s'}) " +
"RETURN common.name, common.age",
person1, person2
);
GraphResult result = executeQuery(cypher);
return parsePersons(result);
}
/**
* 最短路径
*/
public List<Person> findShortestPath(String start, String end) {
String cypher = String.format(
"MATCH path = shortestPath((start:Person {name: '%s'})-[:FRIEND*]-(end:Person {name: '%s'})) " +
"RETURN nodes(path)",
start, end
);
GraphResult result = executeQuery(cypher);
return parsePath(result);
}
/**
* 推荐朋友(基于共同好友数量)
*/
public List<Person> recommendFriends(String name, int limit) {
String cypher = String.format(
"MATCH (p:Person {name: '%s'})-[:FRIEND]->(friend:Person) " +
"MATCH (friend)-[:FRIEND]->(candidate:Person) " +
"WHERE NOT (p)-[:FRIEND]->(candidate) AND p <> candidate " +
"RETURN candidate.name, candidate.age, COUNT(friend) AS commonFriends " +
"ORDER BY commonFriends DESC LIMIT %d",
name, limit
);
GraphResult result = executeQuery(cypher);
return parsePersons(result);
}
/**
* 解析结果为 Person 对象
*/
private List<Person> parsePersons(GraphResult result) {
List<Person> persons = new ArrayList<>();
// 解析逻辑...
return persons;
}
/**
* 解析路径
*/
private List<Person> parsePath(GraphResult result) {
List<Person> path = new ArrayList<>();
// 解析逻辑...
return path;
}
/**
* 解析图查询结果
*/
private GraphResult parseResult(Object result) {
// 解析 Redis 返回的结果
GraphResult graphResult = new GraphResult();
// 实现解析逻辑...
return graphResult;
}
@Data
public static class Person {
private String name;
private int age;
private Map<String, Object> properties;
}
@Data
public static class GraphResult {
private List<String> columns;
private List<Map<String, Object>> rows;
}
}
六、Redis-Cell
6.1 基本概念
Redis-Cell 是 Redis 的限流模块,实现令牌桶算法。
6.2 核心命令及使用
bash
# CL.THROTTLE - 限流命令
# 语法: CL.THROTTLE key max_burst count_per_period period [quantity]
# 参数说明:
# key: 限流键名
# max_burst: 最大突发量(实际桶容量 = max_burst + 1)
# count_per_period: 每个周期允许的请求数
# period: 时间周期(秒)
# quantity: 本次请求数量(默认1)
# (注:Redis-Cell 并无 strict 参数)
# 创建限流规则(每60秒最多10个请求,突发容量20)
CL.THROTTLE api:user:1001 20 10 60
# 响应说明(共5个整数):
# 1) 是否被限流 (0=允许, 1=拒绝)
# 2) 总容量上限 (X-RateLimit-Limit,= max_burst + 1)
# 3) 剩余可用配额 (X-RateLimit-Remaining)
# 4) 重试等待时间(秒,-1 表示本次未被限流,Retry-After)
# 5) 配额完全恢复所需时间(秒,X-RateLimit-Reset)
# 批量请求
CL.THROTTLE api:user:1001 20 10 60 5
6.3 Java 实现
java
@Component
public class RateLimiterService {

    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    /**
     * Rate-limit check backed by the Redis-Cell CL.THROTTLE command.
     *
     * CL.THROTTLE replies with exactly FIVE integers:
     *   0) 0 = allowed, 1 = limited
     *   1) total limit (X-RateLimit-Limit, i.e. max_burst + 1)
     *   2) remaining quota (X-RateLimit-Remaining)
     *   3) Retry-After in seconds (-1 when the request was allowed)
     *   4) seconds until the bucket is completely refilled (X-RateLimit-Reset)
     * The previous implementation read only four elements and exposed the
     * static total limit (index 1) as the "current capacity", which made
     * getRemainingQuota() always report a constant.
     *
     * @param key      rate-limit key
     * @param capacity max burst (bucket size)
     * @param rate     allowed operations per period
     * @param period   period length in seconds
     * @param quantity tokens consumed by this request (0 = peek only)
     * @return parsed limit decision
     */
    @SuppressWarnings("unchecked")
    public RateLimitResult check(String key, int capacity, int rate,
                                 int period, int quantity) {
        return (RateLimitResult) redisTemplate.execute((RedisCallback<RateLimitResult>) connection -> {
            List<Object> results = (List<Object>) connection.execute("CL.THROTTLE",
                    key.getBytes(),
                    String.valueOf(capacity).getBytes(),
                    String.valueOf(rate).getBytes(),
                    String.valueOf(period).getBytes(),
                    String.valueOf(quantity).getBytes()
            );
            RateLimitResult result = new RateLimitResult();
            result.setLimited(toLong(results.get(0)) == 1);
            // Index 2 (not 1) is the remaining quota.
            result.setCurrentCapacity(toLong(results.get(2)));
            // Index 3 is Retry-After in SECONDS (-1 if allowed); converted
            // to millis to keep the existing field contract.
            long retryAfterSeconds = toLong(results.get(3));
            result.setRefillTimeMillis(retryAfterSeconds < 0 ? -1 : retryAfterSeconds * 1000);
            result.setResetTimeSeconds(toLong(results.get(4)));
            return result;
        });
    }

    /**
     * Converts a raw reply element to a long. Module commands return integer
     * replies, which the connector may surface as Long or as raw bytes
     * depending on the client — handle both instead of blindly casting.
     */
    private static long toLong(Object raw) {
        if (raw instanceof Long) {
            return (Long) raw;
        }
        if (raw instanceof byte[]) {
            return Long.parseLong(new String((byte[]) raw));
        }
        return Long.parseLong(String.valueOf(raw));
    }

    /**
     * Single-request convenience check.
     *
     * @return true when the request is allowed
     */
    public boolean allow(String key, int capacity, int rate, int period) {
        RateLimitResult result = check(key, capacity, rate, period, 1);
        return !result.isLimited();
    }

    /**
     * Peeks at the remaining quota without consuming any tokens
     * (quantity 0 leaves the bucket untouched).
     */
    public long getRemainingQuota(String key, int capacity, int rate, int period) {
        RateLimitResult result = check(key, capacity, rate, period, 0);
        return result.getCurrentCapacity();
    }

    @Data
    public static class RateLimitResult {
        // true when the request was rejected
        private boolean limited;
        // remaining quota after this call (X-RateLimit-Remaining)
        private long currentCapacity;
        // Retry-After converted to millis; -1 when the request was allowed
        private long refillTimeMillis;
        // seconds until the bucket is fully refilled (X-RateLimit-Reset)
        private long resetTimeSeconds;
    }
}
// Usage example: API rate limiting
@RestController
public class ApiController {

    @Autowired
    private RateLimiterService rateLimiter;

    /** Per-user read endpoint: at most 10 requests per user per minute (burst 20). */
    @GetMapping("/api/user/{userId}")
    public ResponseEntity<?> getUserInfo(@PathVariable String userId) {
        String limiterKey = "api:user:" + userId;
        boolean allowed = rateLimiter.allow(limiterKey, 20, 10, 60);
        if (allowed) {
            // Business logic
            return ResponseEntity.ok("User info");
        }
        return ResponseEntity.status(429)
                .body("Too Many Requests");
    }

    /** Order endpoint: each order consumes 2 quota units of a 100-capacity bucket. */
    @PostMapping("/api/order")
    public ResponseEntity<?> createOrder(@RequestBody OrderRequest request) {
        String limiterKey = "api:order:" + request.getUserId();
        RateLimiterService.RateLimitResult decision =
                rateLimiter.check(limiterKey, 100, 10, 60, 2);
        if (decision.isLimited()) {
            String message = "Rate limit exceeded. Please try again in "
                    + decision.getResetTimeSeconds() + " seconds";
            return ResponseEntity.status(429).body(message);
        }
        // Business logic
        return ResponseEntity.ok("Order created");
    }
}
七、RedisAI
7.1 基本概念
RedisAI 是 Redis 的 AI 模块,支持在 Redis 中运行深度学习模型。
7.2 核心命令及使用
bash
# AI.MODELSTORE - 存储模型
# 语法: AI.MODELSTORE key {TF|TORCH|ONNX} {CPU|GPU} [TAG tag] [INPUTS count input...] [OUTPUTS count output...] BLOB model_blob
# AI.TENSORSET - 设置张量
# 语法: AI.TENSORSET key dtype shape [shape ...] [BLOB data | VALUES val ...]
# AI.MODELEXECUTE - 运行模型(旧版 AI.MODELRUN 不带 count 参数,已被弃用)
# 语法: AI.MODELEXECUTE model_key INPUTS count input... OUTPUTS count output...
# AI.TENSORGET - 获取张量
# 语法: AI.TENSORGET key [META] [BLOB | VALUES]
7.3 Java 实现
java
@Component
public class AIService {

    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    /**
     * Stores a model blob via AI.MODELSTORE.
     * Fix: AI.MODELSTORE takes the backend and device as positional
     * arguments — the previous code sent spurious "MODEL" / "DEVICE" tokens.
     *
     * @param modelKey  Redis key for the model
     * @param modelType backend name: TF, TORCH or ONNX
     * @param device    execution device: CPU or GPU
     * @param modelBlob serialized model bytes
     */
    public void loadModel(String modelKey, String modelType,
                          String device, byte[] modelBlob) {
        // NOTE(review): TF models additionally require INPUTS/OUTPUTS name
        // lists before BLOB — extend the argument list before using TF.
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            connection.execute("AI.MODELSTORE",
                    modelKey.getBytes(),
                    modelType.getBytes(),
                    device.getBytes(),
                    "BLOB".getBytes(),
                    modelBlob
            );
            return null;
        });
    }

    /**
     * Writes an input tensor via AI.TENSORSET.
     * Fixes: the command has no "DATA" keyword — the layout is
     * {@code AI.TENSORSET key dtype shape... BLOB data}; and the argument
     * array is now byte[][] so it matches execute(String, byte[]...)
     * (the old Object[] did not compile against that signature).
     *
     * @param tensorKey Redis key for the tensor
     * @param dataType  element type, e.g. FLOAT, DOUBLE, INT32
     * @param shape     tensor dimensions
     * @param data      raw binary payload (row-major)
     */
    public void setTensor(String tensorKey, String dataType,
                          List<Long> shape, byte[] data) {
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            byte[][] args = new byte[4 + shape.size()][];
            args[0] = tensorKey.getBytes();
            args[1] = dataType.getBytes();
            for (int i = 0; i < shape.size(); i++) {
                args[2 + i] = String.valueOf(shape.get(i)).getBytes();
            }
            args[2 + shape.size()] = "BLOB".getBytes();
            args[3 + shape.size()] = data;
            connection.execute("AI.TENSORSET", args);
            return null;
        });
    }

    /**
     * Runs inference with AI.MODELEXECUTE.
     * Fixes: the INPUTS/OUTPUTS count arguments belong to AI.MODELEXECUTE
     * (legacy AI.MODELRUN takes no counts), and the argument array was one
     * element too small (needs 5 fixed slots, not 4), which would have
     * thrown ArrayIndexOutOfBoundsException.
     *
     * @param modelKey key of a stored model
     * @param inputs   keys of input tensors
     * @param outputs  keys where output tensors are written
     */
    public void runInference(String modelKey, List<String> inputs,
                             List<String> outputs) {
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            byte[][] args = new byte[5 + inputs.size() + outputs.size()][];
            int idx = 0;
            args[idx++] = modelKey.getBytes();
            args[idx++] = "INPUTS".getBytes();
            args[idx++] = String.valueOf(inputs.size()).getBytes();
            for (String input : inputs) {
                args[idx++] = input.getBytes();
            }
            args[idx++] = "OUTPUTS".getBytes();
            args[idx++] = String.valueOf(outputs.size()).getBytes();
            for (String output : outputs) {
                args[idx++] = output.getBytes();
            }
            connection.execute("AI.MODELEXECUTE", args);
            return null;
        });
    }

    /**
     * Reads a result tensor as raw bytes.
     * Fix: request BLOB rather than VALUES — VALUES returns an array reply,
     * which can never satisfy the byte[] cast; BLOB returns the binary
     * payload this method's contract promises.
     */
    public byte[] getTensor(String tensorKey) {
        return (byte[]) redisTemplate.execute((RedisCallback<byte[]>) connection -> {
            return (byte[]) connection.execute("AI.TENSORGET",
                    tensorKey.getBytes(),
                    "BLOB".getBytes()
            );
        });
    }
}
八、综合性案例
案例1:电商系统综合应用
java
/**
 * E-commerce composite example.
 * Combines RedisJSON, RediSearch, RedisBloom, RedisTimeSeries,
 * Top-K and Count-Min Sketch.
 */
@Service
public class EcommerceSystem {

    @Autowired
    private UserJsonRepository userRepository;
    @Autowired
    private ProductSearchRepository productRepository;
    @Autowired
    private BloomFilterService bloomFilter;
    @Autowired
    private TimeSeriesService timeSeriesService;
    @Autowired
    private TopKService topKService;
    // Fix: redisTemplate was used below (stock counter, order persistence)
    // but never declared in this class.
    @Autowired
    private RedisTemplate<String, Object> redisTemplate;
    // Fix: cmsService was used in recordUserBehavior but never declared.
    // NOTE(review): type name assumed from usage — align with the actual
    // Count-Min Sketch service bean defined earlier in this guide.
    @Autowired
    private CmsService cmsService;

    /**
     * Purchase flow for a single product.
     *
     * @param request user + product to buy
     * @return the created order plus the remaining stock
     */
    @Transactional
    public OrderResult purchaseOrder(PurchaseRequest request) {
        String userId = request.getUserId();
        String productId = request.getProductId();
        // 1. Cheap existence probe via Bloom filter (no false negatives).
        if (!bloomFilter.exists("bf:users", userId)) {
            throw new UserNotFoundException("User not found: " + userId);
        }
        // 2. Load user and product documents (RedisJSON-backed repositories).
        User user = userRepository.findById(userId);
        Product product = productRepository.findById(productId);
        if (user == null || product == null) {
            throw new NotFoundException("User or product not found");
        }
        // 3. Atomically reserve one unit of stock with a Redis counter.
        String stockKey = "stock:" + productId;
        Long stock = redisTemplate.opsForValue().decrement(stockKey);
        // Fix: decrement() can return null (e.g. inside a pipeline/tx);
        // treat that as a failed reservation instead of throwing an NPE.
        if (stock == null) {
            throw new OutOfStockException("Product out of stock");
        }
        if (stock < 0) {
            // Roll the counter back so stock never stays negative.
            redisTemplate.opsForValue().increment(stockKey);
            throw new OutOfStockException("Product out of stock");
        }
        // 4. Create and persist the order document.
        Order order = new Order();
        order.setId(UUID.randomUUID().toString());
        order.setUserId(userId);
        order.setProductId(productId);
        order.setAmount(product.getPrice());
        order.setStatus(OrderStatus.CREATED);
        order.setCreateTime(LocalDateTime.now());
        saveOrder(order);
        // 5. Record behavior for the recommendation pipeline.
        recordUserBehavior(userId, productId);
        // 6. Record the sale in a per-product time series.
        timeSeriesService.add("ts:sales:" + productId,
                System.currentTimeMillis(), product.getPrice());
        // 7. Bump the hot-products Top-K ranking.
        topKService.incrementBy("topk:hot_products", productId, 1);
        return new OrderResult(order, stock);
    }

    /**
     * Persists the order under its own key.
     * Fix: saveOrder(...) was called by purchaseOrder but never defined.
     */
    private void saveOrder(Order order) {
        redisTemplate.opsForValue().set("order:" + order.getId(), order);
    }

    /**
     * Product search built on RediSearch query syntax.
     *
     * @param keyword  full-text term for the name field (nullable)
     * @param minPrice inclusive lower price bound (nullable = -inf)
     * @param maxPrice inclusive upper price bound (nullable = +inf)
     * @param category exact TAG match (nullable)
     * @param page     1-based page index
     * @param size     page size
     */
    public List<Product> searchProducts(String keyword,
                                        Double minPrice,
                                        Double maxPrice,
                                        String category,
                                        int page,
                                        int size) {
        StringBuilder query = new StringBuilder();
        if (keyword != null && !keyword.isEmpty()) {
            // NOTE(review): keyword is interpolated unescaped — RediSearch
            // special characters in user input will alter the query;
            // sanitize upstream if the input is untrusted.
            query.append("@name:").append(keyword);
        }
        if (minPrice != null || maxPrice != null) {
            if (query.length() > 0) query.append(" ");
            query.append("@price:[");
            query.append(minPrice != null ? minPrice : "-inf");
            query.append(" ");
            query.append(maxPrice != null ? maxPrice : "+inf");
            query.append("]");
        }
        if (category != null && !category.isEmpty()) {
            if (query.length() > 0) query.append(" ");
            query.append("@category:{").append(category).append("}");
        }
        // Fix: clamp so page values < 1 cannot produce a negative offset.
        int offset = Math.max(0, (page - 1) * size);
        return productRepository.search(query.toString(), offset, size);
    }

    /**
     * Personalized recommendations: same-category products the user has
     * not bought yet, ranked by score then price.
     */
    public List<Product> getRecommendations(String userId, int limit) {
        // 1. Most-purchased products for this user (Top-K).
        List<String> purchasedProducts = topKService.getTopK("user:purchased:" + userId);
        // 2. Collect the categories of those products.
        Set<String> categories = new HashSet<>();
        for (String productId : purchasedProducts) {
            Product product = productRepository.findById(productId);
            if (product != null) {
                categories.add(product.getCategory());
            }
        }
        // 3. Gather candidates from each category.
        List<Product> recommendations = new ArrayList<>();
        for (String category : categories) {
            List<Product> products = productRepository.searchWithConditions(
                    null, null, null, category
            );
            recommendations.addAll(products);
        }
        // 4. Drop already-purchased products.
        recommendations.removeIf(p -> purchasedProducts.contains(p.getId()));
        // 5. Rank by score (desc), then price (asc) as tie-breaker.
        recommendations.sort((p1, p2) -> {
            int scoreCompare = p2.getScore().compareTo(p1.getScore());
            if (scoreCompare != 0) return scoreCompare;
            return p1.getPrice().compareTo(p2.getPrice());
        });
        return recommendations.stream().limit(limit).collect(Collectors.toList());
    }

    /**
     * Records one purchase event across the analytics structures.
     */
    private void recordUserBehavior(String userId, String productId) {
        // Per-user purchase history (Top-K).
        topKService.incrementBy("user:purchased:" + userId, productId, 1);
        // Global purchase frequency (Count-Min Sketch).
        cmsService.incrementBy("cms:product_purchases",
                Collections.singletonMap(productId, 1));
        // Real-time sales counter (time series).
        timeSeriesService.add("ts:realtime_sales",
                System.currentTimeMillis(), 1);
    }

    @Data
    public static class OrderResult {
        private Order order;
        private Long remainingStock;
        public OrderResult(Order order, Long remainingStock) {
            this.order = order;
            this.remainingStock = remainingStock;
        }
    }
}
案例2:社交网络分析系统
java
/**
 * Social network analysis system combining RedisGraph, RediSearch,
 * RedisBloom and RoaringBitmap.
 *
 * NOTE(review): this class references several members that are not defined
 * in the listing and will not compile as shown:
 *   - a timeSeriesService field (used by analyzeUserActivity) is never declared;
 *   - findMutualFriends(String, int), getUserById(String),
 *     convertToProduct(SocialUser) and convertToSocialUser(Product) are
 *     called but not implemented anywhere in this class.
 * Confirm these exist elsewhere or add them before use.
 */
@Service
public class SocialNetworkAnalysis {

    @Autowired
    private GraphService graphService;
    @Autowired
    private ProductSearchRepository searchRepository;
    @Autowired
    private BloomFilterService bloomFilter;
    @Autowired
    private RoaringBitmapService bitmapService;

    /**
     * Registers a user and wires their social relations across the modules.
     *
     * @param user      user profile to register
     * @param friendIds IDs of existing users to connect with FRIEND edges
     */
    public void addUserWithRelations(SocialUser user, List<String> friendIds) {
        // 1. Create the user node (RedisGraph).
        graphService.createPerson(user.getName(), user.getAge(), user.getProperties());
        // 2. Record existence in the Bloom filter for fast membership checks.
        bloomFilter.add("bf:social_users", user.getId());
        // 3. Add to the all-users bitmap for set-style analytics.
        //    NOTE(review): assumes IDs are numeric strings — parseLong throws
        //    NumberFormatException otherwise; confirm the ID format.
        bitmapService.addUser("bitmap:all_users", Long.parseLong(user.getId()));
        // 4. Create FRIEND edges. NOTE(review): nodes are created under the
        //    user's name, but edges target "user:" + friendId — these only
        //    match if friend nodes were actually named that way.
        for (String friendId : friendIds) {
            graphService.createRelationship(user.getName(),
                    "user:" + friendId, "FRIEND", null);
        }
        // 5. Index the profile for full-text search (RediSearch).
        searchRepository.save(convertToProduct(user));
    }

    /**
     * Finds the most-connected users (degree centrality by FRIEND count).
     *
     * @param limit maximum number of influencers to return
     */
    public List<SocialUser> findInfluencers(int limit) {
        // Count FRIEND edges per person and rank by that count.
        String cypher =
                "MATCH (p:Person)-[r:FRIEND]-(friend:Person) " +
                "RETURN p.name, COUNT(friend) AS friendCount " +
                "ORDER BY friendCount DESC LIMIT " + limit;
        GraphService.GraphResult result = graphService.executeQuery(cypher);
        List<SocialUser> influencers = new ArrayList<>();
        for (Map<String, Object> row : result.getRows()) {
            SocialUser user = new SocialUser();
            user.setName((String) row.get("p.name"));
            // NOTE(review): setFriendCount takes an int but the value is
            // cast to Long — this does not compile; use
            // ((Number) row.get("friendCount")).intValue() instead.
            user.setFriendCount((Long) row.get("friendCount"));
            influencers.add(user);
        }
        return influencers;
    }

    /**
     * Community discovery: everyone reachable from the user through FRIEND
     * edges within the given depth.
     *
     * NOTE(review): userId is interpolated into Cypher unescaped — quotes in
     * the value break the query; sanitize or escape upstream.
     */
    public Set<String> findSocialCircle(String userId, int depth) {
        // Find all friends up to the requested traversal depth.
        String cypher = String.format(
                "MATCH (p:Person {name: '%s'})-[:FRIEND*1..%d]-(friend) " +
                "RETURN DISTINCT friend.name",
                userId, depth
        );
        GraphService.GraphResult result = graphService.executeQuery(cypher);
        Set<String> circle = new HashSet<>();
        for (Map<String, Object> row : result.getRows()) {
            circle.add((String) row.get("friend.name"));
        }
        return circle;
    }

    /**
     * Finds users sharing at least one of the given interests, using a
     * RediSearch TAG union query (interest1|interest2|...).
     *
     * NOTE(review): an empty interests list makes deleteCharAt remove the
     * opening "{" and yields a malformed query — guard upstream.
     */
    public List<SocialUser> findUsersWithCommonInterests(String userId,
                                                         List<String> interests) {
        // Build "@interests:{a|b|c}" then strip the trailing "|".
        StringBuilder query = new StringBuilder("@interests:{");
        for (String interest : interests) {
            query.append(interest).append("|");
        }
        query.deleteCharAt(query.length() - 1).append("}");
        List<Product> users = searchRepository.search(query.toString(), 0, 20);
        // Exclude the requesting user from the matches.
        return users.stream()
                .filter(u -> !u.getId().equals(userId))
                .map(this::convertToSocialUser)
                .collect(Collectors.toList());
    }

    /**
     * Summarizes login activity over the trailing window using the
     * time-series module with per-day COUNT aggregation.
     *
     * @param userId user whose logins are analyzed
     * @param days   window length in days
     * @return totalLogins / avgDailyLogins / maxDailyLogins statistics
     */
    public Map<String, Object> analyzeUserActivity(String userId, long days) {
        long endTime = System.currentTimeMillis();
        // 86400000 ms = 1 day.
        long startTime = endTime - days * 86400000L;
        // Fetch login events bucketed per day.
        // NOTE(review): timeSeriesService is not declared in this class.
        List<TimeSeriesService.TimeSeriesPoint> loginRecords =
                timeSeriesService.range("ts:user_login:" + userId,
                        startTime, endTime,
                        TimeSeriesService.AggregationType.COUNT,
                        86400000); // one-day buckets
        // Aggregate the per-day counts into summary statistics.
        Map<String, Object> stats = new HashMap<>();
        stats.put("totalLogins", loginRecords.stream()
                .mapToDouble(TimeSeriesService.TimeSeriesPoint::getValue)
                .sum());
        stats.put("avgDailyLogins", loginRecords.stream()
                .mapToDouble(TimeSeriesService.TimeSeriesPoint::getValue)
                .average().orElse(0));
        stats.put("maxDailyLogins", loginRecords.stream()
                .mapToDouble(TimeSeriesService.TimeSeriesPoint::getValue)
                .max().orElse(0));
        return stats;
    }

    /**
     * Friend recommendation merging two candidate pools: mutual-friend
     * candidates and shared-interest candidates, deduplicated, then ranked
     * by combined commonFriends + commonInterests score.
     *
     * NOTE(review): findMutualFriends(String, int) and getUserById(String)
     * are not defined in this class; the score fields (commonFriends /
     * commonInterests) are never populated by the visible code paths, so
     * the final sort currently compares zeros.
     */
    public List<SocialUser> recommendFriends(String userId, int limit) {
        // 1. Candidates who share friends with the user.
        List<SocialUser> mutualFriendUsers =
                findMutualFriends(userId, limit * 2);
        // 2. The user's own interest list.
        SocialUser currentUser = getUserById(userId);
        // 3. Candidates who share interests.
        List<SocialUser> interestBasedUsers =
                findUsersWithCommonInterests(userId, currentUser.getInterests());
        // 4. Merge both pools, skipping duplicates and the user themself.
        Set<String> recommendedIds = new HashSet<>();
        List<SocialUser> recommendations = new ArrayList<>();
        for (SocialUser user : mutualFriendUsers) {
            if (!recommendedIds.contains(user.getId()) &&
                    !user.getId().equals(userId)) {
                recommendations.add(user);
                recommendedIds.add(user.getId());
            }
        }
        for (SocialUser user : interestBasedUsers) {
            if (!recommendedIds.contains(user.getId()) &&
                    !user.getId().equals(userId)) {
                recommendations.add(user);
                recommendedIds.add(user.getId());
            }
        }
        // 5. Rank by combined social-proximity score, descending.
        recommendations.sort((u1, u2) -> {
            int score1 = u1.getCommonFriends() + u1.getCommonInterests();
            int score2 = u2.getCommonFriends() + u2.getCommonInterests();
            return Integer.compare(score2, score1);
        });
        return recommendations.stream().limit(limit).collect(Collectors.toList());
    }

    @Data
    public static class SocialUser {
        private String id;
        private String name;
        private int age;
        // interest tags used for the RediSearch TAG query
        private List<String> interests;
        // arbitrary extra node properties
        private Map<String, Object> properties;
        // degree (number of FRIEND edges)
        private int friendCount;
        // scores used by recommendFriends' ranking
        private int commonFriends;
        private int commonInterests;
    }
}
案例3:实时监控告警系统
java
/**
 * Real-time monitoring and alerting system combining RedisTimeSeries,
 * Top-K and T-Digest.
 *
 * NOTE(review): this class calls several members that are not defined in
 * the listing and will not compile as shown: sendAlert(Map) /
 * sendAlert(Alert), getAlertRule(String), calculateSeverity(double, double)
 * and the Alert type itself. Confirm they exist elsewhere or add them.
 */
@Service
public class MonitoringAlertSystem {

    @Autowired
    private TimeSeriesService timeSeriesService;
    @Autowired
    private TopKService topKService;
    @Autowired
    private TDigestService tDigestService;
    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    /**
     * Entry point: stores one metric sample and runs all downstream checks
     * (anomaly baseline update, error ranking, threshold alerting).
     */
    public void recordMetric(Metric metric) {
        String seriesKey = "ts:metric:" + metric.getName();
        long timestamp = System.currentTimeMillis();
        // Store the raw sample in the per-metric time series.
        timeSeriesService.add(seriesKey, timestamp, metric.getValue());
        // Update the anomaly-detection baseline (T-Digest).
        updateAnomalyDetection(metric);
        // Track high-frequency error codes (Top-K).
        if (metric.getType() == MetricType.ERROR) {
            topKService.incrementBy("topk:frequent_errors",
                    metric.getErrorCode(), 1);
        }
        // Evaluate real-time alert rules.
        checkAndAlert(metric);
    }

    /**
     * Anomaly detection: feeds the sample into a per-metric T-Digest and
     * flags values above the rolling 95th percentile.
     *
     * NOTE(review): the sample is added BEFORE the quantile is read, and on
     * a freshly-created digest the quantile may be NaN/empty — guard
     * thresholds.get(0) if the service can return an empty list.
     */
    private void updateAnomalyDetection(Metric metric) {
        String digestKey = "tdigest:baseline:" + metric.getName();
        // Fold the new observation into the baseline sketch.
        tDigestService.add(digestKey, metric.getValue());
        // Use the 95th percentile of the baseline as the anomaly threshold.
        List<Double> thresholds = tDigestService.quantile(digestKey, 0.95);
        double threshold = thresholds.get(0);
        // Current value above the baseline threshold => record an anomaly.
        if (metric.getValue() > threshold) {
            recordAnomaly(metric, threshold);
        }
    }

    /**
     * Records an anomaly event in a capped per-metric list (most recent
     * first, last 100 kept) and triggers an alert.
     */
    private void recordAnomaly(Metric metric, double threshold) {
        String anomalyKey = "anomaly:" + metric.getName();
        // Anomaly payload stored as a map value in a Redis list.
        Map<String, Object> anomaly = new HashMap<>();
        anomaly.put("timestamp", System.currentTimeMillis());
        anomaly.put("metric", metric.getName());
        anomaly.put("value", metric.getValue());
        anomaly.put("threshold", threshold);
        // NOTE(review): calculateSeverity is not defined in this listing.
        anomaly.put("severity", calculateSeverity(metric.getValue(), threshold));
        redisTemplate.opsForList().leftPush(anomalyKey, anomaly);
        // Keep only the 100 most recent anomalies.
        redisTemplate.opsForList().trim(anomalyKey, 0, 99);
        // Fire the alert for this anomaly.
        sendAlert(anomaly);
    }

    /**
     * Evaluates the metric's alert rule over its recent time window and
     * fires an alert when the configured condition is met.
     *
     * NOTE(review): the local alertKey below is never used — remove it;
     * getAlertRule and the Alert type are not defined in this listing.
     */
    private void checkAndAlert(Metric metric) {
        String alertKey = "alert:rule:" + metric.getName();
        // Look up the configured rule for this metric (null = no rule).
        AlertRule rule = getAlertRule(metric.getName());
        if (rule == null) return;
        // Fetch raw samples within the rule's evaluation window.
        long now = System.currentTimeMillis();
        long windowStart = now - rule.getWindowMs();
        List<TimeSeriesService.TimeSeriesPoint> points =
                timeSeriesService.range("ts:metric:" + metric.getName(),
                        windowStart, now, null, 0);
        // Window statistics used by the rule conditions.
        double avg = points.stream()
                .mapToDouble(TimeSeriesService.TimeSeriesPoint::getValue)
                .average()
                .orElse(0);
        double max = points.stream()
                .mapToDouble(TimeSeriesService.TimeSeriesPoint::getValue)
                .max()
                .orElse(0);
        // Evaluate the rule's condition against the window statistics.
        boolean shouldAlert = false;
        String alertMessage = null;
        switch (rule.getCondition()) {
            case AVG_GREATER_THAN:
                if (avg > rule.getThreshold()) {
                    shouldAlert = true;
                    alertMessage = String.format("Average value %.2f exceeds threshold %.2f",
                            avg, rule.getThreshold());
                }
                break;
            case MAX_GREATER_THAN:
                if (max > rule.getThreshold()) {
                    shouldAlert = true;
                    alertMessage = String.format("Max value %.2f exceeds threshold %.2f",
                            max, rule.getThreshold());
                }
                break;
            case SPIKES:
                if (detectSpikes(points, rule.getThreshold())) {
                    shouldAlert = true;
                    alertMessage = "Spike detected in metric";
                }
                break;
        }
        if (shouldAlert) {
            Alert alert = new Alert();
            alert.setMetric(metric.getName());
            alert.setMessage(alertMessage);
            alert.setTimestamp(now);
            alert.setSeverity(rule.getSeverity());
            sendAlert(alert);
        }
    }

    /**
     * Spike detector: true when the window's max exceeds
     * mean + threshold * stdDev (threshold acts as a z-score multiplier).
     * Requires at least 10 samples to be meaningful.
     */
    private boolean detectSpikes(List<TimeSeriesService.TimeSeriesPoint> points,
                                 double threshold) {
        if (points.size() < 10) return false;
        // Collect the raw values for mean / stddev computation.
        List<Double> values = points.stream()
                .map(TimeSeriesService.TimeSeriesPoint::getValue)
                .collect(Collectors.toList());
        double mean = values.stream().mapToDouble(v -> v).average().orElse(0);
        // Population standard deviation of the window.
        double stdDev = Math.sqrt(values.stream()
                .mapToDouble(v -> Math.pow(v - mean, 2))
                .average().orElse(0));
        // Any value beyond mean + threshold * stdDev counts as a spike.
        double maxValue = values.stream().mapToDouble(v -> v).max().orElse(0);
        return maxValue > mean + threshold * stdDev;
    }

    /**
     * Builds a health report (min/max/avg/count + percentiles + top errors)
     * for one metric over the trailing window of the given hours.
     *
     * NOTE(review): the temporary digest is populated with plain add()
     * calls — if the T-Digest service requires an explicit create step,
     * this will fail; confirm against TDigestService's contract.
     */
    public HealthReport getHealthReport(String metricName, long hours) {
        HealthReport report = new HealthReport();
        report.setMetricName(metricName);
        report.setTimeRange(hours + " hours");
        long endTime = System.currentTimeMillis();
        // 3600000 ms = 1 hour.
        long startTime = endTime - hours * 3600000L;
        // Fetch all raw samples in the window.
        List<TimeSeriesService.TimeSeriesPoint> points =
                timeSeriesService.range("ts:metric:" + metricName,
                        startTime, endTime, null, 0);
        if (points.isEmpty()) {
            report.setStatus("No data");
            return report;
        }
        // Basic summary statistics over the window.
        DoubleSummaryStatistics stats = points.stream()
                .mapToDouble(TimeSeriesService.TimeSeriesPoint::getValue)
                .summaryStatistics();
        report.setMin(stats.getMin());
        report.setMax(stats.getMax());
        report.setAvg(stats.getAverage());
        report.setCount(stats.getCount());
        // Percentiles via a throwaway T-Digest (deleted in the finally).
        double[] values = points.stream()
                .mapToDouble(TimeSeriesService.TimeSeriesPoint::getValue)
                .toArray();
        String digestKey = "tdigest:temp:" + System.currentTimeMillis();
        try {
            for (double v : values) {
                tDigestService.add(digestKey, v);
            }
            List<Double> percentiles = tDigestService.quantile(digestKey, 0.5, 0.75, 0.9, 0.95, 0.99);
            report.setMedian(percentiles.get(0));
            report.setP75(percentiles.get(1));
            report.setP90(percentiles.get(2));
            report.setP95(percentiles.get(3));
            report.setP99(percentiles.get(4));
        } finally {
            // Always clean up the temporary digest key.
            redisTemplate.delete(digestKey);
        }
        // Attach the most frequent error codes for error metrics.
        if (metricName.contains("error")) {
            List<String> topErrors = topKService.getTopK("topk:frequent_errors");
            report.setTopErrors(topErrors);
        }
        // Derive the overall health status from the tail-latency ratio.
        report.setStatus(evaluateHealth(report));
        return report;
    }

    /**
     * Health grading by p95-to-average ratio: >3x CRITICAL, >2x WARNING,
     * >1.5x DEGRADED, otherwise HEALTHY.
     */
    private String evaluateHealth(HealthReport report) {
        if (report.getP95() > report.getAvg() * 3) {
            return "CRITICAL";
        } else if (report.getP95() > report.getAvg() * 2) {
            return "WARNING";
        } else if (report.getP95() > report.getAvg() * 1.5) {
            return "DEGRADED";
        } else {
            return "HEALTHY";
        }
    }

    @Data
    public static class Metric {
        private String name;
        private double value;
        private MetricType type;
        // only meaningful when type == ERROR
        private String errorCode;
        private Map<String, String> tags;
    }

    public enum MetricType {
        COUNTER, GAUGE, ERROR, LATENCY
    }

    @Data
    public static class AlertRule {
        private String metricName;
        private Condition condition;
        private double threshold;
        // evaluation window length in milliseconds
        private long windowMs;
        private String severity;
    }

    public enum Condition {
        AVG_GREATER_THAN, MAX_GREATER_THAN, SPIKES
    }

    @Data
    public static class HealthReport {
        private String metricName;
        private String timeRange;
        private String status;
        private double min;
        private double max;
        private double avg;
        private long count;
        private double median;
        private double p75;
        private double p90;
        private double p95;
        private double p99;
        // populated only for metrics whose name contains "error"
        private List<String> topErrors;
    }
}
总结
本章详细介绍了 Redis Stack 各个模块的语法知识点和使用方法:
主要模块特点:
- RedisJSON: 提供完整的 JSON 文档操作能力
- RediSearch: 强大的全文搜索引擎,支持中文检索
- RedisBloom: 提供多种概率性数据结构
- RedisTimeSeries: 专门优化的时序数据存储
- RedisGraph: 图数据库功能,支持复杂关系查询
- Redis-Cell: 基于 GCRA 算法的精确限流(效果等价于令牌桶)
- RedisAI: 在 Redis 中运行机器学习模型
最佳实践建议:
- 选择合适的数据结构:根据业务场景选择合适的 Redis Stack 模块
- 性能优化:合理设置索引、聚合和查询参数
- 监控告警:利用 RedisTimeSeries 进行实时监控
- 组合使用:多个模块组合使用可以发挥最大效能
通过这些模块的组合使用,可以构建出高性能、可扩展的企业级应用。