@@ -1,107 +1,195 @@
package com.yaoyicloud.config;

+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.commons.collections4.MapUtils;
+import org.springframework.data.redis.core.HashOperations;
+import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;

import java.util.Collections;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
+import java.util.Set;

@Component
public class CommonDataCache {

-    // relationId -> common data mapping
-    private final ConcurrentMap<String, ConcurrentMap<String, Object>> dataCache
-            = new ConcurrentHashMap<>();
+    // Redis key prefix (distinguishes this cache from other caches)
+    private static final String KEY_PREFIX = "data:";
+    private final ObjectMapper objectMapper = new ObjectMapper();
+    // Redis hash operations helper
+    private final HashOperations<String, String, String> hashOps;
+
+    // Constructor injection of the RedisTemplate
+    public CommonDataCache(RedisTemplate<String, String> redisTemplate) {
+        this.hashOps = redisTemplate.opsForHash();
+    }

    /**
-     *
+     * Builds the Redis hash key (format: data:{relationId})
     */
-    public void addData(String sessionId, Map<String, Object> data) {
-        if (data != null) {
-            // Use putIfAbsent to avoid re-creating an empty map
-            dataCache.putIfAbsent(sessionId, new ConcurrentHashMap<>());
-            // Copy the whole data map in directly
-            dataCache.get(sessionId).putAll(data);
-        }
+    private String getRedisKey(String relationId) {
+        return KEY_PREFIX + relationId;
    }

    /**
-     * Adds or updates a single common data item
+     * Adds or updates a single key-value pair
     */
-    public void addDataItem(String sessionId, String key, Object value) {
-        dataCache.computeIfAbsent(sessionId, k -> new ConcurrentHashMap<>())
-                .put(key, value);
+    public void addData(String relationId, String key, Object value) {
+        String redisKey = getRedisKey(relationId);
+        try {
+            // Serialize the value and write it into the hash
+            hashOps.put(redisKey, key, objectMapper.writeValueAsString(value));
+        } catch (JsonProcessingException e) {
+            throw new RuntimeException("Failed to serialize data: " + key, e);
+        }
    }

    /**
-     * Gets the common data
+     * Adds multiple key-value pairs in one call
+     */
+    public void addDataMap(String relationId, Map<String, Object> dataMap) {
+        if (MapUtils.isEmpty(dataMap)) {
+            return;
+        }
+        String redisKey = getRedisKey(relationId);
+        // JSON-serialize every value in the map
+        Map<String, String> serializedMap = new HashMap<>();
+        for (Map.Entry<String, Object> entry : dataMap.entrySet()) {
+            try {
+                serializedMap.put(entry.getKey(), objectMapper.writeValueAsString(entry.getValue()));
+            } catch (JsonProcessingException e) {
+                throw new RuntimeException("Failed to serialize data: " + entry.getKey(), e);
+            }
+        }
+        // Store the serialized map
+        hashOps.putAll(redisKey, serializedMap);
+    }
+    /**
+     * Gets all key-value pairs for the given relationId
     */
-    public Map<String, Object> getData(String sessionId) {
-        ConcurrentMap<String, Object> sessionData = dataCache.get(sessionId);
-        return sessionData != null ? Collections.unmodifiableMap(sessionData) : Collections.emptyMap();
+    public Map<String, Object> getData(String relationId) {
+        String redisKey = getRedisKey(relationId);
+        // Fetch every field and value from the hash
+        Map<String, String> hashEntries = hashOps.entries(redisKey);
+        if (hashEntries.isEmpty()) {
+            return Collections.emptyMap();
+        }
+        // Deserialize the values back to Object
+        Map<String, Object> result = new HashMap<>();
+        for (Map.Entry<String, String> entry : hashEntries.entrySet()) {
+            try {
+                result.put(entry.getKey(), objectMapper.readValue(entry.getValue(), Object.class));
+            } catch (Exception e) {
+                throw new RuntimeException("Failed to deserialize data: " + entry.getKey(), e);
+            }
+        }
+        return Collections.unmodifiableMap(result); // return an unmodifiable view
    }

    /**
-     * Gets a single common data item
+     * Gets the value of a single key (as a raw Object)
     */
-    public Object getDataItem(String sessionId, String key) {
-        ConcurrentMap<String, Object> sessionData = dataCache.get(sessionId);
-        return sessionData != null ? sessionData.get(key) : null;
+    public Object getDataItem(String relationId, String key) {
+        String redisKey = getRedisKey(relationId);
+        String valueStr = hashOps.get(redisKey, key);
+        if (valueStr == null) {
+            return null;
+        }
+        try {
+            return objectMapper.readValue(valueStr, Object.class);
+        } catch (Exception e) {
+            throw new RuntimeException("Failed to deserialize data: " + key, e);
+        }
    }

    /**
-     * Gets a single common data item of the given type
+     * Gets the value of a single key as the given type
     */
    @SuppressWarnings("unchecked")
-    public <T> T getDataItem(String sessionId, String key, Class<T> type) {
-        Object value = getDataItem(sessionId, key);
-        return type.isInstance(value) ? (T) value : null;
+    public <T> T getDataItem(String relationId, String key, Class<T> type) {
+        Object value = getDataItem(relationId, key);
+        if (value == null) {
+            return null;
+        }
+        // Cast directly if the type already matches; otherwise convert to the requested type
+        if (type.isInstance(value)) {
+            return (T) value;
+        } else {
+            try {
+                return objectMapper.convertValue(value, type);
+            } catch (Exception e) {
+                throw new RuntimeException("Failed to convert type: " + key + " to " + type.getName(), e);
+            }
+        }
    }

    /**
-     * Removes all common data for the given session
+     * Removes all data for the given relationId and returns the removed data
     */
-    public Map<String, Object> removeSessionData(String sessionId) {
-        ConcurrentMap<String, Object> removed = dataCache.remove(sessionId);
-        return removed != null ? new HashMap<>(removed) : Collections.emptyMap();
+    public Map<String, Object> removeSessionData(String relationId) {
+        String redisKey = getRedisKey(relationId);
+        // Read the data first (for the return value), then delete the hash key
+        Map<String, Object> removedData = getData(relationId);
+        hashOps.getOperations().delete(redisKey); // delete the whole hash
+        return removedData;
    }

    /**
-     * Removes a single data item from the given session
+     * Removes a single key-value pair for the given relationId
     */
-    public Object removeDataItem(String sessionId, String key) {
-        ConcurrentMap<String, Object> sessionData = dataCache.get(sessionId);
-        return sessionData != null ? sessionData.remove(key) : null;
+    public Object removeDataItem(String relationId, String key) {
+        String redisKey = getRedisKey(relationId);
+        // Read the value first (for the return value), then delete the field
+        Object value = getDataItem(relationId, key);
+        hashOps.delete(redisKey, key); // delete a single field from the hash
+        return value;
    }

    /**
-     * Checks whether common data exists for the given session
+     * Checks whether any data exists for the given relationId
     */
-    public boolean hasSessionData(String sessionId) {
-        return dataCache.containsKey(sessionId);
+    public boolean hasSessionData(String relationId) {
+        String redisKey = getRedisKey(relationId);
+        // The hash key exists only if it holds at least one field
+        return hashOps.size(redisKey) > 0;
    }

    /**
-     * Checks whether a data item exists in the given session
+     * Checks whether the given key exists for the relationId
     */
-    public boolean hasDataItem(String sessionId, String key) {
-        ConcurrentMap<String, Object> sessionData = dataCache.get(sessionId);
-        return sessionData != null && sessionData.containsKey(key);
+    public boolean hasDataItem(String relationId, String key) {
+        String redisKey = getRedisKey(relationId);
+        return hashOps.hasKey(redisKey, key);
    }

    /**
-     * Gets all sessionIds
+     * Gets all relationIds (note: use the KEYS command with caution in production)
     */
-    public java.util.Set<String> getAllSessionIds() {
-        return dataCache.keySet();
+    public Set<String> getAllrelationIds() {
+        String pattern = KEY_PREFIX + "*";
+        Set<String> redisKeys = hashOps.getOperations().keys(pattern);
+        if (redisKeys == null || redisKeys.isEmpty()) {
+            return Collections.emptySet();
+        }
+        // Extract the relationId from each Redis key (strip the "data:" prefix)
+        Set<String> relationIds = new HashSet<>();
+        for (String key : redisKeys) {
+            relationIds.add(key.substring(KEY_PREFIX.length()));
+        }
+        return relationIds;
    }

    /**
     * Clears all cached data
     */
    public void clear() {
-        dataCache.clear();
+        String pattern = KEY_PREFIX + "*";
+        Set<String> redisKeys = hashOps.getOperations().keys(pattern);
+        if (redisKeys != null && !redisKeys.isEmpty()) {
+            hashOps.getOperations().delete(redisKeys);
+        }
    }
}
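
Note on wiring: the new constructor expects a RedisTemplate<String, String> bean whose key, value, hash-key, and hash-value serializers are all String-based, so that the JSON strings produced by the ObjectMapper round-trip cleanly. The diff does not show the Redis configuration, so the sketch below is only an illustration of one way such a bean could be provided; the class name RedisTemplateConfig is hypothetical, and in a Spring Boot application the auto-configured StringRedisTemplate already satisfies the same contract.

    package com.yaoyicloud.config;

    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.data.redis.connection.RedisConnectionFactory;
    import org.springframework.data.redis.core.StringRedisTemplate;

    // Hypothetical configuration sketch: StringRedisTemplate is a
    // RedisTemplate<String, String> that uses String serializers for keys,
    // values, hash keys, and hash values, which is what the CommonDataCache
    // constructor above expects to have injected.
    @Configuration
    public class RedisTemplateConfig {

        @Bean
        public StringRedisTemplate stringRedisTemplate(RedisConnectionFactory connectionFactory) {
            return new StringRedisTemplate(connectionFactory);
        }
    }

Because values are stored as JSON strings, a caller can write and read typed values roughly like this (the identifiers "rel-123" and "retryCount" are made up for illustration):

    commonDataCache.addData("rel-123", "retryCount", 3);
    Integer retryCount = commonDataCache.getDataItem("rel-123", "retryCount", Integer.class);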