Commit c78714d4 authored by ibizdev

ibizdev commit

Parent 403b399e
@@ -38,11 +38,6 @@
git clone -b master $para2 ibzwf/
export NODE_OPTIONS=--max-old-space-size=4096
cd ibzwf/
mvn clean package -Pweb
cd ibzwf-app/ibzwf-app-web
mvn -Pweb docker:build
mvn -Pweb docker:push
docker -H $para1 stack deploy --compose-file=src/main/docker/ibzwf-app-web.yaml dev --with-registry-auth
</command>
</hudson.tasks.Shell>
</builders>
......
@@ -9,6 +9,6 @@ CMD echo "The application will start in ${IBZ_SLEEP}s..." && \
sleep ${IBZ_SLEEP} && \
java ${JAVA_OPTS} -Djava.security.egd=file:/dev/./urandom -jar /ibzwf-app-web.jar
EXPOSE 30003
EXPOSE 8080
ADD ibzwf-app-web.jar /ibzwf-app-web.jar
@@ -3,11 +3,9 @@ services:
ibzwf-app-web:
image: registry.cn-shanghai.aliyuncs.com/ibizsys/ibzwf-app-web:latest
ports:
- "30003:30003"
- "8080:8080"
networks:
- agent_network
environment:
SPRING_CLOUD_NACOS_DISCOVERY_IP: 172.16.180.237
deploy:
mode: replicated
replicas: 1
......
@@ -21,7 +21,7 @@ import java.util.List;
})
@EnableDiscoveryClient
@Configuration
@EnableFeignClients
@EnableFeignClients(basePackages = {"cn.ibizlab" })
@EnableZuulProxy
@ComponentScan(basePackages = {"cn.ibizlab"})
@MapperScan("cn.ibizlab.*.mapper")
......
server:
port: 30003
\ No newline at end of file
port: 8080
\ No newline at end of file
server:
port: 30003
port: 8080
#Zuul gateway route settings
zuul:
......
package cn.ibizlab.util.cache;
import com.alibaba.fastjson.parser.ParserConfig;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.benmanes.caffeine.cache.CaffeineSpec;
import cn.ibizlab.util.enums.RedisChannelTopic;
import cn.ibizlab.util.cache.layering.LayeringCacheManager;
import cn.ibizlab.util.cache.redis.KryoRedisSerializer;
import cn.ibizlab.util.cache.redis.StringRedisSerializer;
import cn.ibizlab.util.cache.setting.FirstCacheSetting;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.cache.CacheProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cache.CacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.data.redis.cache.RedisCacheConfiguration;
import org.springframework.data.redis.cache.RedisCacheManager;
import org.springframework.data.redis.cache.RedisCacheWriter;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.context.annotation.Profile;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.listener.RedisMessageListenerContainer;
import org.springframework.data.redis.listener.adapter.MessageListenerAdapter;
import org.springframework.data.redis.serializer.Jackson2JsonRedisSerializer;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.util.StringUtils;
import java.util.HashMap;
import java.util.Map;
/**
* Cache configuration class
* Level-1 cache: Caffeine
* Level-2 cache: Redis
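* <p>A minimal usage sketch (hypothetical BookService/bookRepository, not part of this commit): with this
* configuration active and {@code ibiz.enableCache} set, a {@code @Cacheable} method reads through
* Caffeine first and falls back to Redis on a miss:
* <pre>{@code
*   @Cacheable(value = "books", key = "#isbn")
*   public Book findBook(String isbn) {
*       return bookRepository.findByIsbn(isbn); // only called when both cache levels miss
*   }
* }</pre>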
*/
@EnableCaching
@Configuration
@EnableConfigurationProperties(CacheProperties.class)
@ConditionalOnProperty("ibiz.enableCache")
public class CacheConfig {
@Bean
public RedisCacheManager redisCacheManager(RedisConnectionFactory connectionFactory) {
return RedisCacheManager.create(connectionFactory);
}
@Bean
public RedisCacheWriter redisCacheWriter(RedisConnectionFactory connectionFactory){
RedisCacheWriter redisCacheWriter = RedisCacheWriter.nonLockingRedisCacheWriter(connectionFactory);
return redisCacheWriter;
}
@Autowired
private RedisCacheWriter redisCacheWriter;
@Autowired
private RedisCacheConfiguration configuration;
/**
* Overrides the default Redis serialization:
* When data is stored in Redis, both the key and the value go through a Spring-provided Serializer. RedisTemplate defaults to JdkSerializationRedisSerializer, while StringRedisTemplate defaults to StringRedisSerializer.
* Spring Data Redis provides the following serializers:
* GenericToStringSerializer, Jackson2JsonRedisSerializer, JacksonJsonRedisSerializer, JdkSerializationRedisSerializer, OxmSerializer and StringRedisSerializer.
* Here we configure our own RedisTemplate and choose the serializers explicitly (Kryo for values, plain strings for keys).
*
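* A small illustration (key and value names assumed) of what this template then does at runtime:
* <pre>{@code
*   redisTemplate.opsForValue().set("demo:user:1", someUser);       // key stored as a plain string
*   Object cached = redisTemplate.opsForValue().get("demo:user:1"); // value round-tripped through Kryo
* }</pre>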
* @param redisConnectionFactory
* @return
*/
@Bean
public RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory redisConnectionFactory) {
RedisTemplate<String, Object> redisTemplate = new RedisTemplate<>();
redisTemplate.setConnectionFactory(redisConnectionFactory);
Jackson2JsonRedisSerializer<Object> jackson2JsonRedisSerializer = new Jackson2JsonRedisSerializer<Object>(Object.class);
ObjectMapper om = new ObjectMapper();
om.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);
om.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL);
jackson2JsonRedisSerializer.setObjectMapper(om);
ParserConfig.getGlobalInstance().setAutoTypeSupport(true);
KryoRedisSerializer<Object> kryoRedisSerializer = new KryoRedisSerializer<>(Object.class);
redisTemplate.setValueSerializer(kryoRedisSerializer);// values are serialized with KryoRedisSerializer
redisTemplate.setHashValueSerializer(kryoRedisSerializer);
redisTemplate.setKeySerializer(new StringRedisSerializer());// keys are serialized with StringRedisSerializer
redisTemplate.setHashKeySerializer(new StringRedisSerializer());
redisTemplate.afterPropertiesSet();
return redisTemplate;
}
@Bean
@Primary
public CacheManager cacheManager(RedisTemplate<String, Object> redisTemplate) {
LayeringCacheManager layeringCacheManager = new LayeringCacheManager(redisTemplate);
setFirstCacheConfig(layeringCacheManager);//Caffeine (first-level) cache settings
layeringCacheManager.setAllowNullValues(true); //allow caching null to mitigate cache penetration
layeringCacheManager.setRedisCacheWriter(redisCacheWriter);
layeringCacheManager.setRedisConfiguration(configuration);
return layeringCacheManager;
}
@Autowired
private CacheProperties cacheProperties;
private void setFirstCacheConfig(LayeringCacheManager layeringCacheManager) {
String specification = cacheProperties.getCaffeine().getSpec();
if (StringUtils.hasText(specification)) {
layeringCacheManager.setCaffeineSpec(CaffeineSpec.parse(specification));
}
Map<String, FirstCacheSetting> firstCacheSettings = new HashMap<>();
layeringCacheManager.setFirstCacheSettings(firstCacheSettings);
}
}
\ No newline at end of file
package cn.ibizlab.util.cache.layering;
import cn.ibizlab.util.enums.RedisChannelTopic;
import cn.ibizlab.util.cache.listener.RedisPublisher;
import cn.ibizlab.util.cache.redis.CustomizedRedisCache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cache.caffeine.CaffeineCache;
import org.springframework.cache.support.AbstractValueAdaptingCache;
import org.springframework.cache.support.NullValue;
import org.springframework.data.redis.cache.RedisCache;
import org.springframework.data.redis.cache.RedisCacheConfiguration;
import org.springframework.data.redis.cache.RedisCacheWriter;
import org.springframework.data.redis.core.RedisOperations;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
/**
* Layered cache
* Level-1 cache: Caffeine
* Level-2 cache: Redis
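* Read/write path, as implemented below: get() checks Caffeine first (when usedFirstCache is true),
* falls back to Redis on a miss and back-fills Caffeine; put/evict/clear always touch Redis and,
* when the first level is enabled, publish a Redis message so other nodes drop their local copies.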
*/
public class LayeringCache extends AbstractValueAdaptingCache {
Logger logger = LoggerFactory.getLogger(LayeringCache.class);
/**
* Cache name
*/
private final String name;
/**
* Whether the first-level cache is used
*/
private boolean usedFirstCache = true;
/**
* Redis cache (second level)
*/
private RedisCache redisCache;
/**
* Caffeine cache (first level)
*/
private final CaffeineCache caffeineCache;
RedisOperations<? extends Object, ? extends Object> redisOperations;
/**
* @param name cache name
* @param allowNullValues whether null values may be cached, defaults to false
* @param redisOperations Redis operations used to publish cache invalidation messages
* @param usedFirstCache whether the first-level cache is used, defaults to true
* @param caffeineCache the native Caffeine cache
* @param redisCacheWriter the Redis cache writer
* @param configuration the Redis cache configuration
*/
public LayeringCache(String name , boolean allowNullValues,RedisOperations redisOperations, boolean usedFirstCache, com.github.benmanes.caffeine.cache.Cache<Object, Object> caffeineCache,
RedisCacheWriter redisCacheWriter, RedisCacheConfiguration configuration) {
super(allowNullValues);
this.name = name;
this.usedFirstCache = usedFirstCache;
this.redisCache = new CustomizedRedisCache(name, redisCacheWriter, configuration);
this.caffeineCache = new CaffeineCache(name, caffeineCache, allowNullValues);
this.redisOperations=redisOperations;
}
public CaffeineCache getFirstCache() {
return this.caffeineCache;
}
@Override
public String getName() {
return this.name;
}
@Override
public Object getNativeCache() {
return this;
}
@Override
public ValueWrapper get(Object key) {
ValueWrapper wrapper = null;
if (usedFirstCache) {
// check the first-level cache
wrapper = caffeineCache.get(key);
logger.debug("first-level cache lookup key:{},value:{}", key,wrapper);
}
if (wrapper == null) {
// check the second-level cache
wrapper = redisCache.get(key);
caffeineCache.put(key, wrapper == null ? null : wrapper.get());
logger.debug("second-level cache lookup; value copied into the first-level cache. key:{}", key);
}
return wrapper;
}
@Override
public <T> T get(Object key, Class<T> type) {
T value = null;
if (usedFirstCache) {
// check the first-level cache
value = caffeineCache.get(key, type);
logger.debug("first-level cache lookup key:{}", key);
}
if (value == null) {
// check the second-level cache
value = redisCache.get(key, type);
caffeineCache.put(key, value);
logger.debug("second-level cache lookup; value copied into the first-level cache. key:{}", key);
}
return value;
}
@SuppressWarnings("unchecked")
@Override
public <T> T get(Object key, Callable<T> valueLoader) {
T value = null;
if (usedFirstCache) {
// check the first-level cache; on a miss, getForSecondaryCache(k, valueLoader) loads from the second level
value = (T) caffeineCache.getNativeCache().get(key, k -> getForSecondaryCache(k, valueLoader));
} else {
// go straight to the second-level cache
value = (T) getForSecondaryCache(key, valueLoader);
}
if (value instanceof NullValue) {
return null;
}
return value;
}
@Override
public void put(Object key, Object value) {
if (usedFirstCache) {
caffeineCache.put(key, value);
}
redisCache.put(key, value);
}
@Override
public ValueWrapper putIfAbsent(Object key, Object value) {
if (usedFirstCache) {
caffeineCache.putIfAbsent(key, value);
}
return redisCache.putIfAbsent(key, value);
}
@Override
public void evict(Object key) {
redisCache.evict(key); //evict from the second-level (Redis) cache
if (usedFirstCache) {
caffeineCache.evict(key);//evict from the local first-level cache
Map<String, Object> message = new HashMap<>();
message.put("cacheName", name);
message.put("key", key);
RedisPublisher redisPublisher = new RedisPublisher(redisOperations, RedisChannelTopic.REDIS_CACHE_DELETE_TOPIC.getChannelTopic());// create the Redis publisher
redisPublisher.publisher(message);//publish a message so other nodes evict their first-level caches
}
logger.debug(String.format("evicted key [%s] from the second-level cache", key));
}
@Override
public void clear() {
redisCache.clear(); //clear the second-level (Redis) cache
if (usedFirstCache) {
caffeineCache.clear();//clear the local first-level cache
Map<String, Object> message = new HashMap<>();
message.put("cacheName", name);
RedisPublisher redisPublisher = new RedisPublisher(redisOperations, RedisChannelTopic.REDIS_CACHE_CLEAR_TOPIC.getChannelTopic());// create the Redis publisher
redisPublisher.publisher(message);//publish a message so other nodes clear their first-level caches
}
}
@Override
protected Object lookup(Object key) {
Object value = null;
if (usedFirstCache) {
value = caffeineCache.get(key);
logger.debug("first-level cache lookup key:{}", key);
}
if (value == null) {
value = redisCache.get(key);
logger.debug("second-level cache lookup key:{}", key);
}
return value;
}
/**
* Query the second-level cache
* @param key
* @param valueLoader
* @return
*/
private <T> Object getForSecondaryCache(Object key, Callable<T> valueLoader) {
T value = redisCache.get(key, valueLoader);
logger.debug("second-level cache lookup key:{}", key);
return toStoreValue(value);
}
}
package cn.ibizlab.util.cache.layering;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.CaffeineSpec;
import cn.ibizlab.util.cache.setting.FirstCacheSetting;
import cn.ibizlab.util.cache.setting.SecondaryCacheSetting;
import lombok.Data;
import org.springframework.cache.Cache;
import org.springframework.cache.CacheManager;
import org.springframework.data.redis.cache.RedisCacheConfiguration;
import org.springframework.data.redis.cache.RedisCacheWriter;
import org.springframework.data.redis.core.RedisOperations;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.util.CollectionUtils;
import org.springframework.util.ObjectUtils;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import org.springframework.util.StringUtils;
/**
* Layered cache manager
* Level-1 cache: Caffeine
* Level-2 cache: Redis
*/
@Data
public class LayeringCacheManager implements CacheManager {
static final int DEFAULT_EXPIRE_AFTER_WRITE = 2;
static final int DEFAULT_INITIAL_CAPACITY = 5;
static final int DEFAULT_MAXIMUM_SIZE = 1_000;
private final ConcurrentMap<String, Cache> cacheMap = new ConcurrentHashMap<String, Cache>(16);
/**
* First-level cache settings
*/
private Map<String, FirstCacheSetting> firstCacheSettings = null;
/**
* Second-level cache settings
*/
private Map<String, SecondaryCacheSetting> secondaryCacheSettings = null;
public RedisCacheWriter redisCacheWriter;
public RedisCacheConfiguration redisConfiguration;
public RedisOperations redisOperations;
/**
* Whether caches may be created on demand, defaults to true
*/
private boolean dynamic = true;
/**
* Whether cached values may be null
*/
private boolean allowNullValues = false;
/**
* Default Caffeine builder:
* expireAfterAccess: 2 hours (the constant is named "WRITE", but the builder below uses expireAfterAccess)
* initialCapacity: 5
* maximumSize: 1_000
*/
private Caffeine<Object, Object> cacheBuilder = Caffeine.newBuilder()
.expireAfterAccess(DEFAULT_EXPIRE_AFTER_WRITE, TimeUnit.HOURS)
.initialCapacity(DEFAULT_INITIAL_CAPACITY)
.maximumSize(DEFAULT_MAXIMUM_SIZE);
public LayeringCacheManager(RedisTemplate<String, Object> redisTemplate) {
this.redisOperations=redisTemplate;
}
/**
* Get the cache with the given name
* @param name
* @return
*/
@Override
public Cache getCache(String name) {
Cache cache = this.cacheMap.get(name);
if (cache == null && this.dynamic) {
synchronized (this.cacheMap) {
cache = this.cacheMap.get(name);
if (cache == null) {
cache = createCache(name);
this.cacheMap.put(name, cache);
}
}
}
return cache;
}
@Override
public Collection<String> getCacheNames() {
return Collections.unmodifiableSet(this.cacheMap.keySet());
}
protected Cache createCache(String name) {
return new LayeringCache(name,isAllowNullValues(),this.redisOperations, getUsedFirstCache(name),createNativeCaffeineCache(name),redisCacheWriter,redisConfiguration);
}
/**
* Create a native Caffeine Cache instance for the specified cache name.
*
* @param name the name of the cache
* @return the native Caffeine Cache instance
*/
protected com.github.benmanes.caffeine.cache.Cache<Object, Object> createNativeCaffeineCache(String name) {
return getCaffeine(name).build();
}
/**
* Recreate the known caches with the current state of this CacheManager.
*/
private void refreshKnownCaches() {
for (Map.Entry<String, Cache> entry : this.cacheMap.entrySet()) {
entry.setValue(createCache(entry.getKey()));
}
}
/**
* Set whether cached values may be null
*
* @param allowNullValues
*/
public void setAllowNullValues(boolean allowNullValues) {
if (this.allowNullValues != allowNullValues) {
this.allowNullValues = allowNullValues;
refreshKnownCaches();
}
}
/**
* Whether cached values may be null
*
* @return
*/
public boolean isAllowNullValues() {
return this.allowNullValues;
}
/**
* Set the first-level cache expiry and refresh times (in seconds) per cache name
*
* @param firstCacheSettings
*/
public void setFirstCacheSettings(Map<String, FirstCacheSetting> firstCacheSettings) {
this.firstCacheSettings = (!CollectionUtils.isEmpty(firstCacheSettings) ? new ConcurrentHashMap<>(firstCacheSettings) : null);
}
/**
* Whether the first-level cache is used for the given cache name, defaults to true
*/
public boolean getUsedFirstCache(String name) {
SecondaryCacheSetting secondaryCacheSetting = null;
if (!CollectionUtils.isEmpty(secondaryCacheSettings)) {
secondaryCacheSetting = secondaryCacheSettings.get(name);
}
return secondaryCacheSetting != null ? secondaryCacheSetting.getUsedFirstCache() : true;
}
public void setCaffeineSpec(CaffeineSpec caffeineSpec) {
Caffeine<Object, Object> cacheBuilder = Caffeine.from(caffeineSpec);
if (!ObjectUtils.nullSafeEquals(this.cacheBuilder, cacheBuilder)) {
this.cacheBuilder = cacheBuilder;
refreshKnownCaches();
}
}
private Caffeine<Object, Object> getCaffeine(String name) {
if (!CollectionUtils.isEmpty(firstCacheSettings)) {
FirstCacheSetting firstCacheSetting = firstCacheSettings.get(name);
if (firstCacheSetting != null && StringUtils.hasText(firstCacheSetting.getCacheSpecification())) {
// build the first-level cache from the specification configured for this cache name
return Caffeine.from(CaffeineSpec.parse(firstCacheSetting.getCacheSpecification()));
}
}
return this.cacheBuilder;
}
}
package cn.ibizlab.util.cache.listener;
import cn.ibizlab.util.enums.RedisChannelTopic;
import cn.ibizlab.util.cache.layering.LayeringCache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.Cache;
import org.springframework.cache.CacheManager;
import org.springframework.context.annotation.Profile;
import org.springframework.data.redis.connection.Message;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.listener.adapter.MessageListenerAdapter;
import org.springframework.data.redis.serializer.RedisSerializer;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import java.util.Map;
/**
* Redis message subscriber
*/
@Profile("prod")
@Component
public class RedisMessageListener extends MessageListenerAdapter {
private static final Logger logger = LoggerFactory.getLogger(RedisMessageListener.class);
@Autowired
CacheManager cacheManager;
@Autowired
RedisTemplate redisTemplate;
@Override
public void onMessage(Message message, byte[] pattern) {
super.onMessage(message, pattern);
RedisChannelTopic channelTopic = RedisChannelTopic.getChannelTopicEnum(new String(message.getChannel()));
Map<String, Object> map = null;
RedisSerializer serializer=redisTemplate.getValueSerializer();
Object result=serializer.deserialize(message.getBody());
if(result instanceof Map){
map= (Map<String, Object>) result;
}
if(map == null || channelTopic == null || (!map.containsKey("cacheName"))){
logger.info("Failed to parse the cache message: channel or cache name is missing!");
return ;
}
logger.debug("Redis subscriber received a message on channel [{}]. Payload: {}", channelTopic.getChannelTopicStr(), result.toString());
String cacheName = (String) map.get("cacheName");
Object key = map.get("key");
Cache cache = cacheManager.getCache(cacheName);// look up the layered cache by name
if (cache != null && cache instanceof LayeringCache) { // only act on layered caches
switch (channelTopic) {
case REDIS_CACHE_DELETE_TOPIC: // evict this key from the first-level cache
((LayeringCache) cache).getFirstCache().evict(key);
logger.debug("evicted key {} from first-level cache {}", key, cacheName);
break;
case REDIS_CACHE_CLEAR_TOPIC:// clear the whole first-level cache on this node
((LayeringCache) cache).getFirstCache().clear();
logger.debug("cleared first-level cache {}", cacheName);
break;
default:
logger.debug("Received a message on an unrecognized channel");
break;
}
}
}
}
package cn.ibizlab.util.cache.listener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.redis.core.RedisOperations;
import org.springframework.data.redis.listener.ChannelTopic;
/**
* Redis message publisher
*/
public class RedisPublisher {
private static final Logger logger = LoggerFactory.getLogger(RedisPublisher.class);
RedisOperations<? extends Object, ? extends Object> redisOperations;
/**
* Channel name
*/
ChannelTopic channelTopic;
/**
* @param redisOperations Redis client operations
* @param channelTopic channel name
*/
public RedisPublisher(RedisOperations<? extends Object, ? extends Object> redisOperations, ChannelTopic channelTopic) {
this.channelTopic = channelTopic;
this.redisOperations = redisOperations;
}
/**
* Publish a message to the channel
*
* @param message message payload
*/
public void publisher(Object message) {
redisOperations.convertAndSend(channelTopic.toString(), message);
logger.debug("Redis publisher sent a message to channel [{}]: [{}]", channelTopic.toString(), message.toString());
}
}
package cn.ibizlab.util.cache.redis;
import org.springframework.data.redis.cache.RedisCache;
import org.springframework.data.redis.cache.RedisCacheConfiguration;
import org.springframework.data.redis.cache.RedisCacheWriter;
/**
* Customized Redis cache
*/
public class CustomizedRedisCache extends RedisCache {
public CustomizedRedisCache(String name, RedisCacheWriter redisCacheWriter,RedisCacheConfiguration configuration) {
super(name, redisCacheWriter, configuration);
}
}
package cn.ibizlab.util.cache.redis;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.serializer.SerializerFeature;
import org.springframework.data.redis.serializer.RedisSerializer;
import org.springframework.data.redis.serializer.SerializationException;
import java.nio.charset.Charset;
/**
* Value serializer based on fastjson
*
* @author /
* @param <T>
*/
public class FastJsonRedisSerializer<T> implements RedisSerializer<T>
{
public static final Charset DEFAULT_CHARSET = Charset.forName("UTF-8");
private Class<T> clazz;
public FastJsonRedisSerializer(Class<T> clazz) {
super();
this.clazz = clazz;
}
@Override
public byte[] serialize(T t) throws SerializationException
{
if (t == null) {
return new byte[0];
}
return JSON.toJSONString(t, SerializerFeature.WriteClassName).getBytes(DEFAULT_CHARSET);
}
@Override
public T deserialize(byte[] bytes) throws SerializationException
{
if (bytes == null || bytes.length <= 0) {
return null;
}
String str = new String(bytes, DEFAULT_CHARSET);
return (T) JSON.parseObject(str, clazz);
}
}
package cn.ibizlab.util.cache.redis;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.redis.serializer.RedisSerializer;
import org.springframework.data.redis.serializer.SerializationException;
import java.io.ByteArrayOutputStream;
/**
* Kryo-based serializer/deserializer for Redis values
* @param <T>
*/
public class KryoRedisSerializer<T> implements RedisSerializer<T> {
Logger logger = LoggerFactory.getLogger(KryoRedisSerializer.class);
public static final byte[] EMPTY_BYTE_ARRAY = new byte[0];
private static final ThreadLocal<Kryo> kryos = ThreadLocal.withInitial(Kryo::new);
private Class<T> clazz;
public KryoRedisSerializer(Class<T> clazz) {
super();
this.clazz = clazz;
}
@Override
public byte[] serialize(T t) throws SerializationException {
if (t == null) {
return EMPTY_BYTE_ARRAY;
}
Kryo kryo = kryos.get();
kryo.setReferences(false);
kryo.register(clazz);
try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
Output output = new Output(baos)) {
kryo.writeClassAndObject(output, t);
output.flush();
return baos.toByteArray();
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
return EMPTY_BYTE_ARRAY;
}
@Override
public T deserialize(byte[] bytes) throws SerializationException {
if (bytes == null || bytes.length <= 0) {
return null;
}
Kryo kryo = kryos.get();
kryo.setReferences(false);
kryo.register(clazz);
try (Input input = new Input(bytes)) {
return (T) kryo.readClassAndObject(input);
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
return null;
}
}
\ No newline at end of file
package cn.ibizlab.util.cache.redis;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.parser.ParserConfig;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.data.redis.RedisProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cache.annotation.CachingConfigurerSupport;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.cache.interceptor.KeyGenerator;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.cache.RedisCacheConfiguration;
import org.springframework.data.redis.core.RedisOperations;
import org.springframework.data.redis.serializer.RedisSerializationContext;
import java.time.Duration;
@Slf4j
@Configuration
@ConditionalOnClass(RedisOperations.class)
@EnableConfigurationProperties(RedisProperties.class)
public class RedisConfig extends CachingConfigurerSupport {
@Value("${spring.cache.redis.time-to-live:3600}")
private long timetolive;
/**
* Sets the default expiry for Redis cache entries (the property default above is 3600 seconds)
* and the serialization used for @Cacheable values
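* Example override (assumed application.yml; the @Value above reads the value as seconds):
* <pre>
*   spring:
*     cache:
*       redis:
*         time-to-live: 86400
* </pre>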
* @return
*/
@Bean
public RedisCacheConfiguration redisCacheConfiguration(){
FastJsonRedisSerializer<Object> fastJsonRedisSerializer = new FastJsonRedisSerializer<>(Object.class);
RedisCacheConfiguration configuration = RedisCacheConfiguration.defaultCacheConfig();
ParserConfig.getGlobalInstance().setAutoTypeSupport(true);
configuration = configuration.serializeValuesWith(RedisSerializationContext.SerializationPair.fromSerializer(fastJsonRedisSerializer)).entryTtl(Duration.ofSeconds(timetolive));
return configuration;
}
/**
* Custom cache key generation strategy
* Usage: @Cacheable(keyGenerator="keyGenerator")
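* Illustrative key (class name and digits assumed): a call to BookService.findBook("abc") produces
* "cn.ibizlab.demo.BookServicefindBook" followed by the hashCode of each argument's JSON form,
* e.g. "cn.ibizlab.demo.BookServicefindBook-123456789".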
* @return
*/
@Bean
@Override
public KeyGenerator keyGenerator() {
return (target, method, params) -> {
StringBuilder sb = new StringBuilder();
sb.append(target.getClass().getName());
sb.append(method.getName());
for (Object obj : params) {
// arguments can differ between calls, so append each argument's JSON hashCode to keep cache keys distinct per argument set
sb.append(JSON.toJSONString(obj).hashCode());
}
return sb.toString();
};
}
}
\ No newline at end of file
package cn.ibizlab.util.cache.redis;
import com.alibaba.fastjson.JSON;
import org.springframework.data.redis.serializer.RedisSerializer;
import org.springframework.util.Assert;
import java.nio.charset.Charset;
/**
* The key serializer must be overridden, otherwise keys written by @Cacheable fail with a type conversion error
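* Illustrative effect: serialize("books::1") writes the bytes of books::1 (the surrounding JSON quotes
* are stripped), so keys generated by @Cacheable remain readable plain strings in Redis.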
*/
public class StringRedisSerializer implements RedisSerializer<Object> {
private final Charset charset;
private final String target = "\"";
private final String replacement = "";
public StringRedisSerializer() {
this(Charset.forName("UTF8"));
}
public StringRedisSerializer(Charset charset) {
Assert.notNull(charset, "Charset must not be null!");
this.charset = charset;
}
@Override
public String deserialize(byte[] bytes) {
return (bytes == null ? null : new String(bytes, charset));
}
@Override
public byte[] serialize(Object object) {
String string = JSON.toJSONString(object);
if (string == null) {
return null;
}
string = string.replace(target, replacement);
return string.getBytes(charset);
}
}
package cn.ibizlab.util.cache.setting;
import com.github.benmanes.caffeine.cache.CaffeineSpec;
public class FirstCacheSetting {
/**
* First-level cache settings; for the supported options see {@link CaffeineSpec#configure(String, String)}
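* Example specification string (values assumed): "initialCapacity=5,maximumSize=500,expireAfterWrite=60s"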
* @param cacheSpecification
*/
public FirstCacheSetting(String cacheSpecification) {
this.cacheSpecification = cacheSpecification;
}
private String cacheSpecification;
public String getCacheSpecification() {
return cacheSpecification;
}
}
package cn.ibizlab.util.cache.setting;
public class SecondaryCacheSetting {
/**
* @param expirationSecondTime time-to-live of the Redis cache, in seconds
* @param preloadSecondTime automatic refresh lead time of the Redis cache, in seconds
*/
public SecondaryCacheSetting(long expirationSecondTime, long preloadSecondTime) {
this.expirationSecondTime = expirationSecondTime;
this.preloadSecondTime = preloadSecondTime;
}
/**
* @param usedFirstCache whether the first-level cache is used, defaults to true
* @param expirationSecondTime time-to-live of the Redis cache, in seconds
* @param preloadSecondTime automatic refresh lead time of the Redis cache, in seconds
*/
public SecondaryCacheSetting(boolean usedFirstCache, long expirationSecondTime, long preloadSecondTime) {
this.expirationSecondTime = expirationSecondTime;
this.preloadSecondTime = preloadSecondTime;
this.usedFirstCache = usedFirstCache;
}
/**
* @param expirationSecondTime time-to-live of the Redis cache, in seconds
* @param preloadSecondTime automatic refresh lead time of the Redis cache, in seconds
* @param forceRefresh whether to force a refresh (hit the database), defaults to false
*/
public SecondaryCacheSetting(long expirationSecondTime, long preloadSecondTime, boolean forceRefresh) {
this.expirationSecondTime = expirationSecondTime;
this.preloadSecondTime = preloadSecondTime;
this.forceRefresh = forceRefresh;
}
/**
* @param expirationSecondTime time-to-live of the Redis cache, in seconds
* @param preloadSecondTime automatic refresh lead time of the Redis cache, in seconds
* @param usedFirstCache whether the first-level cache is used, defaults to true
* @param forceRefresh whether to force a refresh (hit the database), defaults to false
*/
public SecondaryCacheSetting(long expirationSecondTime, long preloadSecondTime, boolean usedFirstCache, boolean forceRefresh) {
this.expirationSecondTime = expirationSecondTime;
this.preloadSecondTime = preloadSecondTime;
this.usedFirstCache = usedFirstCache;
this.forceRefresh = forceRefresh;
}
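// Illustrative usage (values assumed): new SecondaryCacheSetting(3600, 300, true, false) keeps entries
// in Redis for 3600s, refreshes them 300s before expiry, uses the first-level cache, and does not
// force a reload from the database.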
/**
* Cache time-to-live
*/
private long expirationSecondTime;
/**
* Lead time before expiry at which the cache is proactively refreshed
* Unit: seconds
*/
private long preloadSecondTime = 0;
/**
* Whether the first-level cache is used, defaults to true
*/
private boolean usedFirstCache = true;
/**
* Whether to force a refresh (hit the database), defaults to false
*/
private boolean forceRefresh = false;
public long getPreloadSecondTime() {
return preloadSecondTime;
}
public long getExpirationSecondTime() {
return expirationSecondTime;
}
public boolean getUsedFirstCache() {
return usedFirstCache;
}
public boolean getForceRefresh() {
return forceRefresh;
}
}
package cn.ibizlab.util.enums;
import org.springframework.data.redis.listener.ChannelTopic;
/**
* Redis channel topics
*/
public enum RedisChannelTopic {
REDIS_CACHE_DELETE_TOPIC("redis:cache:delete:topic", "channel for redis cache entry deletion messages"),
REDIS_CACHE_CLEAR_TOPIC("redis:cache:clear:topic", "channel for redis cache clear messages");
String channelTopic;
String label;
RedisChannelTopic(String channelTopic, String label) {
this.channelTopic = channelTopic;
this.label = label;
}
public ChannelTopic getChannelTopic() {
return new ChannelTopic(channelTopic);
}
public static RedisChannelTopic getChannelTopicEnum(String channelTopic) {
for (RedisChannelTopic topic : RedisChannelTopic.values()) {
if (topic.getChannelTopicStr().equals(channelTopic)) {
return topic;
}
}
return null;
}
public String getChannelTopicStr() {
return channelTopic;
}
}
@@ -32,7 +32,7 @@ public class PermissionSyncJob implements ApplicationRunner {
@Value("${ibiz.enablePermissionValid:false}")
boolean enablePermissionValid; //whether permission validation is enabled
@Value("${ibiz.systemid:0A91C1B1-3B67-4EDA-9572-5DB491871FEE}")
@Value("${ibiz.systemid:2C40DFCD-0DF5-47BF-91A5-C45F810B0001}")
private String systemId;
@Override
......