Implement a RocksDB-backed cache and make RocksDB the default cache implementation

pull/9/head
陈精华 6 years ago committed by kl
parent 7e8de7c754
commit 68aa5db66b

@@ -156,6 +156,11 @@
<artifactId>concurrentlinkedhashmap-lru</artifactId>
<version>1.4.2</version>
</dependency>
<dependency>
<groupId>org.rocksdb</groupId>
<artifactId>rocksdbjni</artifactId>
<version>5.17.2</version>
</dependency>
</dependencies>
<build>
<resources>

@@ -21,10 +21,11 @@ spring.http.multipart.max-file-size=100MB
#openoffice home path
#office.home = C:\\Program Files (x86)\\OpenOffice 4
#Cache implementation type. If not set, it defaults to the JDK implementation; it can be set to the redis implementation, which requires spring.redisson.address and related settings
#Cache implementation type. If not set, it defaults to the embedded RocksDB implementation; it can be set to the redis implementation (type = redis), which requires spring.redisson.address and related settings, or to the JDK in-memory implementation (type = jdk)
#cache.type = redis
#redis connection
#spring.redisson.address = 192.168.1.204:6379
#spring.redisson.password = xxx
####################################### The settings below can be changed dynamically at runtime #######################################
#Text file types; the defaults are listed below and custom types can be added
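
For reference, a minimal sketch of the three configurations the comment above describes, reusing the placeholder redisson values already shown in this file:

#embedded RocksDB cache (the default, nothing to configure)
#cache.type = default

#in-memory JDK cache
#cache.type = jdk

#redis cache, requires a reachable redisson server
#cache.type = redis
#spring.redisson.address = 192.168.1.204:6379
#spring.redisson.password = xxx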

@@ -3,6 +3,7 @@ package cn.keking.service.cache.impl;
import cn.keking.service.cache.CacheService;
import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap;
import com.googlecode.concurrentlinkedhashmap.Weighers;
import org.rocksdb.RocksDB;
import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
import org.springframework.stereotype.Service;
@@ -17,7 +18,7 @@ import java.util.concurrent.BlockingQueue;
* @description
*/
@Service
@ConditionalOnExpression("'${cache.type:default}'.equals('default')")
@ConditionalOnExpression("'${cache.type:default}'.equals('jdk')")
public class CacheServiceJDKImpl implements CacheService {
private Map<String, String> pdfCache;

@@ -0,0 +1,170 @@
package cn.keking.service.cache.impl;

import cn.keking.service.cache.CacheService;
import org.artofsolving.jodconverter.office.OfficeUtils;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
import org.springframework.stereotype.Service;

import java.io.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

/**
 * @author: chenjh
 * @time: 2019/4/22 11:02
 * @description
 */
@ConditionalOnExpression("'${cache.type:default}'.equals('default')")
@Service
public class CacheServiceRocksDBImpl implements CacheService {

    static {
        RocksDB.loadLibrary();
    }

    private static final String DB_PATH = OfficeUtils.getHomePath() + File.separator + "cache";
    private static final int QUEUE_SIZE = 500000;
    private static final Logger LOGGER = LoggerFactory.getLogger(CacheServiceRocksDBImpl.class);

    private BlockingQueue<String> blockingQueue = new ArrayBlockingQueue<>(QUEUE_SIZE);
    private RocksDB db;

    {
        try {
            db = RocksDB.open(DB_PATH);
            // seed both cache entries with empty maps so later reads never see null
            if (db.get(REDIS_FILE_PREVIEW_PDF_KEY.getBytes()) == null) {
                Map<String, String> initPDFCache = new HashMap<>();
                db.put(REDIS_FILE_PREVIEW_PDF_KEY.getBytes(), toByteArray(initPDFCache));
            }
            if (db.get(REDIS_FILE_PREVIEW_IMGS_KEY.getBytes()) == null) {
                Map<String, List<String>> initIMGCache = new HashMap<>();
                db.put(REDIS_FILE_PREVIEW_IMGS_KEY.getBytes(), toByteArray(initIMGCache));
            }
        } catch (RocksDBException | IOException e) {
            LOGGER.error("Unable to init RocksDB", e);
        }
    }

    @Override
    public void initPDFCachePool(Integer capacity) {
        // capacity is not used: RocksDB manages its own storage
    }

    @Override
    public void initIMGCachePool(Integer capacity) {
        // capacity is not used: RocksDB manages its own storage
    }

    @Override
    public void putPDFCache(String key, String value) {
        try {
            // merge into the existing map; the whole map is stored under a single key
            Map<String, String> pdfCacheItem = getPDFCache();
            pdfCacheItem.put(key, value);
            db.put(REDIS_FILE_PREVIEW_PDF_KEY.getBytes(), toByteArray(pdfCacheItem));
        } catch (RocksDBException | IOException e) {
            LOGGER.error("Put into RocksDB Exception", e);
        }
    }

    @Override
    public void putImgCache(String key, List<String> value) {
        try {
            // merge into the existing map and store it under the image key
            Map<String, List<String>> imgCacheItem = getImgCache();
            imgCacheItem.put(key, value);
            db.put(REDIS_FILE_PREVIEW_IMGS_KEY.getBytes(), toByteArray(imgCacheItem));
        } catch (RocksDBException | IOException e) {
            LOGGER.error("Put into RocksDB Exception", e);
        }
    }

    @Override
    public Map<String, String> getPDFCache() {
        Map<String, String> result = new HashMap<>();
        try {
            result = (Map<String, String>) toObject(db.get(REDIS_FILE_PREVIEW_PDF_KEY.getBytes()));
        } catch (RocksDBException | IOException | ClassNotFoundException e) {
            LOGGER.error("Get from RocksDB Exception", e);
        }
        return result;
    }

    @Override
    public String getPDFCache(String key) {
        String result = "";
        try {
            Map<String, String> map = (Map<String, String>) toObject(db.get(REDIS_FILE_PREVIEW_PDF_KEY.getBytes()));
            result = map.get(key);
        } catch (RocksDBException | IOException | ClassNotFoundException e) {
            LOGGER.error("Get from RocksDB Exception", e);
        }
        return result;
    }

    @Override
    public Map<String, List<String>> getImgCache() {
        Map<String, List<String>> result = new HashMap<>();
        try {
            result = (Map<String, List<String>>) toObject(db.get(REDIS_FILE_PREVIEW_IMGS_KEY.getBytes()));
        } catch (RocksDBException | IOException | ClassNotFoundException e) {
            LOGGER.error("Get from RocksDB Exception", e);
        }
        return result;
    }

    @Override
    public List<String> getImgCache(String key) {
        List<String> result = new ArrayList<>();
        try {
            Map<String, List<String>> map = (Map<String, List<String>>) toObject(db.get(REDIS_FILE_PREVIEW_IMGS_KEY.getBytes()));
            result = map.get(key);
        } catch (RocksDBException | IOException | ClassNotFoundException e) {
            LOGGER.error("Get from RocksDB Exception", e);
        }
        return result;
    }

    @Override
    public void addQueueTask(String url) {
        blockingQueue.add(url);
    }

    @Override
    public String takeQueueTask() throws InterruptedException {
        return blockingQueue.take();
    }

    private byte[] toByteArray(Object obj) throws IOException {
        // standard JDK serialization of the whole cache map
        try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
             ObjectOutputStream oos = new ObjectOutputStream(bos)) {
            oos.writeObject(obj);
            oos.flush();
            return bos.toByteArray();
        }
    }

    private Object toObject(byte[] bytes) throws IOException, ClassNotFoundException {
        // inverse of toByteArray
        try (ByteArrayInputStream bis = new ByteArrayInputStream(bytes);
             ObjectInputStream ois = new ObjectInputStream(bis)) {
            return ois.readObject();
        }
    }
}
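
For context, here is a minimal, self-contained sketch (not part of this commit) of the storage pattern the class above relies on: an entire Map is JDK-serialized and kept under a single RocksDB key, so every put rewrites the whole map. The class name RocksDbMapDemo, the database path, and the demo key/values are illustrative assumptions; the RocksDB calls (loadLibrary, open, put, get) are the same ones used above.

import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;

import java.io.*;
import java.util.HashMap;
import java.util.Map;

public class RocksDbMapDemo {

    public static void main(String[] args) throws IOException, RocksDBException, ClassNotFoundException {
        RocksDB.loadLibrary();                                  // load the native library once per JVM
        byte[] cacheKey = "demo:pdf-cache".getBytes();          // hypothetical cache key
        try (RocksDB db = RocksDB.open("rocksdb-demo-cache")) { // creates the directory if missing
            Map<String, String> cache = new HashMap<>();
            cache.put("demo.docx", "demo.pdf");
            db.put(cacheKey, toBytes(cache));                   // store the serialized map

            @SuppressWarnings("unchecked")
            Map<String, String> loaded = (Map<String, String>) fromBytes(db.get(cacheKey));
            System.out.println(loaded.get("demo.docx"));        // prints demo.pdf
        }
    }

    private static byte[] toBytes(Object obj) throws IOException {
        try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
             ObjectOutputStream oos = new ObjectOutputStream(bos)) {
            oos.writeObject(obj);
            oos.flush();              // drain the ObjectOutputStream buffer before reading bos
            return bos.toByteArray();
        }
    }

    private static Object fromBytes(byte[] bytes) throws IOException, ClassNotFoundException {
        try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes))) {
            return ois.readObject();
        }
    }
}

Because the whole map travels through serialization on every read and write, this stays simple but does proportionally more work as the cache grows; storing one RocksDB key per cached file would avoid that, at the cost of iterating keys when listing the cache.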