Upgrade: support for the latest redkale version

This commit is contained in:
梁显优 2023-05-06 19:02:20 +08:00
parent 8316fce151
commit 4a7b6c33e4
50 changed files with 10012 additions and 7659 deletions

View File

@ -0,0 +1,13 @@
redkale.name=zhub-dev
redkale.port=6560
redkale.server[0].protocol=HTTP
redkale.server[0].host=127.0.0.1
redkale.server[0].port=80
# redkale.server[0].root = root
redkale.server[0].rest.autoload=true
redkale.server[0].rest.path=
redkale.server[0].services[0].autoload=true
# zhub
redkale.cluster.zhub[hub].addr=47.111.150.118:6066
redkale.cluster.zhub[hub].auth=zchd@123456
redkale.cluster.zhub[hub].groupid=venue-zhub
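
For reference, the zhub[hub] entries above register a ZHubClient under the resource name "hub". A minimal usage sketch follows (the DemoService class is hypothetical; it assumes the injected client exposes IProducer.publish as implemented by ZHubClient later in this commit):

package com.zdemo.demo;

import com.zdemo.zhub.ZHubClient;
import org.redkale.annotation.Resource;
import org.redkale.service.Service;
import org.redkale.util.AnyValue;

// Hypothetical sketch: inject the client registered under resource name "hub".
public class DemoService implements Service {

    @Resource(name = "hub")
    private ZHubClient hub; // resolved from the redkale.cluster.zhub[hub].* entries above

    @Override
    public void init(AnyValue config) {
        // IProducer.publish(topic, value), as implemented by ZHubClient in this commit
        hub.publish("demo-topic", "hello");
    }
}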

View File

@ -1,26 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<application port="2001" name="xxx">
<zhubs>
<!--<zhub name="zhub" addr="47.111.150.118" port="6066" groupid="group-zhub"/>-->
<zhub name="zhub" addr="127.0.0.1:1216" groupid="group-zhub" auth="zchd@123456"/>
</zhubs>
<resources>
<properties load="config.properties"></properties>
<listener value="com.zdemo.ZhubListener"/>
</resources>
<server protocol="HTTP" port="80">
<request>
<remoteaddr value="request.headers.X-Real-IP"/>
</request>
<rest autoload="true" path="/"/>
<services autoload="true"/>
<servlets path="/"/>
</server>
</application>

View File

@ -1,19 +0,0 @@
# Producer
#bootstrap.servers=47.111.150.118:6062
#bootstrap.servers=121.196.17.55:6062
bootstrap.servers=39.108.56.246:9092
#bootstrap.servers=122.112.180.156:6062
acks=all
retries=0
batch.size=16384
linger.ms=1
buffer.memory=33554432
key.serializer=org.apache.kafka.common.serialization.StringSerializer
value.serializer=org.apache.kafka.common.serialization.StringSerializer
# Consumer
enable.auto.commit=true
auto.commit.interval.ms=1000
group.id=
key.deserializer=org.apache.kafka.common.serialization.StringDeserializer
value.deserializer=org.apache.kafka.common.serialization.StringDeserializer

conf/logging.properties (new file, 18 lines)
View File

@ -0,0 +1,18 @@
handlers=java.util.logging.ConsoleHandler
# handlers = java.util.logging.FileHandler
############################################################
.level=FINEST
java.level=INFO
javax.level=INFO
com.sun.level=INFO
sun.level=INFO
jdk.level=INFO
java.util.logging.FileHandler.level=FINER
#10M
java.util.logging.FileHandler.limit=10M
java.util.logging.FileHandler.count=20
java.util.logging.FileHandler.encoding=UTF-8
java.util.logging.FileHandler.pattern=${APP_HOME}/logs-%tY%tm/log-%tY%tm%td.log
java.util.logging.FileHandler.unusual=${APP_HOME}/logs-%tY%tm/log-warnerr-%tY%tm%td.log
java.util.logging.FileHandler.append=true
java.util.logging.ConsoleHandler.level=FINEST

conf/source.properties (new file, 5 lines)
View File

@ -0,0 +1,5 @@
############ ClusterSource @Resource(name="hub") ############
# redkale.cluster.zhub[hub].addr = 47.111.150.118:6066
# redkale.cluster.zhub[hub].auth = zchd@123456
# redkale.cluster.zhub[hub].groupid = venue-zhub

View File

@ -1,6 +1,7 @@
package com.zdemo;
import org.redkale.convert.json.JsonConvert;
import org.redkale.util.Resourcable;
import org.redkale.util.TypeToken;
import javax.annotation.Resource;
@ -13,7 +14,7 @@ import java.util.function.Consumer;
* @author Liang
* @date 2020-09-05 23:18
*/
public abstract class AbstractConsumer implements IConsumer {
public abstract class AbstractConsumer extends ZhubAgentProvider implements IConsumer, Resourcable {
protected JsonConvert convert = JsonConvert.root();
@ -24,10 +25,6 @@ public abstract class AbstractConsumer implements IConsumer {
protected abstract String getGroupid();
protected boolean preInit() {
return true;
}
protected final Set<String> getTopics() {
if (!eventMap.isEmpty()) {
return eventMap.keySet();
@ -76,4 +73,10 @@ public abstract class AbstractConsumer implements IConsumer {
}
}
// --------------
@Override
public String resourceName() {
return super.getName();
}
}

View File

@ -0,0 +1,69 @@
package com.zdemo;
import org.redkale.boot.Application;
import org.redkale.boot.NodeServer;
import org.redkale.cluster.CacheClusterAgent;
import org.redkale.cluster.ClusterAgent;
import org.redkale.service.Service;
import org.redkale.util.ResourceEvent;
import java.net.InetSocketAddress;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
public abstract class ZhubAgentProvider extends ClusterAgent {
@Override
public void onResourceChange(ResourceEvent[] events) {
}
@Override
public void register(Application application) {
}
@Override
public void deregister(Application application) {
}
@Override
public CompletableFuture<Map<String, Set<InetSocketAddress>>> queryMqtpAddress(String protocol, String module, String resname) {
return null;
}
@Override
public CompletableFuture<Set<InetSocketAddress>> queryHttpAddress(String protocol, String module, String resname) {
return null;
}
@Override
public CompletableFuture<Set<InetSocketAddress>> querySncpAddress(String protocol, String restype, String resname) {
return null;
}
@Override
protected CompletableFuture<Set<InetSocketAddress>> queryAddress(ClusterEntry entry) {
return null;
}
@Override
protected ClusterEntry register(NodeServer ns, String protocol, Service service) {
deregister(ns, protocol, service);
ClusterEntry clusterEntry = new ClusterEntry(ns, protocol, service);
CacheClusterAgent.AddressEntry entry = new CacheClusterAgent.AddressEntry();
entry.addr = clusterEntry.address;
entry.resname = clusterEntry.resourceName;
entry.nodeid = this.nodeid;
entry.time = System.currentTimeMillis();
//source.hset(clusterEntry.serviceName, clusterEntry.serviceid, CacheClusterAgent.AddressEntry.class, entry);
return clusterEntry;
}
@Override
protected void deregister(NodeServer ns, String protocol, Service service) {
}
}

View File

@ -1,3 +1,4 @@
/*
package com.zdemo;
import org.redkale.boot.Application;
@ -10,11 +11,14 @@ import org.redkale.util.ResourceFactory;
import java.lang.reflect.InvocationTargetException;
import java.util.concurrent.CompletableFuture;
*/
/**
* Service listener
*
* @author: liangxy.
*/
*//*
@Deprecated
public class ZhubListener implements ApplicationListener {
@Override
@ -47,3 +51,4 @@ public class ZhubListener implements ApplicationListener {
}
}
*/

View File

@ -0,0 +1,21 @@
package com.zdemo;
import com.zdemo.zhub.ZHubClient;
import org.redkale.annotation.Priority;
import org.redkale.cluster.ClusterAgent;
import org.redkale.cluster.ClusterAgentProvider;
import org.redkale.util.AnyValue;
@Priority(1)
public class ZhubProvider implements ClusterAgentProvider {
@Override
public boolean acceptsConf(AnyValue config) {
return new ZHubClient().acceptsConf(config);
}
@Override
public ClusterAgent createInstance() {
return new ZHubClient();
}
}
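
ZhubProvider is presumably discovered through the ServiceLoader mechanism redkale uses for ClusterAgentProvider implementations; if so, the jar would also carry a provider-configuration entry along these lines (a sketch assuming standard ServiceLoader registration; this file is not part of the diff):

# META-INF/services/org.redkale.cluster.ClusterAgentProvider
com.zdemo.ZhubProvider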

File diff suppressed because it is too large

View File

@ -1,262 +0,0 @@
package com.zdemo.cache_;
import com.zdemo.cachex.MyRedisCacheSource;
import org.redkale.convert.json.JsonFactory;
import org.redkale.net.AsyncIOGroup;
import org.redkale.util.AnyValue;
import org.redkale.util.ResourceFactory;
import java.util.Map;
import static org.redkale.boot.Application.RESNAME_APP_GROUP;
public class RedisTest {
static MyRedisCacheSource<String> source;
static MyRedisCacheSource<Integer> sourceInt;
static {
AnyValue.DefaultAnyValue conf = new AnyValue.DefaultAnyValue().addValue("maxconns", "10");
conf.addValue("node", new AnyValue.DefaultAnyValue().addValue("addr", "47.111.150.118").addValue("port", "6064").addValue("password", "*Zhong9307!").addValue("db", 1));
final AsyncIOGroup asyncGroup = new AsyncIOGroup(8192, 16);
asyncGroup.start();
ResourceFactory.root().register(RESNAME_APP_GROUP, asyncGroup);
source = new MyRedisCacheSource();
ResourceFactory.root().inject(source);
source.init(null);
source.defaultConvert = JsonFactory.root().getConvert();
source.init(conf);
// int
sourceInt = new MyRedisCacheSource<Integer>();
ResourceFactory.root().inject(sourceInt);
sourceInt.init(null);
sourceInt.defaultConvert = JsonFactory.root().getConvert();
sourceInt.init(conf);
sourceInt.initValueType(Integer.class);
}
public static void main(String[] args) {
//System.out.println(source.remove("a", "b"));
// bit
/*source.initValueType(Integer.class);
source.remove("a");
boolean a = source.getBit("a", 1);
System.out.println(a);
source.setBit("a", 1, true);
a = source.getBit("a", 1);
System.out.println("bit-a-1: " + a);
source.setBit("a", 1, false);
a = source.getBit("a", 1);
System.out.println("bit-a-1: " + a);*/
/*source.remove("a");
// setnx
System.out.println(source.setnx("a", 1));
source.remove("a");
System.out.println(source.setnx("a", 1));
// set
source.remove("abx1");
source.appendSetItems("abx1", "a", "b", "c");
List<String> list = source.srandomItems("abx1", 2);
String str = source.srandomItem("abx1"); //r
System.out.println(list);//[r1, r2] */
/*int[] arr = {0};
ExecutorService executor = Executors.newFixedThreadPool(10);
CountDownLatch latch = new CountDownLatch(1000);
for (int i = 0; i < 1000; i++) {
executor.submit(() -> {
try {
source.lock("c", 1000);
arr[0]++;
// System.out.println("Thread: " + Thread.currentThread().getName());
// Thread.sleep(10);
} catch (Exception e) {
e.printStackTrace();
} finally {
source.unlock("c");
latch.countDown();
}
});
}
try {
latch.await();
System.out.println("n=" + arr[0]);
executor.shutdown();
} catch (InterruptedException e) {
e.printStackTrace();
}*/
/*List<String> list = (List) source.getCollection("gamerank-comment-stat");
System.out.println(list);*/
/*for (int i = 0; i < 10; i++) {
String brpop = source.brpop("z", 2);
System.out.println(brpop);
}*/
// key tests
/*source.set("a", "123321");
System.out.println(source.get("a")); // 123321
System.out.println(source.getTtl("a")); // -1
System.out.println(source.getPttl("a")); // -1
System.out.println(source.getPttl("x")); // -2*/
// hashmap tests
/*source.remove("sk");
source.setHm("sk", "a", "1");
source.setHm("sk", "b", "2");
System.out.println(source.getHm("sk", "a")); // 1
source.remove("sk");
source.setHms("sk", Map.of("b", "5", "c", "3", "a", "1"));
source.hdel("sk", "a");
Map map = source.getHms("sk", "a", "x", "b", "c", "f"); // {b=5, c=3}
System.out.println(map);
System.out.println(source.getHmall("sk")); //{b=5, c=3}
System.out.println(source.incrHm("sk", "b", 1.1d)); // b = 6.1
System.out.println(source.incrHm("sk", "c", 1)); // c = 4
System.out.println(source.getHmall("sk")); //{b=6.1, c=4}
System.out.println("--------------");
System.out.println(source.hexists("sk", "b")); // true
System.out.println(source.getCollectionSize("sk")); // 2*/
Map<String, String> hms = source.getHms("supportusers", "5-kfeu0f", "xxxx", "3-0kbt7u8t", "95q- ");
hms.forEach((k, v) -> {
System.out.println(k + " : " + v);
});
/*MyRedisCacheSource<String> source2 = new MyRedisCacheSource();
source2.defaultConvert = JsonFactory.root().getConvert();
source2.initValueType(String.class); // value type: String
source2.init(conf);*/
/*Map<String, String> gcMap = source.getHmall("hot-gamecomment");
gcMap.forEach((k,v) -> {
System.out.println(k + " : " + v);
});*/
//Map<String, String> gameinfo = source.getHms("gameinfo", "22759", "22838", "10097", "22751", "22632", "22711", "22195", "15821", "10099", "16313", "11345", "10534", "22768", "22647", "22924", "18461", "15871", "17099", "22640", "22644", "10744", "10264", "18032", "22815", "13584", "10031", "22818", "22452", "22810", "10513", "10557", "15848", "11923", "15920", "22808", "20073", "22809", "15840", "12332", "15803", "10597", "22624", "17113", "19578", "22664", "22621", "20722", "16226", "10523", "12304", "10597","11923","10031");
//Map<String, String> gameinfo = source.getHms("gameinfo", "22759","22838","10097","22751","22632","22711","22195","15821","10099","16313","11345","10534","22768","22647","22924","18461","15871","17099","22363","22640","22644","10744","10264","18032","22815","13584","22818","22452","22810","10513","10557","15848","15920","22808","20073","22809","15840","12332","15803","10597","22624","17113","19578","22627","22664","22621","20722","16226","10523","12304");
/*gameinfo.forEach((k,v ) -> {
System.out.println(v);
});*/
/*source.queryKeysStartsWith("articlebean:").forEach(x -> {
System.out.println(x);
//source.remove(x);
//System.out.println(source.getHmall(x));
});*/
// list tests
/*sourceInt.remove("list");
Collection<Integer> list = sourceInt.getCollection("list");
System.out.println(list);
for (int i = 1; i <= 10; i++) {
sourceInt.appendListItem("list", i);
}
System.out.println(sourceInt.getCollection("list")); // [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
sourceInt.appendListItems("list", 11, 12, 13);
System.out.println(sourceInt.getCollection("list")); // [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
System.out.println(sourceInt.getCollection("list", 0, 5)); // [1, 2, 3, 4, 5]
System.out.println(sourceInt.getCollectionSize("list")); // 13
List<Integer> ids = new ArrayList<>(100);
for (int i = 0; i < 2000; i++) {
ids.add(i);
}
sourceInt.remove("abx");
sourceInt.appendListItems("abx", ids.toArray(Integer[]::new));
System.out.println(sourceInt.getCollection("abx"));
*/
/*System.out.println(sourceInt.getCollectionSize("recommend-user-quality"));
Collection<Integer> uid = sourceInt.getCollection("recommend-user-quality");
System.out.println(uid);*/
// zset tests
/*source.initValueType(String.class); // value type: String
source.remove("zx");
source.zadd("zx", Map.of("a", 1, "b", 2));
source.zadd("zx", Map.of("a", 1, "c", 5L));
source.zadd("zx", Map.of("x", 20, "j", 3.5));
source.zadd("zx", Map.of("f", System.currentTimeMillis(), "c", 5L));
source.zadd("zx", Map.of("a", 1, "c", 5L));
System.out.println(source.zincr("zx", "a", 1.34)); // 2.34
System.out.println(source.getZsetDoubleScore("zx")); // {f=1592924555704, x=20, c=5, j=3, b=2.34, a=1}
source.zrem("zx", "b", "c", "e", "x");
System.out.println(source.getZsetLongScore("zx")); // {f=1592924555704, j=3, a=2}
System.out.println("--------------");
System.out.println(source.getZsetLongScore("zx", "f"));
System.out.println(source.getZrevrank("zx", "f")); // 0
System.out.println(source.getZrank("zx", "f")); // 2
System.out.println(source.getZrank("zx", "Y")); // -1
System.out.println(source.getCollectionSize("zx")); // 3
System.out.println(source.getZset("zx"));
System.out.println(source.zexists("zx", "f", "x", "a"));*/
/*LocalDate date = LocalDate.of(2019, 12, 31);
for (int i = 0; i < 60; i++) {
LocalDate localDate = date.plusDays(-i);
String day = localDate.format(DateTimeFormatter.ISO_LOCAL_DATE);
System.out.println(String.format("mkdir %s; mv *%s*.zip %s", day, day, day));
}*/
/*MyRedisCacheSource<UserDetail> source = new MyRedisCacheSource();
source.defaultConvert = JsonFactory.root().getConvert();
source.initValueType(UserDetail.class);
source.init(conf);
Map<String, UserDetail> map = source.getHmall("user-detail");
Integer[] array = map.values().stream().map(x -> x.getUserid()).toArray(Integer[]::new);
System.out.println(JsonConvert.root().convertTo(array));
Map<Integer, UserDetail> hms = source.getHms("user-detail", 11746, 11988, 11504, 11987, 11745, 11503, 11748, 11506, 11747, 11989, 11505, 11508, 11507, 11509, 11980, 11740, 11982, 11981, 11984, 11742, 11500, 11983, 11741, 11502, 11744, 11986, 11985, 11501, 11743, 11999, 11757, 11515, 1, 11514, 11998, 11756, 2, 11517, 11516, 11758, 3, 11519, 4, 5, 11518, 6, 7, 11991, 8, 11990, 9, 11993, 11751, 11750, 11992, 11753, 11511, 11995, 11994, 11510, 11752, 11755, 11513, 11997, 11512, 11996, 11754, 11724, 11966, 11965, 11723, 11968, 11726, 11967, 11725, 11728, 11969, 11727, 11729, 11960, 11720, 11962, 11961, 11722, 11964, 11721);
System.out.println(hms.size());*/
/*source.getCollection("article-comment-list", 19, 1).forEach(x -> System.out.println(x));
while (true) {
System.out.println("---" + Utility.now() + "---");
source.getHmall("ck").forEach((k, v) -> {
System.out.println(k + ":" + v);
});
try {
Thread.sleep(60 * 1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}*/
}
}

View File

@ -1,430 +0,0 @@
package com.zdemo.cachex;
import org.redkale.convert.Convert;
import org.redkale.service.Local;
import org.redkale.source.CacheSource;
import org.redkale.util.AutoLoad;
import org.redkale.util.ResourceType;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Stream;
@Local
@AutoLoad(false)
@ResourceType(CacheSource.class)
public class MyRedisCacheSource<V extends Object> extends RedisCacheSource<V> {
//--------------------- oth ------------------------------
public boolean setnx(String key, Object v) {
byte[][] bytes = Stream.of(key, v).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
Serializable rs = send("SETNX", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
return rs == null ? false : (long) rs == 1;
}
//--------------------- oth ------------------------------
//--------------------- bit ------------------------------
public boolean getBit(String key, int offset) {
byte[][] bytes = Stream.of(key, offset).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
Serializable v = send("GETBIT", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
return v == null ? false : (long) v == 1;
}
public void setBit(String key, int offset, boolean bool) {
byte[][] bytes = Stream.of(key, offset, bool ? 1 : 0).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
send("SETBIT", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
}
//--------------------- bit ------------------------------
//--------------------- lock ------------------------------
// Try to acquire the lock: returns 0 on success, otherwise the remaining TTL (ms) of the existing lock
public int tryLock(String key, int millis) {
byte[][] bytes = Stream.of("" +
"if (redis.call('exists',KEYS[1]) == 0) then " +
"redis.call('psetex', KEYS[1], ARGV[1], 1) " +
"return 0; " +
"else " +
"return redis.call('PTTL', KEYS[1]); " +
"end; ", 1, key, millis).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
int n = (int) send("EVAL", CacheEntryType.OBJECT, (Type) null, null, bytes).join();
return n;
}
// Acquire the lock (blocks until acquired)
public void lock(String key, int millis) {
int i;
do {
i = tryLock(key, millis);
try {
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
} while (i > 0);
}
// Release the lock
public void unlock(String key) {
remove(key);
}
//--------------------- key ------------------------------
public long getTtl(String key) {
return (long) send("TTL", CacheEntryType.OBJECT, (Type) null, key, key.getBytes(StandardCharsets.UTF_8)).join();
}
public long getPttl(String key) {
return (long) send("PTTL", CacheEntryType.OBJECT, (Type) null, key, key.getBytes(StandardCharsets.UTF_8)).join();
}
public int remove(String... keys) {
if (keys == null || keys.length == 0) {
return 0;
}
List<String> para = new ArrayList<>();
para.add("" +
" local args = ARGV;" +
" local x = 0;" +
" for i,v in ipairs(args) do" +
" local inx = redis.call('del', v);" +
" if(inx > 0) then" +
" x = x + 1;" +
" end" +
" end" +
" return x;");
para.add("0");
for (Object field : keys) {
para.add(String.valueOf(field));
}
byte[][] bytes = para.stream().map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
return (int) send("EVAL", CacheEntryType.OBJECT, (Type) null, null, bytes).join();
}
//--------------------- hmget ------------------------------
public <T extends Object> V getHm(String key, T field) {
// return (V) send("HMGET", CacheEntryType.OBJECT, (Type) null, key, key.getBytes(StandardCharsets.UTF_8), field.getBytes(StandardCharsets.UTF_8)).join();
Map<Object, V> map = getHms(key, field);
return map.get(field);
}
public <T extends Object> Map<T, V> getHms(String key, T... field) {
if (field == null || field.length == 0) {
return new HashMap<>();
}
byte[][] bytes = Stream.concat(Stream.of(key), Stream.of(field)).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
Map<T, V> result = new HashMap<>();
List<V> vs = (List) send("HMGET", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
for (int i = 0; i < field.length; i++) { // /*vs != null && vs.size() > i &&*/
if (vs.get(i) == null) {
continue;
}
result.put(field[i], vs.get(i));
}
return result;
}
public Map<String, V> getHmall(String key) {
List<V> vs = (List) send("HGETALL", CacheEntryType.OBJECT, (Type) null, key, key.getBytes(StandardCharsets.UTF_8)).join();
Map<String, V> result = new HashMap<>(vs.size() / 2);
for (int i = 0; i < vs.size(); i += 2) {
result.put(String.valueOf(vs.get(i)), vs.get(i + 1));
}
return result;
}
//--------------------- hmset / hmdel / incr ------------------------------
public <T> void setHm(String key, T field, V value) {
byte[][] bytes = Stream.of(key, field, value).map(x -> x.toString().getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
send("HMSET", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
}
public <T> void setHms(String key, Map<T, V> kv) {
List<String> args = new ArrayList();
args.add(key);
kv.forEach((k, v) -> {
args.add(String.valueOf(k));
args.add(String.valueOf(v));
});
byte[][] bytes = args.stream().map(x -> x.getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
send("HMSET", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
}
public <T> Long incrHm(String key, T field, long n) {
byte[][] bytes = Stream.of(key, String.valueOf(field), String.valueOf(n)).map(x -> x.getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
return (Long) send("HINCRBY", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
}
public <T> Double incrHm(String key, T field, double n) {
byte[][] bytes = Stream.of(key, String.valueOf(field), String.valueOf(n)).map(x -> x.getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
Serializable v = send("HINCRBYFLOAT", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
if (v == null) {
return null;
}
return Double.parseDouble(String.valueOf(v));
}
public <T> void hdel(String key, T... field) {
byte[][] bytes = Stream.concat(Stream.of(key), Stream.of(field)).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
send("HDEL", null, (Type) null, key, bytes).join();
}
public <T> List<T> zexists(String key, T... fields) {
if (fields == null || fields.length == 0) {
return new ArrayList<>();
}
List<String> para = new ArrayList<>();
para.add("" +
" local key = KEYS[1];" +
" local args = ARGV;" +
" local result = {};" +
" for i,v in ipairs(args) do" +
" local inx = redis.call('ZREVRANK', key, v);" +
" if(inx) then" +
" table.insert(result,1,v);" +
" end" +
" end" +
" return result;");
para.add("1");
para.add(key);
for (Object field : fields) {
para.add(String.valueOf(field));
}
byte[][] bytes = para.stream().map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
return (List<T>) send("EVAL", CacheEntryType.OBJECT, (Type) null, null, bytes).join();
}
//--------------------- set ------------------------------
public <T> T srandomItem(String key) {
byte[][] bytes = Stream.of(key, 1).map(x -> formatValue(CacheEntryType.OBJECT, (Convert) null, (Type) null, x)).toArray(byte[][]::new);
List<T> list = (List) send("SRANDMEMBER", null, (Type) null, key, bytes).join();
return list != null && !list.isEmpty() ? list.get(0) : null;
}
public <T> List<T> srandomItems(String key, int n) {
byte[][] bytes = Stream.of(key, n).map(x -> formatValue(CacheEntryType.OBJECT, (Convert) null, (Type) null, x)).toArray(byte[][]::new);
return (List) send("SRANDMEMBER", null, (Type) null, key, bytes).join();
}
//--------------------- list ------------------------------
public CompletableFuture<Void> appendListItemsAsync(String key, V... values) {
byte[][] bytes = Stream.concat(Stream.of(key), Stream.of(values)).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
return (CompletableFuture) send("RPUSH", null, (Type) null, key, bytes);
}
public CompletableFuture<Void> lpushListItemAsync(String key, V value) {
return (CompletableFuture) send("LPUSH", null, (Type) null, key, key.getBytes(StandardCharsets.UTF_8), formatValue(CacheEntryType.OBJECT, (Convert) null, (Type) null, value));
}
public void lpushListItem(String key, V value) {
lpushListItemAsync(key, value).join();
}
public void appendListItems(String key, V... values) {
appendListItemsAsync(key, values).join();
}
public void appendSetItems(String key, V... values) {
// todo:
for (V v : values) {
appendSetItem(key, v);
}
}
// 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
public CompletableFuture<Collection<V>> getCollectionAsync(String key, int offset, int limit) {
return (CompletableFuture) send("OBJECT", null, (Type) null, key, "ENCODING".getBytes(StandardCharsets.UTF_8), key.getBytes(StandardCharsets.UTF_8)).thenCompose(t -> {
if (t == null) return CompletableFuture.completedFuture(null);
if (new String((byte[]) t).contains("list")) { //list
return send("LRANGE", CacheEntryType.OBJECT, (Type) null, false, key, key.getBytes(StandardCharsets.UTF_8), String.valueOf(offset).getBytes(StandardCharsets.UTF_8), String.valueOf(offset + limit - 1).getBytes(StandardCharsets.UTF_8));
} else {
return send("SMEMBERS", CacheEntryType.OBJECT, (Type) null, true, key, key.getBytes(StandardCharsets.UTF_8));
}
});
}
public Collection<V> getCollection(String key, int offset, int limit) {
return getCollectionAsync(key, offset, limit).join();
}
public V brpop(String key, int seconds) {
byte[][] bytes = Stream.concat(Stream.of(key), Stream.of(seconds)).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
return (V) send("BRPOP", null, (Type) null, key, bytes).join();
}
//--------------------- zset ------------------------------
public <N extends Number> void zadd(String key, Map<V, N> kv) {
if (kv == null || kv.isEmpty()) {
return;
}
List<String> args = new ArrayList();
args.add(key);
kv.forEach((k, v) -> {
args.add(String.valueOf(v));
args.add(String.valueOf(k));
});
byte[][] bytes = args.stream().map(x -> x.getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
send("ZADD", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
}
public <N extends Number> double zincr(String key, Object number, N n) {
byte[][] bytes = Stream.of(key, n, number).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
Serializable v = send("ZINCRBY", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
return Double.parseDouble(String.valueOf(v));
}
public void zrem(String key, V... vs) {
List<String> args = new ArrayList();
args.add(key);
for (V v : vs) {
args.add(String.valueOf(v));
}
byte[][] bytes = args.stream().map(x -> x.getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
send("ZREM", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
}
public int getZrank(String key, V v) {
byte[][] bytes = Stream.of(key, v).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
Long t = (Long) send("ZRANK", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
return t == null ? -1 : (int) (long) t;
}
public int getZrevrank(String key, V v) {
byte[][] bytes = Stream.of(key, v).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
Long t = (Long) send("ZREVRANK", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
return t == null ? -1 : (int) (long) t;
}
//ZRANGE/ZREVRANGE key start stop
public List<V> getZset(String key) {
byte[][] bytes = Stream.of(key, 0, -1).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
List<V> vs = (List<V>) send("ZREVRANGE", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
List<V> vs2 = new ArrayList(vs.size());
for (int i = 0; i < vs.size(); ++i) {
if (i % 2 == 1) {
vs2.add(this.convert.convertFrom(this.objValueType, String.valueOf(vs.get(i))));
} else {
vs2.add(vs.get(i));
}
}
return vs2;
}
public List<V> getZset(String key, int offset, int limit) {
byte[][] bytes = Stream.of(key, offset, offset + limit - 1).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
List<V> vs = (List<V>) send("ZREVRANGE", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
List<V> vs2 = new ArrayList(vs.size());
for (int i = 0; i < vs.size(); ++i) {
if (i % 2 == 1) {
vs2.add(this.convert.convertFrom(this.objValueType, String.valueOf(vs.get(i))));
} else {
vs2.add(vs.get(i));
}
}
return vs2;
}
public LinkedHashMap<V, Long> getZsetLongScore(String key) {
LinkedHashMap<V, Double> map = getZsetDoubleScore(key);
if (map.isEmpty()) {
return new LinkedHashMap<>();
}
LinkedHashMap<V, Long> map2 = new LinkedHashMap<>(map.size());
map.forEach((k, v) -> map2.put(k, (long) (double) v));
return map2;
}
public LinkedHashMap<V, Long> getZsetItemsLongScore(String key) {
byte[][] bytes = Stream.of(key, 0, -1, "WITHSCORES").map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
List vs = (List) send("ZRANGE", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
LinkedHashMap<V, Long> map = new LinkedHashMap<>();
for (int i = 0; i < vs.size(); i += 2) {
map.put((V) vs.get(i), (long) Double.parseDouble((String) vs.get(i + 1)));
}
return map;
}
public Long getZsetLongScore(String key, V v) {
Double score = getZsetDoubleScore(key, v);
if (score == null) {
return null;
}
return (long) (double) score;
}
public LinkedHashMap<V, Double> getZsetDoubleScore(String key) {
byte[][] bytes = Stream.of(key, 0, -1, "WITHSCORES").map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
List vs = (List) send("ZREVRANGE", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
LinkedHashMap<V, Double> map = new LinkedHashMap<>();
for (int i = 0; i < vs.size(); i += 2) {
map.put((V) vs.get(i), Double.parseDouble((String) vs.get(i + 1)));
}
return map;
}
public Double getZsetDoubleScore(String key, V v) {
byte[][] bytes = Stream.of(key, v).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
Serializable zscore = send("ZSCORE", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
if (zscore == null) {
return null;
}
return Double.parseDouble(String.valueOf(zscore));
}
public LinkedHashMap<V, Long> getZsetLongScore(String key, int offset, int limit) {
byte[][] bytes = Stream.of(key, offset, offset + limit - 1, "WITHSCORES").map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
List vs = (List) send("ZREVRANGE", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
LinkedHashMap<V, Long> map = new LinkedHashMap<>();
for (int i = 0; i < vs.size(); i += 2) {
map.put((V) vs.get(i), (long) Double.parseDouble((String) vs.get(i + 1)));
}
return map;
}
public LinkedHashMap<V, Double> getZsetDoubleScore(String key, int offset, int limit) {
byte[][] bytes = Stream.of(key, offset, offset + limit - 1, "WITHSCORES").map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
List vs = (List) send("ZREVRANGE", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
LinkedHashMap<V, Double> map = new LinkedHashMap<>();
for (int i = 0; i < vs.size(); i += 2) {
map.put((V) vs.get(i), Double.parseDouble(vs.get(i + 1) + ""));
}
return map;
}
// ----------
protected byte[] formatValue(CacheEntryType cacheType, Convert convert0, Type resultType, Object value) {
if (value == null) return "null".getBytes(StandardCharsets.UTF_8);
if (convert0 == null) convert0 = convert;
if (cacheType == CacheEntryType.LONG || cacheType == CacheEntryType.ATOMIC)
return String.valueOf(value).getBytes(StandardCharsets.UTF_8);
if (cacheType == CacheEntryType.STRING) return convert0.convertToBytes(String.class, value);
if (value instanceof String) return String.valueOf(value).getBytes(StandardCharsets.UTF_8);
if (value instanceof Number) return String.valueOf(value).getBytes(StandardCharsets.UTF_8);
return convert0.convertToBytes(resultType == null ? objValueType : resultType, value);
}
}

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1,93 +0,0 @@
package com.zdemo.kafak;
import com.zdemo.AbstractConsumer;
import com.zdemo.IConsumer;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;
import org.redkale.service.Service;
import org.redkale.util.AnyValue;
import org.redkale.util.AutoLoad;
import javax.annotation.Resource;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.time.Duration;
import java.util.Properties;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Consumer
*/
@AutoLoad(false)
public abstract class KafakConsumer extends AbstractConsumer implements IConsumer, Service {
public Logger logger = Logger.getLogger(this.getClass().getSimpleName());
@Resource(name = "APP_HOME")
protected File APP_HOME;
private final LinkedBlockingQueue<Runnable> queue = new LinkedBlockingQueue<>();
@Override
public final void init(AnyValue config) {
if (!preInit()) {
return;
}
try (FileInputStream fis = new FileInputStream(new File(APP_HOME, "conf/kafak.properties"));) {
Properties props = new Properties();
props.load(fis);
new Thread(() -> {
try {
props.put("group.id", getGroupid());
KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
consumer.subscribe(getTopics());
while (true) {
ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1_000));
records.forEach(record -> {
String topic = record.topic();
long offset = record.offset();
String value = record.value();
try {
accept(topic, value);
} catch (Exception e) {
logger.log(Level.WARNING, String.format("topic[%s] event accept error, offset=%s,value:%s", topic, offset, value), e);
}
});
if (!queue.isEmpty()) {
Runnable runnable;
while ((runnable = queue.poll()) != null) {
runnable.run();
}
consumer.unsubscribe();
consumer.subscribe(getTopics());
}
}
} catch (WakeupException ex) {
System.out.println("WakeupException !!!!");
}
}, "thread-consumer-[" + getGroupid() + "]").start();
} catch (IOException e) {
logger.log(Level.WARNING, "", e);
}
}
@Override
public void unsubscribe(String topic) {
queue.add(() -> super.removeEventType(topic)); // defer to the queue; applied on the next subscription-change check cycle
}
@Override
protected void subscribe(String topic) {
queue.add(() -> {
// just set flag, nothing to do
});
}
}

View File

@ -1,59 +0,0 @@
package com.zdemo.kafak;
import com.zdemo.IProducer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.redkale.convert.json.JsonConvert;
import org.redkale.service.Service;
import org.redkale.util.AnyValue;
import org.redkale.util.AutoLoad;
import javax.annotation.Resource;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;
import java.util.logging.Level;
/**
* Producer
*/
@AutoLoad(false)
public class KafakProducer implements IProducer, Service {
private KafkaProducer<String, String> producer;
@Resource(name = "APP_HOME")
protected File APP_HOME;
@Override
public void init(AnyValue config) {
File file = new File(APP_HOME, "conf/kafak.properties");
try (FileInputStream fis = new FileInputStream(file)) {
Properties props = new Properties();
props.load(fis);
producer = new KafkaProducer(props);
} catch (IOException e) {
logger.log(Level.WARNING, "未初始化kafak 生产者kafak发布消息不可用", e);
}
}
@Override
public boolean publish(String topic, Object v) {
producer.send(new ProducerRecord(topic, toStr(v)));
return true;
}
@Override
public void destroy(AnyValue config) {
producer.close();
}
private <V> String toStr(V v) {
if (v instanceof String) {
return (String) v;
}
return JsonConvert.root().convertTo(v);
}
}

View File

@ -1,29 +0,0 @@
package com.zdemo.pulsar;
import com.zdemo.zhub.ZHubClient;
import org.redkale.net.http.RestMapping;
import org.redkale.net.http.RestService;
import org.redkale.service.Service;
import org.redkale.util.AnyValue;
import org.redkale.util.Utility;
import javax.annotation.Resource;
@RestService
public class AService implements Service {
@Resource(name = "zhub")
private ZHubClient zhub;
@Override
public void init(AnyValue config) {
zhub.timer("a", () -> {
System.out.println(Utility.now() + " timer RANK-DATA-RELOADALL executed");
});
}
@RestMapping
public void x() {
}
}

View File

@ -1,91 +0,0 @@
package com.zdemo.pulsar;
import com.zdemo.AbstractConsumer;
import com.zdemo.EventType;
import com.zdemo.IConsumer;
import org.apache.pulsar.client.api.*;
import org.redkale.service.Service;
import org.redkale.util.AnyValue;
import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
public abstract class PulsarConsumer extends AbstractConsumer implements IConsumer, Service {
@Resource(name = "property.pulsar.serviceurl")
private String serviceurl = "pulsar://127.0.0.1:6650";
private PulsarClient client;
private Consumer consumer;
public abstract String getGroupid();
private final LinkedBlockingQueue<Runnable> queue = new LinkedBlockingQueue<>();
@Override
public void addEventType(EventType... eventTypes) {
super.addEventType(eventTypes);
// mark that the subscription set changed
queue.add(() -> logger.info("PulsarConsumer add new topic!"));
}
@Override
public void init(AnyValue config) {
if (!preInit()) {
return;
}
queue.add(() -> logger.info("PulsarConsumer starting ..."));
new Thread(() -> {
try {
client = PulsarClient.builder()
.serviceUrl(serviceurl)
.build();
while (true) {
// dynamically apply newly added subscriptions
if (!queue.isEmpty()) {
Runnable runnable;
while ((runnable = queue.poll()) != null) {
runnable.run();
}
consumer.unsubscribe();
consumer = client.newConsumer()
.topics(new ArrayList<>(getTopics()))
.subscriptionName(getGroupid())
.subscriptionType(SubscriptionType.Shared)
.subscribe();
}
// Wait for a message
Message msg = consumer.receive(10, TimeUnit.SECONDS);
if (msg == null) {
continue;
}
String topic = msg.getTopicName().replace("persistent://public/default/", "");
long offset = 0;
String value = new String(msg.getData());
try {
accept(topic, value);
consumer.acknowledge(msg); // Acknowledge the message so that it can be deleted by the message broker
} catch (Exception e) {
logger.log(Level.WARNING, String.format("topic[%s] event accept error, offset=%s,value:%s", topic, offset, value), e);
consumer.negativeAcknowledge(msg); // Message failed to process, redeliver later
}
}
} catch (PulsarClientException e) {
e.printStackTrace();
}
}).start();
}
@Override
public void unsubscribe(String topic) {
}
}

View File

@ -1,82 +0,0 @@
package com.zdemo.pulsar;
import com.zdemo.Event;
import com.zdemo.IProducer;
import org.apache.pulsar.client.api.Producer;
import org.apache.pulsar.client.api.PulsarClient;
import org.apache.pulsar.client.api.PulsarClientException;
import org.redkale.convert.json.JsonConvert;
import org.redkale.service.Service;
import org.redkale.util.AnyValue;
import org.redkale.util.Comment;
import javax.annotation.Resource;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
public class PulsarProducer<T extends Event> implements IProducer<T>, Service {
@Resource(name = "property.pulsar.serviceurl")
private String serviceurl = "pulsar://127.0.0.1:6650";
@Comment("消息生产者")
private Map<String, Producer<byte[]>> producerMap = new HashMap();
private PulsarClient client;
@Override
public void init(AnyValue config) {
try {
client = PulsarClient.builder()
.serviceUrl(serviceurl)
.build();
} catch (PulsarClientException e) {
e.printStackTrace();
}
}
public Producer<byte[]> getProducer(String topic) {
Producer<byte[]> producer = producerMap.get(topic);
if (producer != null) {
return producer;
}
synchronized (this) {
if ((producer = producerMap.get(topic)) == null) {
try {
producer = client.newProducer()
.topic(topic)
.batchingMaxPublishDelay(10, TimeUnit.MILLISECONDS)
.sendTimeout(10, TimeUnit.SECONDS)
.blockIfQueueFull(true)
.create();
producerMap.put(topic, producer);
return producer;
} catch (PulsarClientException e) {
e.printStackTrace();
}
}
}
return producer;
}
@Override
public void send(T t) {
try {
Producer<byte[]> producer = getProducer(t.topic);
String v = JsonConvert.root().convertTo(t.value);
if (v.startsWith("\"") && v.endsWith("\"")) {
v = v.substring(1, v.length() - 1);
}
producer.newMessage()
.key("")
.value(v.getBytes())
.send();
} catch (Exception e) {
logger.log(Level.WARNING, "", e);
}
}
}

View File

@ -1,114 +0,0 @@
package com.zdemo.redis;
import com.zdemo.AbstractConsumer;
import com.zdemo.IConsumer;
import org.redkale.service.Service;
import org.redkale.util.AnyValue;
import org.redkale.util.AutoLoad;
import javax.annotation.Resource;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.logging.Level;
import java.util.logging.Logger;
@AutoLoad(false)
public class RedisConsumer extends AbstractConsumer implements IConsumer, Service {
public Logger logger = Logger.getLogger(this.getClass().getSimpleName());
@Resource(name = "property.redis.host")
private String host = "127.0.0.1";
@Resource(name = "property.redis.password")
private String password = "";
@Resource(name = "property.redis.port")
private int port = 6379;
private Socket client;
private OutputStreamWriter writer;
private BufferedReader reader;
@Override
public void init(AnyValue config) {
try {
client = new Socket();
client.connect(new InetSocketAddress(host, port));
client.setKeepAlive(true);
writer = new OutputStreamWriter(client.getOutputStream());
writer.write("AUTH " + password + "\r\n");
writer.flush();
StringBuffer buf = new StringBuffer("SUBSCRIBE");
for (String topic : getTopics()) {
buf.append(" ").append(topic);
}
buf.append("\r\n");
writer.write(buf.toString());
writer.flush();
reader = new BufferedReader(new InputStreamReader(client.getInputStream()));
} catch (IOException e) {
logger.log(Level.WARNING, "Redis Consumer 初始化失败!", e);
}
new Thread(() -> {
try {
while (true) {
String readLine = reader.readLine();
String type = "";
if ("*3".equals(readLine)) {
readLine = reader.readLine(); // $7 len()
type = reader.readLine(); // message
if (!"message".equals(type)) {
continue;
}
reader.readLine(); //$n len(key)
String topic = reader.readLine(); // topic
reader.readLine(); //$n len(value)
String value = reader.readLine(); // value
try {
accept(topic, value);
} catch (Exception e) {
logger.log(Level.WARNING, "topic[" + topic + "] event accept error :" + value, e);
}
}
}
} catch (IOException e) {
logger.log(Level.WARNING, "", e);
}
}).start();
}
@Override
protected String getGroupid() {
return null;
}
@Override
public void unsubscribe(String topic) {
try {
writer.write("UNSUBSCRIBE " + topic + "\r\n");
writer.flush();
} catch (IOException e) {
logger.log(Level.WARNING, "", e);
}
super.removeEventType(topic);
}
@Override
protected void subscribe(String topic) {
// add a new subscription
try {
writer.write("SUBSCRIBE " + topic + "\r\n");
writer.flush();
} catch (IOException e) {
logger.log(Level.WARNING, "", e);
}
}
}

View File

@ -1,61 +0,0 @@
package com.zdemo.redis;
import com.zdemo.IProducer;
import org.redkale.convert.json.JsonConvert;
import org.redkale.service.Service;
import org.redkale.util.AnyValue;
import org.redkale.util.AutoLoad;
import javax.annotation.Resource;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.logging.Level;
@AutoLoad(false)
public class RedisProducer implements IProducer, Service {
@Resource(name = "property.redis.host")
private String host = "127.0.0.1";
@Resource(name = "property.redis.password")
private String password = "";
@Resource(name = "property.redis.port")
private int port = 6379;
private OutputStreamWriter osw;
@Override
public void init(AnyValue config) {
try {
Socket client = new Socket();
client.connect(new InetSocketAddress(host, port));
client.setKeepAlive(true);
osw = new OutputStreamWriter(client.getOutputStream());
osw.write("AUTH " + password + "\r\n");
osw.flush();
} catch (IOException e) {
logger.log(Level.WARNING, "", e);
}
}
@Override
public boolean publish(String topic, Object v) {
try {
osw.write("PUBLISH " + topic + " '" + toStr(v) + "' \r\n");
osw.flush();
return true;
} catch (IOException e) {
logger.log(Level.WARNING, "", e);
}
return false;
}
private String toStr(Object v) {
if (v instanceof String) {
return (String) v;
}
return JsonConvert.root().convertTo(v);
}
}

View File

@ -2,8 +2,14 @@ package com.zdemo.zhub;
import com.zdemo.*;
import net.tccn.timer.Timers;
import org.redkale.annotation.AutoLoad;
import org.redkale.annotation.ResourceType;
import org.redkale.service.Local;
import org.redkale.service.Service;
import org.redkale.util.*;
import org.redkale.util.AnyValue;
import org.redkale.util.Comment;
import org.redkale.util.TypeToken;
import org.redkale.util.Utility;
import java.io.BufferedReader;
import java.io.IOException;
@ -22,7 +28,9 @@ import java.util.function.Function;
import java.util.logging.Level;
import java.util.logging.Logger;
@AutoLoad(value = false)
@Local
@AutoLoad(false)
@ResourceType(ZHubClient.class)
public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer, Service {
public Logger logger = Logger.getLogger(ZHubClient.class.getSimpleName());
@ -47,16 +55,27 @@ public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer
}
};*/
/*private static boolean isFirst = true;
private boolean isMain = false;*/
private static Map<String, ZHubClient> mainHub = new HashMap<>(); // 127.0.0.1:1216 - ZHubClient
@Override
public void init(AnyValue config) {
if (!preInit()) {
/*if (!preInit()) {
return;
}*/
if (config == null) {
initClient(null);
return;
}
Map<String, AnyValue> nodes = getNodes(config);
for (String rsName : nodes.keySet()) {
ZHubClient client = new ZHubClient().initClient(nodes.get(rsName));
application.getResourceFactory().register(rsName, client);
}
}
private ZHubClient initClient(AnyValue config) {
// auto-injection
if (config != null) {
addr = config.getValue("addr", addr);
@ -72,19 +91,16 @@ public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer
}
// treat the first instance started as the main instance
/*if (isFirst) {
isMain = true;
isFirst = false;
}*/
if (!mainHub.containsKey(addr)) { // ensure this init logic runs synchronously
mainHub.put(addr, this);
}
// message/event receiving
new Thread(() -> {
if (!initSocket(0)) {
return;
}
// message/event receiving
new Thread(() -> {
while (true) {
try {
String readLine = reader.readLine();
@ -289,6 +305,32 @@ public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer
}
}).start();
return this;
}
public boolean acceptsConf(AnyValue config) {
if (config == null) {
return false;
}
if (!getNodes(config).isEmpty()) {
return true;
}
return false;
}
private HashMap<String, AnyValue> getNodes(AnyValue config) {
AnyValue[] zhubs = config.getAnyValues("zhub");
HashMap<String, AnyValue> confMap = new HashMap<>();
for (AnyValue zhub : zhubs) {
String[] names = zhub.getNames();
for (String name : names) {
confMap.put(name, zhub.getAnyValue(name));
}
}
return confMap;
}
// ---------------------
@ -366,8 +408,6 @@ public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer
send("groupid " + groupid);
StringBuffer buf = new StringBuffer("subscribe lock");
/*if (isMain) {
}*/
if (mainHub.containsValue(this)) {
buf.append(" " + APP_NAME);
}
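
For reference, getNodes() above walks the zhub child nodes of the cluster config and registers one ZHubClient per node name. With the properties format from conf/application.properties, a multi-node setup could look like the sketch below (the second node name and address are hypothetical):

redkale.cluster.zhub[hub].addr=47.111.150.118:6066
redkale.cluster.zhub[hub].auth=zchd@123456
redkale.cluster.zhub[hub].groupid=venue-zhub
# hypothetical second node, injectable via @Resource(name = "hub2")
redkale.cluster.zhub[hub2].addr=127.0.0.1:6066
redkale.cluster.zhub[hub2].auth=zchd@123456
redkale.cluster.zhub[hub2].groupid=venue-zhub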

View File

@ -0,0 +1,131 @@
/*
* Click nbfs://nbhost/SystemFileSystem/Templates/Licenses/license-default.txt to change this license
* Click nbfs://nbhost/SystemFileSystem/Templates/Classes/Class.java to edit this template
*/
package org.redkalex.cache.redis;
import org.redkale.annotation.Resource;
import org.redkale.convert.Convert;
import org.redkale.convert.json.JsonConvert;
import org.redkale.source.AbstractCacheSource;
import org.redkale.util.AnyValue;
import org.redkale.util.RedkaleClassLoader;
import org.redkale.util.RedkaleException;
import org.redkale.util.ResourceFactory;
import java.lang.reflect.Type;
import java.nio.charset.StandardCharsets;
/**
* @author zhangjx
* @since 2.8.0
*/
public abstract class AbstractRedisSource extends AbstractCacheSource {
public static final String CACHE_SOURCE_CRYPTOR = "cryptor";
protected String name;
@Resource(required = false)
protected ResourceFactory resourceFactory;
@Resource(required = false)
protected JsonConvert defaultConvert;
@Resource(name = "$_convert", required = false)
protected JsonConvert convert;
protected int db;
protected RedisCryptor cryptor;
protected AnyValue config;
@Override
public void init(AnyValue conf) {
this.config = conf;
super.init(conf);
this.name = conf.getValue("name", "");
if (this.convert == null) this.convert = this.defaultConvert;
if (conf != null) {
String cryptStr = conf.getValue(CACHE_SOURCE_CRYPTOR, "").trim();
if (!cryptStr.isEmpty()) {
try {
Class<RedisCryptor> cryptClass = (Class) getClass().getClassLoader().loadClass(cryptStr);
RedkaleClassLoader.putReflectionPublicConstructors(cryptClass, cryptClass.getName());
this.cryptor = cryptClass.getConstructor().newInstance();
} catch (ReflectiveOperationException e) {
throw new RedkaleException(e);
}
}
}
if (cryptor != null) {
if (resourceFactory != null) {
resourceFactory.inject(cryptor);
}
cryptor.init(conf);
}
}
@Override
public void destroy(AnyValue conf) {
super.destroy(conf);
if (cryptor != null) {
cryptor.destroy(conf);
}
}
@Override
public void close() throws Exception { // called when the Application shuts down
destroy(null);
}
@Override
public String resourceName() {
return name;
}
protected String decryptValue(String key, RedisCryptor cryptor, String value) {
return cryptor != null ? cryptor.decrypt(key, value) : value;
}
protected <T> T decryptValue(String key, RedisCryptor cryptor, Type type, byte[] bs) {
return decryptValue(key, cryptor, convert, type, bs);
}
protected <T> T decryptValue(String key, RedisCryptor cryptor, Convert c, Type type, byte[] bs) {
if (bs == null) return null;
if (type == byte[].class) return (T) bs;
if (cryptor == null || (type instanceof Class && (((Class) type).isPrimitive() || Number.class.isAssignableFrom((Class) type)))) {
return (T) (c == null ? this.convert : c).convertFrom(type, bs);
}
String deval = cryptor.decrypt(key, new String(bs, StandardCharsets.UTF_8));
return deval == null ? null : (T) (c == null ? this.convert : c).convertFrom(type, deval.getBytes(StandardCharsets.UTF_8));
}
protected String encryptValue(String key, RedisCryptor cryptor, String value) {
return cryptor != null ? cryptor.encrypt(key, value) : value;
}
protected <T> byte[] encryptValue(String key, RedisCryptor cryptor, Convert c, T value) {
return encryptValue(key, cryptor, null, c, value);
}
protected <T> byte[] encryptValue(String key, RedisCryptor cryptor, Type type, Convert c, T value) {
if (value == null) return null;
Type t = type == null ? value.getClass() : type;
if (cryptor == null && type == String.class) {
return value.toString().getBytes(StandardCharsets.UTF_8);
}
return encryptValue(key, cryptor, t, (c == null ? this.convert : c).convertToBytes(t, value));
}
protected byte[] encryptValue(String key, RedisCryptor cryptor, Type type, byte[] bs) {
if (bs == null) return null;
if (cryptor == null || (type instanceof Class && (((Class) type).isPrimitive() || Number.class.isAssignableFrom((Class) type)))) {
return bs;
}
String enval = cryptor.encrypt(key, new String(bs, StandardCharsets.UTF_8));
return enval == null ? null : enval.getBytes(StandardCharsets.UTF_8);
}
}
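
The cryptor hook above is resolved from the "cryptor" attribute of the cache-source config and only needs the calls this class makes: init(AnyValue), encrypt(String, String), decrypt(String, String) and destroy(AnyValue). A minimal sketch of a custom implementation (the class name is hypothetical, Base64 stands in for a real cipher, and it assumes RedisCryptor declares exactly these methods):

package org.redkalex.cache.redis;

import java.nio.charset.StandardCharsets;
import java.util.Base64;
import org.redkale.util.AnyValue;

// Hypothetical example: wired in via the "cryptor" attribute of the cache source config.
public class Base64RedisCryptor implements RedisCryptor {

    @Override
    public void init(AnyValue conf) {
        // no-op for this sketch
    }

    @Override
    public String encrypt(String key, String value) {
        return value == null ? null
            : Base64.getEncoder().encodeToString(value.getBytes(StandardCharsets.UTF_8));
    }

    @Override
    public String decrypt(String key, String value) {
        return value == null ? null
            : new String(Base64.getDecoder().decode(value), StandardCharsets.UTF_8);
    }

    @Override
    public void destroy(AnyValue conf) {
        // no-op for this sketch
    }
}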

View File

@ -0,0 +1,137 @@
package org.redkalex.cache.redis;
import org.redkale.annotation.AutoLoad;
import org.redkale.annotation.ResourceType;
import org.redkale.service.Local;
import org.redkale.source.CacheSource;
import org.redkale.util.AnyValue;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.CompletableFuture;
@Local
@AutoLoad(false)
@ResourceType(CacheSource.class)
public class MyRedisCacheSource extends RedisCacheSource {
@Override
public void init(AnyValue conf) {
super.init(conf);
}
//--------------------- bit ------------------------------
public boolean getBit(String key, int offset) {
return sendAsync("GETBIT", key, key.getBytes(StandardCharsets.UTF_8), String.valueOf(offset).getBytes(StandardCharsets.UTF_8)).thenApply(v -> v.getIntValue(0) > 0).join();
}
public void setBit(String key, int offset, boolean bool) {
sendAsync("SETBIT", key, offset, bool ? 1 : 0).join();
}
//--------------------- bit ------------------------------
//--------------------- lock ------------------------------
// Try to acquire the lock: returns 0 on success, otherwise the remaining TTL (ms) of the existing lock
public long tryLock(String key, int millis) {
Serializable[] obj = {"" +
"if (redis.call('EXISTS',KEYS[1]) == 0) then " +
"redis.call('PSETEX',KEYS[1],ARGV[1],1); " +
"return 0; " +
"else " +
"return redis.call('PTTL',KEYS[1]); " +
"end;", 1, key, millis
};
return sendAsync("EVAL", null, obj).thenApply(v -> v.getIntValue(1)).join();
}
// Acquire the lock (blocks until acquired)
public void lock(String key, int millis) {
long i;
do {
i = tryLock(key, millis);
try {
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
} while (i > 0);
}
// Release the lock
public void unlock(String key) {
remove(key);
}
//--------------------- key ------------------------------
public String get(String key) {
return get(key, String.class);
}
//--------------------- set ------------------------------
/*public <T> void sadd(String key, Collection<T> args) {
saddAsync(key, args.toArray(Serializable[]::new)).join();
}*/
public void sadd(String key, Serializable... args) {
saddAsync(key, Arrays.stream(args).toArray(Serializable[]::new)).join();
}
public void srem(String key, Serializable... args) {
sremAsync(key, args).join();
}
public CompletableFuture<RedisCacheResult> saddAsync(String key, Serializable... args) {
return sendAsync("SADD", key, args);
}
public CompletableFuture<RedisCacheResult> sremAsync(String key, Serializable... args) {
return sendAsync("SREM", key, args);
}
//--------------------- hm ------------------------------
public void setHms(String key, Map kv) {
setHmsAsync(key, kv).join();
}
public CompletableFuture<RedisCacheResult> setHmsAsync(String key, Map<Serializable, Serializable> kv) {
List<Serializable> args = new ArrayList();
kv.forEach((k, v) -> {
args.add(k);
args.add(v);
});
return sendAsync("HMSET", key, args.toArray(Serializable[]::new));
}
public String getHm(String key, String field) {
return getHm(key, String.class, field);
}
public <T extends Serializable> T getHm(String key, Class<T> type, String field) {
List<Serializable> list = super.hmget(key, type, field);
if (list == null || list.isEmpty()) {
return null;
}
return (T) list.get(0);
}
public Map<String, String> getHms(String key, String... field) {
return getHms(key, String.class, field);
}
public <T extends Serializable> Map<String, T> getHms(String key, Class<T> type, String... field) {
List<Serializable> list = super.hmget(key, type, field);
if (list == null || list.isEmpty()) {
return null;
}
Map<String, T> map = new HashMap<>(field.length);
for (int i = 0; i < field.length; i++) {
map.put(field[i], (T) list.get(i));
}
return map;
}
}
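
The tryLock/lock/unlock helpers above mirror the lock test in the deleted RedisTest. A minimal usage sketch (the class, key name and timeout are arbitrary; the source is assumed to be an already-initialized MyRedisCacheSource):

package org.redkalex.cache.redis;

// Hypothetical usage of the EVAL-based lock helpers above.
public class LockUsageDemo {

    static void runWithLock(MyRedisCacheSource source, Runnable task) {
        source.lock("demo-lock", 1000); // blocks until acquired; PSETEX expires the lock after 1000 ms
        try {
            task.run();
        } finally {
            source.unlock("demo-lock"); // unlock() simply removes the key
        }
    }
}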

View File

@ -0,0 +1,78 @@
/*
*
*/
package org.redkalex.cache.redis;
import org.redkale.util.Utility;
import java.util.Arrays;
/**
* @author zhangjx
*/
public class RedisCRC16 {
private static final int[] LOOKUP_TABLE = {0x0000, 0x1021, 0x2042, 0x3063, 0x4084, 0x50A5, 0x60C6,
0x70E7, 0x8108, 0x9129, 0xA14A, 0xB16B, 0xC18C, 0xD1AD, 0xE1CE, 0xF1EF, 0x1231, 0x0210, 0x3273,
0x2252, 0x52B5, 0x4294, 0x72F7, 0x62D6, 0x9339, 0x8318, 0xB37B, 0xA35A, 0xD3BD, 0xC39C, 0xF3FF,
0xE3DE, 0x2462, 0x3443, 0x0420, 0x1401, 0x64E6, 0x74C7, 0x44A4, 0x5485, 0xA56A, 0xB54B, 0x8528,
0x9509, 0xE5EE, 0xF5CF, 0xC5AC, 0xD58D, 0x3653, 0x2672, 0x1611, 0x0630, 0x76D7, 0x66F6, 0x5695,
0x46B4, 0xB75B, 0xA77A, 0x9719, 0x8738, 0xF7DF, 0xE7FE, 0xD79D, 0xC7BC, 0x48C4, 0x58E5, 0x6886,
0x78A7, 0x0840, 0x1861, 0x2802, 0x3823, 0xC9CC, 0xD9ED, 0xE98E, 0xF9AF, 0x8948, 0x9969, 0xA90A,
0xB92B, 0x5AF5, 0x4AD4, 0x7AB7, 0x6A96, 0x1A71, 0x0A50, 0x3A33, 0x2A12, 0xDBFD, 0xCBDC, 0xFBBF,
0xEB9E, 0x9B79, 0x8B58, 0xBB3B, 0xAB1A, 0x6CA6, 0x7C87, 0x4CE4, 0x5CC5, 0x2C22, 0x3C03, 0x0C60,
0x1C41, 0xEDAE, 0xFD8F, 0xCDEC, 0xDDCD, 0xAD2A, 0xBD0B, 0x8D68, 0x9D49, 0x7E97, 0x6EB6, 0x5ED5,
0x4EF4, 0x3E13, 0x2E32, 0x1E51, 0x0E70, 0xFF9F, 0xEFBE, 0xDFDD, 0xCFFC, 0xBF1B, 0xAF3A, 0x9F59,
0x8F78, 0x9188, 0x81A9, 0xB1CA, 0xA1EB, 0xD10C, 0xC12D, 0xF14E, 0xE16F, 0x1080, 0x00A1, 0x30C2,
0x20E3, 0x5004, 0x4025, 0x7046, 0x6067, 0x83B9, 0x9398, 0xA3FB, 0xB3DA, 0xC33D, 0xD31C, 0xE37F,
0xF35E, 0x02B1, 0x1290, 0x22F3, 0x32D2, 0x4235, 0x5214, 0x6277, 0x7256, 0xB5EA, 0xA5CB, 0x95A8,
0x8589, 0xF56E, 0xE54F, 0xD52C, 0xC50D, 0x34E2, 0x24C3, 0x14A0, 0x0481, 0x7466, 0x6447, 0x5424,
0x4405, 0xA7DB, 0xB7FA, 0x8799, 0x97B8, 0xE75F, 0xF77E, 0xC71D, 0xD73C, 0x26D3, 0x36F2, 0x0691,
0x16B0, 0x6657, 0x7676, 0x4615, 0x5634, 0xD94C, 0xC96D, 0xF90E, 0xE92F, 0x99C8, 0x89E9, 0xB98A,
0xA9AB, 0x5844, 0x4865, 0x7806, 0x6827, 0x18C0, 0x08E1, 0x3882, 0x28A3, 0xCB7D, 0xDB5C, 0xEB3F,
0xFB1E, 0x8BF9, 0x9BD8, 0xABBB, 0xBB9A, 0x4A75, 0x5A54, 0x6A37, 0x7A16, 0x0AF1, 0x1AD0, 0x2AB3,
0x3A92, 0xFD2E, 0xED0F, 0xDD6C, 0xCD4D, 0xBDAA, 0xAD8B, 0x9DE8, 0x8DC9, 0x7C26, 0x6C07, 0x5C64,
0x4C45, 0x3CA2, 0x2C83, 0x1CE0, 0x0CC1, 0xEF1F, 0xFF3E, 0xCF5D, 0xDF7C, 0xAF9B, 0xBFBA, 0x8FD9,
0x9FF8, 0x6E17, 0x7E36, 0x4E55, 0x5E74, 0x2E93, 0x3EB2, 0x0ED1, 0x1EF0};
private RedisCRC16() {
}
public static int crc16(byte[] bytes) {
int crc = 0x0000;
for (byte b : bytes) {
crc = (crc << 8) ^ LOOKUP_TABLE[((crc >>> 8) ^ (b & 0xFF)) & 0xFF];
}
return crc & 0xFFFF;
}
public static int calcSlot(int maxSlot, byte[] key) {
if (key == null) {
return 0;
}
int start = Utility.indexOf(key, (byte) '{');
if (start != -1) {
int end = Utility.indexOf(key, start + 1, (byte) '}');
if (end != -1) {
key = Arrays.copyOfRange(key, start + 1, end);
}
}
int result = crc16(key) % maxSlot;
return result;
}
public static int calcSlot(int maxSlot, String key) {
if (key == null) {
return 0;
}
int start = key.indexOf('{');
if (start != -1) {
int end = key.indexOf('}', start + 1);
if (end != -1 && start + 1 < end) {
key = key.substring(start + 1, end);
}
}
int result = crc16(key.getBytes()) % maxSlot;
return result;
}
}
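A quick illustration of how calcSlot treats Redis Cluster hash tags (a sketch; 16384 is the standard cluster slot count and the key names are invented): only the bytes between the first '{' and the following '}' are hashed, so keys sharing a tag land in the same slot.

int slotA = RedisCRC16.calcSlot(16384, "{user:1000}.following");
int slotB = RedisCRC16.calcSlot(16384, "{user:1000}.followers");
// slotA == slotB, because only "user:1000" is fed to crc16
int slotC = RedisCRC16.calcSlot(16384, "plain-key"); // no tag: the whole key is hashed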

View File

@ -0,0 +1,36 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkalex.cache.redis;
import org.redkale.net.AsyncConnection;
import org.redkale.net.AsyncGroup;
import org.redkale.net.client.Client;
import org.redkale.net.client.ClientAddress;
/**
* @author zhangjx
*/
public class RedisCacheClient extends Client<RedisCacheConnection, RedisCacheRequest, RedisCacheResult> {
public RedisCacheClient(String name, AsyncGroup group, String key, ClientAddress address, int maxConns, int maxPipelines, RedisCacheReqAuth authReq, RedisCacheReqDB dbReq) {
super(name, group, true, address, maxConns, maxPipelines, () -> new RedisCacheReqPing(), () -> new RedisCacheReqClose(), null); //maxConns
if (authReq != null || dbReq != null) {
if (authReq != null && dbReq != null) {
this.authenticate = conn -> writeChannel(conn, authReq).thenCompose(v -> writeChannel(conn, dbReq)).thenApply(v -> conn);
} else if (authReq != null) {
this.authenticate = conn -> writeChannel(conn, authReq).thenApply(v -> conn);
} else {
this.authenticate = conn -> writeChannel(conn, dbReq).thenApply(v -> conn);
}
}
}
@Override
protected RedisCacheConnection createClientConnection(final int index, AsyncConnection channel) {
return new RedisCacheConnection(this, index, channel);
}
}

View File

@ -0,0 +1,249 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkalex.cache.redis;
import org.redkale.net.client.ClientCodec;
import org.redkale.net.client.ClientConnection;
import org.redkale.util.ByteArray;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
/**
* @author zhangjx
*/
public class RedisCacheCodec extends ClientCodec<RedisCacheRequest, RedisCacheResult> {
protected static final byte TYPE_STRING = '+'; //simple string type (CRLF not included)
protected static final byte TYPE_ERROR = '-'; //error type (CRLF not included)
protected static final byte TYPE_NUMBER = ':'; //integer type
protected static final byte TYPE_BULK = '$'; //bulk string type
protected static final byte TYPE_ARRAY = '*'; //array type
protected static final Logger logger = Logger.getLogger(RedisCacheCodec.class.getSimpleName());
protected byte halfFrameCmd;
protected int halfFrameBulkLength = -10;
protected int halfFrameArraySize = -10;
protected int halfFrameArrayIndex; //starts at 0
protected int halfFrameArrayItemLength = -10;
protected ByteArray halfFrameBytes;
protected byte frameType;
protected byte[] frameValue; //(CRLF not included)
protected List<byte[]> frameList; //(CRLF not included)
private ByteArray recyclableArray;
public RedisCacheCodec(ClientConnection connection) {
super(connection);
}
protected ByteArray pollArray(ByteArray array) {
if (recyclableArray == null) {
recyclableArray = new ByteArray();
} else {
recyclableArray.clear();
}
if (array != null) {
recyclableArray.put(array, 0, array.length());
}
return recyclableArray;
}
private boolean checkBytesFrame(RedisCacheConnection conn, ByteBuffer buffer, ByteArray array) {
// byte[] dbs = new byte[buffer.remaining()];
// for (int i = 0; i < dbs.length; i++) {
// dbs[i] = buffer.get(buffer.position() + i);
// }
// ArrayDeque<ClientFuture> deque = (ArrayDeque) responseQueue(conn);
// logger.log(Level.FINEST, "[" + Utility.nowMillis() + "] [" + Thread.currentThread().getName() + "]: " + conn + ", raw data: " + new String(dbs).replace("\r\n", " ") + ", req=" + deque.getFirst().getRequest());
array.clear();
byte type = halfFrameCmd == 0 ? buffer.get() : halfFrameCmd;
if (halfFrameBytes != null) {
array.put(halfFrameBytes, 0, halfFrameBytes.length());
}
frameType = type;
if (type == TYPE_STRING || type == TYPE_ERROR || type == TYPE_NUMBER) {
if (readComplete(buffer, array)) {
frameValue = array.getBytes();
} else {
halfFrameCmd = type;
halfFrameBytes = pollArray(array);
return false;
}
} else if (type == TYPE_BULK) {
int bulkLength = halfFrameBulkLength;
if (bulkLength < -2) {
if (!readComplete(buffer, array)) { //bulkLength not fully received yet
halfFrameCmd = type;
halfFrameBulkLength = -10;
halfFrameBytes = pollArray(array);
return false;
}
bulkLength = Integer.parseInt(array.toString(StandardCharsets.UTF_8));
array.clear();
}
if (bulkLength == -1) {
frameValue = null;
} else if (readComplete(buffer, array)) {
frameValue = array.getBytes();
} else {
halfFrameCmd = type;
halfFrameBulkLength = bulkLength;
halfFrameBytes = pollArray(array);
return false;
}
} else if (type == TYPE_ARRAY) {
int arraySize = halfFrameArraySize;
if (arraySize < -2) {
if (!readComplete(buffer, array)) { //arraySize not fully received yet
halfFrameCmd = type;
halfFrameArraySize = -10;
halfFrameArrayIndex = 0;
halfFrameArrayItemLength = -10;
halfFrameBytes = pollArray(array);
return false;
}
arraySize = Integer.parseInt(array.toString(StandardCharsets.UTF_8));
array.clear();
}
int arrayIndex = halfFrameArrayIndex;
for (int i = arrayIndex; i < arraySize; i++) {
int itemLength = halfFrameArrayItemLength;
halfFrameArrayItemLength = -10;
if (itemLength < -2) {
if (!readComplete(buffer, array)) { //item length not fully received yet
halfFrameCmd = type;
halfFrameArraySize = arraySize;
halfFrameArrayIndex = i;
halfFrameArrayItemLength = -10;
halfFrameBytes = pollArray(array);
return false;
}
byte sign = array.get(0);
itemLength = Integer.parseInt(array.toString(1, StandardCharsets.UTF_8));
array.clear();
if (sign == TYPE_ARRAY) { //an array nested inside the array; currently only HSCAN replies use this
frameValue = null;
if (frameList != null) {
frameList.clear();
}
clearHalfFrame();
if (itemLength == 0) {
return true;
}
halfFrameCmd = sign;
halfFrameArraySize = itemLength;
if (!buffer.hasRemaining()) {
return false;
}
return checkBytesFrame(conn, buffer, array);
}
}
int cha = itemLength - array.length();
if (itemLength == -1) {
if (frameList == null) {
frameList = new ArrayList<>();
}
frameList.add(null);
array.clear();
} else if (buffer.remaining() >= cha + 2) {
for (int j = 0; j < cha; j++) array.put(buffer.get());
buffer.get(); //\r
buffer.get(); //\n
if (frameList == null) {
frameList = new ArrayList<>();
}
frameList.add(array.getBytes());
array.clear();
} else {
while (buffer.hasRemaining()) array.put(buffer.get());
halfFrameCmd = type;
halfFrameArraySize = arraySize;
halfFrameArrayIndex = i;
halfFrameArrayItemLength = itemLength;
halfFrameBytes = pollArray(array);
return false;
}
}
}
clearHalfFrame();
return true;
}
protected void clearHalfFrame() {
halfFrameCmd = 0;
halfFrameBulkLength = -10;
halfFrameArraySize = -10;
halfFrameArrayIndex = 0;
halfFrameArrayItemLength = -10;
halfFrameBytes = null;
}
@Override
public void decodeMessages(ByteBuffer realbuf, ByteArray array) {
RedisCacheConnection conn = (RedisCacheConnection) connection;
if (!realbuf.hasRemaining()) {
return;
}
ByteBuffer buffer = realbuf;
if (!checkBytesFrame(conn, buffer, array)) {
return;
}
//the buffer now contains at least one complete frame
boolean first = true;
RedisCacheRequest request = null;
while (first || buffer.hasRemaining()) {
if (request == null) {
request = nextRequest();
}
if (!first && !checkBytesFrame(conn, buffer, array)) {
break;
}
if (frameType == TYPE_ERROR) {
addMessage(request, new RuntimeException(new String(frameValue, StandardCharsets.UTF_8)));
} else {
addMessage(request, conn.pollResultSet(request).prepare(frameType, frameValue, frameList));
}
frameType = 0;
frameValue = null;
frameList = null;
halfFrameCmd = 0;
halfFrameBytes = null;
first = false;
buffer = realbuf;
}
}
protected boolean readComplete(ByteBuffer buffer, ByteArray array) {
while (buffer.hasRemaining()) {
byte b = buffer.get();
if (b == '\n') {
array.removeLastByte(); //strip the trailing \r
return true;
}
array.put(b);
}
return false;
}
}
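For reference, the five RESP frame shapes this codec distinguishes look like this on the wire (standard RESP examples, not taken from this commit):

// +OK\r\n                        simple string  (TYPE_STRING)
// -ERR unknown command\r\n       error          (TYPE_ERROR)
// :1000\r\n                      integer        (TYPE_NUMBER)
// $5\r\nhello\r\n                bulk string    (TYPE_BULK: length prefix, then payload)
// *2\r\n$1\r\na\r\n$1\r\nb\r\n   array of two bulk strings (TYPE_ARRAY)
// A reply split across reads is parked in the halfFrame* fields until the rest of the bytes arrive.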

View File

@ -0,0 +1,49 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkalex.cache.redis;
import org.redkale.net.AsyncConnection;
import org.redkale.net.WorkThread;
import org.redkale.net.client.Client;
import org.redkale.net.client.ClientCodec;
import org.redkale.net.client.ClientConnection;
import java.util.concurrent.CompletableFuture;
import java.util.function.Function;
/**
* @author zhangjx
*/
public class RedisCacheConnection extends ClientConnection<RedisCacheRequest, RedisCacheResult> {
public RedisCacheConnection(Client client, int index, AsyncConnection channel) {
super(client, index, channel);
}
@Override
protected ClientCodec createCodec() {
return new RedisCacheCodec(this);
}
protected CompletableFuture<RedisCacheResult> writeRequest(RedisCacheRequest request) {
return super.writeChannel(request);
}
protected <T> CompletableFuture<T> writeRequest(RedisCacheRequest request, Function<RedisCacheResult, T> respTransfer) {
return super.writeChannel(request, respTransfer);
}
public RedisCacheResult pollResultSet(RedisCacheRequest request) {
RedisCacheResult rs = new RedisCacheResult();
return rs;
}
public RedisCacheRequest pollRequest(WorkThread workThread) {
RedisCacheRequest rs = new RedisCacheRequest().currThread(workThread);
return rs;
}
}

View File

@ -0,0 +1,45 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkalex.cache.redis;
import org.redkale.net.client.ClientConnection;
import org.redkale.util.ByteArray;
import java.nio.charset.StandardCharsets;
/**
* @author zhangjx
*/
public class RedisCacheReqAuth extends RedisCacheRequest {
private static final byte[] PS = "AUTH".getBytes(StandardCharsets.UTF_8);
protected String password;
public RedisCacheReqAuth(String password) {
this.password = password;
}
@Override
public void writeTo(ClientConnection conn, ByteArray writer) {
byte[] pwd = password.getBytes();
writer.put((byte) '*');
writer.put((byte) '2');
writer.put((byte) '\r', (byte) '\n');
writer.put((byte) '$');
writer.put((byte) '4');
writer.put((byte) '\r', (byte) '\n');
writer.put(PS);
writer.put((byte) '\r', (byte) '\n');
writer.put((byte) '$');
writer.put(String.valueOf(pwd.length).getBytes(StandardCharsets.UTF_8));
writer.put((byte) '\r', (byte) '\n');
writer.put(pwd);
writer.put((byte) '\r', (byte) '\n');
}
}

View File

@ -0,0 +1,36 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkalex.cache.redis;
import org.redkale.net.client.ClientConnection;
import org.redkale.util.ByteArray;
import java.nio.charset.StandardCharsets;
/**
* @author zhangjx
*/
public class RedisCacheReqClose extends RedisCacheRequest {
private static final byte[] PS = "QUIT".getBytes(StandardCharsets.UTF_8);
@Override
public final boolean isCloseType() {
return true;
}
@Override
public void writeTo(ClientConnection conn, ByteArray writer) {
writer.put((byte) '*');
writer.put((byte) '1');
writer.put((byte) '\r', (byte) '\n');
writer.put((byte) '$');
writer.put((byte) '4');
writer.put((byte) '\r', (byte) '\n');
writer.put(PS);
writer.put((byte) '\r', (byte) '\n');
}
}

View File

@ -0,0 +1,43 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkalex.cache.redis;
import org.redkale.net.client.ClientConnection;
import org.redkale.util.ByteArray;
import java.nio.charset.StandardCharsets;
/**
* @author zhangjx
*/
public class RedisCacheReqDB extends RedisCacheRequest {
protected int db;
public RedisCacheReqDB(int db) {
this.db = db;
}
@Override
public void writeTo(ClientConnection conn, ByteArray writer) {
writer.put((byte) '*');
writer.put((byte) '2');
writer.put((byte) '\r', (byte) '\n');
writer.put((byte) '$');
writer.put((byte) '6');
writer.put((byte) '\r', (byte) '\n');
writer.put("SELECT".getBytes(StandardCharsets.UTF_8));
writer.put((byte) '\r', (byte) '\n');
byte[] dbs = String.valueOf(db).getBytes(StandardCharsets.UTF_8);
writer.put((byte) '$');
writer.put(String.valueOf(dbs.length).getBytes(StandardCharsets.UTF_8));
writer.put((byte) '\r', (byte) '\n');
writer.put(dbs);
writer.put((byte) '\r', (byte) '\n');
}
}

View File

@ -0,0 +1,31 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkalex.cache.redis;
import org.redkale.net.client.ClientConnection;
import org.redkale.util.ByteArray;
import java.nio.charset.StandardCharsets;
/**
* @author zhangjx
*/
public class RedisCacheReqPing extends RedisCacheRequest {
private static final byte[] PS = "PING".getBytes(StandardCharsets.UTF_8);
@Override
public void writeTo(ClientConnection conn, ByteArray writer) {
writer.put((byte) '*');
writer.put((byte) '1');
writer.put((byte) '\r', (byte) '\n');
writer.put((byte) '$');
writer.put((byte) '4');
writer.put((byte) '\r', (byte) '\n');
writer.put(PS);
writer.put((byte) '\r', (byte) '\n');
}
}

View File

@ -0,0 +1,61 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkalex.cache.redis;
import org.redkale.net.client.ClientConnection;
import org.redkale.net.client.ClientRequest;
import org.redkale.util.ByteArray;
import java.nio.charset.StandardCharsets;
/**
* @author zhangjx
*/
public class RedisCacheRequest extends ClientRequest {
static final byte[] TRUE = new byte[]{'t'};
static final byte[] FALSE = new byte[]{'f'};
protected String key;
protected String command;
protected byte[][] args;
public <T> RedisCacheRequest prepare(String command, String key, byte[]... args) {
super.prepare();
this.command = command;
this.key = key;
this.args = args;
return this;
}
@Override
public void writeTo(ClientConnection conn, ByteArray writer) {
writer.put((byte) '*');
writer.put(String.valueOf(args.length + 1).getBytes(StandardCharsets.UTF_8));
writer.put((byte) '\r', (byte) '\n');
writer.put((byte) '$');
writer.put(String.valueOf(command.length()).getBytes(StandardCharsets.UTF_8));
writer.put((byte) '\r', (byte) '\n');
writer.put(command.getBytes(StandardCharsets.UTF_8));
writer.put((byte) '\r', (byte) '\n');
for (final byte[] arg : args) {
writer.put((byte) '$');
writer.put(String.valueOf(arg.length).getBytes(StandardCharsets.UTF_8));
writer.put((byte) '\r', (byte) '\n');
writer.put(arg);
writer.put((byte) '\r', (byte) '\n');
}
}
@Override
public String toString() {
return getClass().getSimpleName() + "{command=" + command + ", key=" + key + "}";
}
}
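writeTo serializes every request as a RESP array of bulk strings: the element count, then the command, then each entry of args; the key field itself is not written and is kept mainly for logging and result handling. A sketch of the resulting bytes (the example values are invented):

// request.prepare("GET", "k1", "k1".getBytes()) serializes to:
// *2\r\n$3\r\nGET\r\n$2\r\nk1\r\n
// i.e. args must already contain every argument sent on the wire, including the key bytes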

View File

@ -0,0 +1,151 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkalex.cache.redis;
import org.redkale.convert.json.JsonConvert;
import java.lang.reflect.Type;
import java.nio.charset.StandardCharsets;
import java.util.*;
/**
* @author zhangjx
*/
public class RedisCacheResult {
//+ simple string type (CRLF not included)
//- error type (CRLF not included)
//: integer type
//$ bulk string
//* array
protected byte frameType;
protected byte[] frameValue; //(CRLF not included)
protected List<byte[]> frameList; //(CRLF not included)
public RedisCacheResult prepare(byte byteType, byte[] val, List<byte[]> bytesList) {
this.frameType = byteType;
this.frameValue = val;
this.frameList = bytesList;
return this;
}
public Void getVoidValue() {
return null;
}
public byte[] getFrameValue() {
return frameValue;
}
public Boolean getBoolValue() {
if (frameValue == null) {
return false;
}
String val = new String(frameValue, StandardCharsets.UTF_8);
if ("OK".equals(val)) {
return true;
}
return Integer.parseInt(val) > 0;
}
public String getStringValue(String key, RedisCryptor cryptor) {
if (frameValue == null) {
return null;
}
String val = new String(frameValue, StandardCharsets.UTF_8);
if (cryptor != null) {
val = cryptor.decrypt(key, val);
}
return val;
}
public Double getDoubleValue(Double defvalue) {
return frameValue == null ? defvalue : Double.parseDouble(new String(frameValue, StandardCharsets.UTF_8));
}
public Long getLongValue(Long defvalue) {
return frameValue == null ? defvalue : Long.parseLong(new String(frameValue, StandardCharsets.UTF_8));
}
public Integer getIntValue(Integer defvalue) {
return frameValue == null ? defvalue : Integer.parseInt(new String(frameValue, StandardCharsets.UTF_8));
}
public <T> T getObjectValue(String key, RedisCryptor cryptor, Type type) {
return formatValue(key, cryptor, frameValue, type);
}
protected <T> Set<T> getSetValue(String key, RedisCryptor cryptor, Type type) {
if (frameList == null || frameList.isEmpty()) {
return new LinkedHashSet<>();
}
Set<T> set = new LinkedHashSet<>();
for (byte[] bs : frameList) {
set.add(formatValue(key, cryptor, bs, type));
}
return set;
}
protected <T> List<T> getListValue(String key, RedisCryptor cryptor, Type type) {
if (frameList == null || frameList.isEmpty()) {
return new ArrayList<>();
}
List<T> list = new ArrayList<>();
for (byte[] bs : frameList) {
list.add(formatValue(key, cryptor, bs, type));
}
return list;
}
protected <T> Map<String, T> getMapValue(String key, RedisCryptor cryptor, Type type) {
if (frameList == null || frameList.isEmpty()) {
return new LinkedHashMap<>();
}
Map<String, T> map = new LinkedHashMap<>();
for (int i = 0; i < frameList.size(); i += 2) {
byte[] bs1 = frameList.get(i);
byte[] bs2 = frameList.get(i + 1);
T val = formatValue(key, cryptor, bs2, type);
if (val != null) {
map.put(formatValue(key, cryptor, bs1, String.class).toString(), val);
}
}
return map;
}
protected static <T> T formatValue(String key, RedisCryptor cryptor, byte[] frames, Type type) {
if (frames == null) {
return null;
}
if (type == byte[].class) {
return (T) frames;
}
if (type == String.class) {
String val = new String(frames, StandardCharsets.UTF_8);
if (cryptor != null) {
val = cryptor.decrypt(key, val);
}
return (T) val;
}
if (type == boolean.class || type == Boolean.class) {
return (T) (Boolean) "t".equalsIgnoreCase(new String(frames, StandardCharsets.UTF_8));
}
if (type == long.class || type == Long.class) {
return (T) (Long) Long.parseLong(new String(frames, StandardCharsets.UTF_8));
}
if (type == double.class || type == Double.class) {
return (T) (Double) Double.parseDouble(new String(frames, StandardCharsets.UTF_8));
}
if (cryptor != null) {
String val = cryptor.decrypt(key, new String(frames, StandardCharsets.UTF_8));
return (T) JsonConvert.root().convertFrom(type, val);
}
return (T) JsonConvert.root().convertFrom(type, frames);
}
}

File diff suppressed because it is too large

View File

@ -0,0 +1,29 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkalex.cache.redis;
import org.redkale.annotation.Priority;
import org.redkale.source.CacheSource;
import org.redkale.source.CacheSourceProvider;
import org.redkale.util.AnyValue;
/**
* @author zhangjx
*/
@Priority(10001)
public class RedisCacheSourceProvider implements CacheSourceProvider {
@Override
public boolean acceptsConf(AnyValue config) {
return new MyRedisCacheSource().acceptsConf(config);
}
@Override
public CacheSource createInstance() {
return new MyRedisCacheSource();
}
}

View File

@ -0,0 +1,46 @@
/*
* Click nbfs://nbhost/SystemFileSystem/Templates/Licenses/license-default.txt to change this license
* Click nbfs://nbhost/SystemFileSystem/Templates/Classes/Class.java to edit this template
*/
package org.redkalex.cache.redis;
import org.redkale.util.AnyValue;
/**
* @author zhangjx
*/
public interface RedisCryptor {
/**
 * Initialize.
 *
 * @param conf configuration
 */
public void init(AnyValue conf);
/**
 * Encrypt. For keys that need no encryption, return the value unchanged.
 *
 * @param key   key
 * @param value plain text
 * @return cipher text
 */
public String encrypt(String key, String value);
/**
 * Decrypt. For keys that need no decryption, return the value unchanged.
 *
 * @param key   key
 * @param value cipher text
 * @return plain text
 */
public String decrypt(String key, String value);
/**
 * Destroy.
 *
 * @param conf configuration
 */
public void destroy(AnyValue conf);
}
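A minimal sketch of an implementing class, assuming a throwaway Base64 transform purely for illustration (not real encryption, and not part of this commit); the class name and the "prefix" config item are hypothetical. Values are only transformed for keys carrying the configured prefix, and everything else is returned unchanged as the interface requires:

public class DemoRedisCryptor implements RedisCryptor { // hypothetical class name

    private String prefix = "secret:"; // assumed config item; any prefix works

    @Override
    public void init(AnyValue conf) {
        if (conf != null && conf.getValue("prefix") != null) {
            this.prefix = conf.getValue("prefix");
        }
    }

    @Override
    public String encrypt(String key, String value) {
        if (key == null || value == null || !key.startsWith(prefix)) {
            return value; // keys that need no encryption return the value as-is
        }
        return java.util.Base64.getEncoder()
                .encodeToString(value.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    }

    @Override
    public String decrypt(String key, String value) {
        if (key == null || value == null || !key.startsWith(prefix)) {
            return value;
        }
        return new String(java.util.Base64.getDecoder().decode(value),
                java.nio.charset.StandardCharsets.UTF_8);
    }

    @Override
    public void destroy(AnyValue conf) {
        // nothing to release in this sketch
    }
}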

File diff suppressed because it is too large

View File

@ -0,0 +1,35 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkalex.cache.redis.lettuce;
import org.redkale.annotation.Priority;
import org.redkale.source.CacheSource;
import org.redkale.source.CacheSourceProvider;
import org.redkale.util.AnyValue;
/**
*
* @author zhangjx
*/
@Priority(-100)
public class RedisLettuceCacheSourceProvider implements CacheSourceProvider {
@Override
public boolean acceptsConf(AnyValue config) {
try {
Object.class.isAssignableFrom(io.lettuce.core.support.BoundedPoolConfig.class); //force-load a Lettuce class to check the dependency is on the classpath
return new RedisLettuceCacheSource().acceptsConf(config);
} catch (Throwable e) {
return false;
}
}
@Override
public CacheSource createInstance() {
return new RedisLettuceCacheSource();
}
}

File diff suppressed because it is too large

View File

@ -0,0 +1,35 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkalex.cache.redis.redission;
import org.redkale.annotation.Priority;
import org.redkale.source.CacheSource;
import org.redkale.source.CacheSourceProvider;
import org.redkale.util.AnyValue;
/**
*
* @author zhangjx
*/
@Priority(-300)
public class RedissionCacheSourceProvider implements CacheSourceProvider {
@Override
public boolean acceptsConf(AnyValue config) {
try {
Object.class.isAssignableFrom(org.redisson.config.Config.class); //force-load a Redisson class to check the dependency is on the classpath
return new RedissionCacheSource().acceptsConf(config);
} catch (Throwable e) {
return false;
}
}
@Override
public CacheSource createInstance() {
return new RedissionCacheSource();
}
}

File diff suppressed because it is too large

View File

@ -0,0 +1,32 @@
/*
*/
package org.redkalex.cache.redis.vertx;
import org.redkale.annotation.Priority;
import org.redkale.source.CacheSource;
import org.redkale.source.CacheSourceProvider;
import org.redkale.util.AnyValue;
/**
*
* @author zhangjx
*/
@Priority(-200)
public class RedisVertxCacheSourceProvider implements CacheSourceProvider {
@Override
public boolean acceptsConf(AnyValue config) {
try {
Object.class.isAssignableFrom(io.vertx.redis.client.RedisOptions.class); //force-load a vertx-redis class to check the dependency is on the classpath
return new RedisVertxCacheSource().acceptsConf(config);
} catch (Throwable e) {
return false;
}
}
@Override
public CacheSource createInstance() {
return new RedisVertxCacheSource();
}
}

View File

@ -0,0 +1 @@
com.zdemo.ZhubProvider

View File

@ -0,0 +1 @@
org.redkalex.cache.redis.RedisCacheSourceProvider

View File

@ -1,25 +1,121 @@
package com.zdemo.cachex;
package org.redkalex.cache.redis.test;
import org.redkale.convert.json.JsonFactory;
import org.redkale.net.AsyncIOGroup;
import org.redkale.util.AnyValue;
import org.redkale.util.ResourceFactory;
import org.redkalex.cache.redis.MyRedisCacheSource;
import java.awt.*;
import java.io.Serializable;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import static org.redkale.boot.Application.RESNAME_APP_CLIENT_ASYNCGROUP;
import static org.redkale.source.AbstractCacheSource.*;
public class RedisTest {
static MyRedisCacheSource<String> source = new MyRedisCacheSource();
static MyRedisCacheSource source = new MyRedisCacheSource();
static {
AnyValue.DefaultAnyValue conf = new AnyValue.DefaultAnyValue();
static { // redis://:*Zhong9307!@47.111.150.118:6064?db=2
AnyValue.DefaultAnyValue conf = new AnyValue.DefaultAnyValue().addValue(CACHE_SOURCE_MAXCONNS, "1");
conf.addValue(CACHE_SOURCE_NODE, new AnyValue.DefaultAnyValue().addValue(CACHE_SOURCE_URL, "redis://:*Zhong9307!@47.111.150.118:6064?db=0"));
final ResourceFactory factory = ResourceFactory.create();
final AsyncIOGroup asyncGroup = new AsyncIOGroup(8192, 16);
asyncGroup.start();
factory.register(RESNAME_APP_CLIENT_ASYNCGROUP, asyncGroup);
factory.inject(source);
//source.defaultConvert = JsonFactory.root().getConvert();
source.init(conf);
//--------------------- bit ------------------------------
/*boolean ax = source.getBit("ax", 6);
System.out.println("ax:"+ ax);
source.setBit("ax", 6, true);
ax = source.getBit("ax", 6);
System.out.println("ax:"+ ax);
source.setBit("ax", 6, false);
ax = source.getBit("ax", 6);
System.out.println("ax:"+ ax);*/
//--------------------- bit ------------------------------
//--------------------- bit ------------------------------
/*
source.lock("lockx", 5000);
*/
//--------------------- set ------------------------------
source.del("setx");
/*
int[] ints = {1, 2, 3};
source.sadd("setx", ints);
*/
//source.sadd("setx", list.toArray(Integer[]::new));
List<Integer> list = List.of(2, 3, 5);
// source.sadd("setx", list.toArray(Integer[]::new));
source.sadd("setx", list.toArray(Integer[]::new));
source.sadd("setx", 12,2312,213);
source.keys("setx*").forEach(x -> {
System.out.println(x);
});
source.srem("setx", 213, 2312);
Collection<String> setx1 = source.getCollection("setx", String.class);
System.out.println(setx1);
//source.getexLong()
source.setHms("hmx", Map.of("a", "5","b", "51", "c", "ads"));
List<Serializable> hmget = source.hmget("hmx", int.class, "a");
System.out.println(hmget);
Integer hm = source.getHm("hmx", int.class, "ads");
System.out.println(hm);
Map<String, String> hms = source.getHms("hmx", "a", "b");
System.out.println(hms);
/*AnyValue.DefaultAnyValue conf = new AnyValue.DefaultAnyValue();
conf.addValue("node", new AnyValue.DefaultAnyValue().addValue("addr", "47.111.150.118").addValue("port", "6064").addValue("password", "*Zhong9307!").addValue("db", 2));
source.defaultConvert = JsonFactory.root().getConvert();
source.initValueType(String.class); //values use the String type
source.init(conf);
source.init(conf);*/
}
public static void main(String[] args) {
//source.setLong("a", 125);
/*long a = source.getLong("a", 0);
System.out.println(a);
List<String> keys = source.keys("farm*");
keys.forEach(x -> System.out.println(x));
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}*/
// ===========================================
//System.out.println(source.remove("a", "b"));
// bit
@ -115,10 +211,10 @@ public class RedisTest {
System.out.println(source.getCollectionSize("sk")); // 2*/
Map<String, String> hms = source.getHms("supportusers", "5-kfeu0f", "xxxx", "3-0kbt7u8t", "95q- ");
/*Map<String, String> hms = source.getHms("supportusers", "5-kfeu0f", "xxxx", "3-0kbt7u8t", "95q- ");
hms.forEach((k, v) -> {
System.out.println(k + " : " + v);
});
});*/
/*MyRedisCacheSource<String> source2 = new MyRedisCacheSource();

View File

@ -1,6 +1,7 @@
package com.zdemo.test;
import com.zdemo.IType;
import com.zdemo.ZhubProvider;
import com.zdemo.zhub.RpcResult;
import com.zdemo.zhub.ZHubClient;
import org.redkale.net.http.RestMapping;
@ -15,22 +16,28 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
@RestService(automapping = true)
@RestService(automapping = true, name = "hello")
public class HelloService implements Service {
@Resource(name = "zhub")
@Resource(name = "hub")
private ZHubClient zhub;
private net.tccn.zhub.ZHubClient zhubx = null;
/*@Resource(name = "hubx")
private ZHubClient zhubx;
@Resource(name = "vvvvhub2")
private ZHubClient zhub2;*/
//private net.tccn.zhub.ZHubClient zhubx = null;
@Override
public void init(AnyValue config) {
CompletableFuture.runAsync(() -> {
/*CompletableFuture.runAsync(() -> {
zhubx = new net.tccn.zhub.ZHubClient("127.0.0.1", 1216, "g-dev", "DEV-LOCAL");
//zhubx = new net.tccn.zhub.ZHubClient("47.111.150.118", 6066, "g-dev", "DEV-LOCAL");
});
});*/
// Function<Rpc<T>, RpcResult<R>> fun
/*zhub.rpcSubscribe("x", new TypeToken<String>() {
@ -47,7 +54,7 @@ public class HelloService implements Service {
zhub.subscribe("sport:reqtime", x -> {
System.out.println(x);
});
zhub.subscribe("abx", x -> {
zhub.subscribe("abx1", x -> {
System.out.println(x);
});
@ -111,8 +118,8 @@ public class HelloService implements Service {
/*RpcResult<FileToken> x = zhub.rpc("rpc:file:up-token", Map.of(), new TypeToken<>() {
});*/
net.tccn.zhub.RpcResult<Object> x = zhubx.rpc("y", v + i, new com.google.gson.reflect.TypeToken<>() {
});
/*RpcResult<Object> x = zhubx.rpc("y", v + i, new com.google.gson.reflect.TypeToken<>() {
});*/
System.out.println("time: " + (System.currentTimeMillis() - start) + " ms");
@ -122,6 +129,12 @@ public class HelloService implements Service {
return "ok";
}
@RestMapping(name = "send")
public String send() {
zhub.publish("abx1", 1);
return "ok";
}
public static void main(String[] args) {
// "\"别人家的女娃子\uD83E\uDD1E\uD83C\uDFFB\""