6 Commits

SHA1 Message Date
31916dc6a0 Refactor: rename package structure 2024-04-24 20:08:05 +08:00
efdf42c4f2 Refactor: rename package structure 2024-04-24 20:05:12 +08:00
4d125f8e46 Fix: RPC data parsing error 2024-04-22 00:13:56 +08:00
8a5f1c10e1 Improve: 1. convert local-mode return values whose type does not match the target type; 2. other optimizations 2024-04-09 02:27:14 +08:00
9d23c84cba . 2024-04-07 23:35:31 +08:00
ff3d877a37 Improve: RPC calls prefer local subscriptions 2024-03-28 00:22:43 +08:00
30 changed files with 498 additions and 4853 deletions

pom.xml

@@ -4,9 +4,9 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>net.tccn</groupId>
<groupId>dev.zhub</groupId>
<artifactId>zhub-client-redkale</artifactId>
<version>x.22.0</version> <!-- supports redkale 2.2 -->
<version>0.1.1.dev</version>
<properties>
<maven.compiler.source>17</maven.compiler.source>
@@ -14,11 +14,26 @@
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<repositories>
<repository>
<id>maven-release</id>
<name>maven-nexus</name>
<url>https://nexus.1216.top/repository/maven-public/</url>
</repository>
</repositories>
<distributionManagement>
<repository>
<id>mvn-release</id>
<name>mvn-release</name>
<url>https://nexus.1216.top/repository/maven-releases/</url>
</repository>
</distributionManagement>
<dependencies>
<dependency>
<groupId>org.redkale</groupId>
<artifactId>redkale</artifactId>
<version>2.2.0</version>
<version>2.8.0.dev</version>
<scope>compile</scope>
</dependency>
<dependency>
@@ -28,22 +43,4 @@
<scope>compile</scope>
</dependency>
</dependencies>
<repositories>
<repository>
<id>maven-nexus</id>
<name>maven-nexus</name>
<url>https://nexus.1216.top/repository/maven-public/</url>
</repository>
</repositories>
<distributionManagement>
<repository>
<id>mvn-release</id>
<name>mvn-release</name>
<url>https://nexus.1216.top/repository/maven-releases/</url>
</repository>
</distributionManagement>
</project>


@@ -1,9 +1,9 @@
package net.tccn;
package dev.zhub;
import org.redkale.convert.json.JsonConvert;
import org.redkale.util.Resourcable;
import org.redkale.util.TypeToken;
import javax.annotation.Resource;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
@@ -13,14 +13,13 @@ import java.util.function.Consumer;
* @author Liang
* @date 2020-09-05 23:18
*/
public abstract class AbstractConsumer implements IConsumer {
public abstract class AbstractConsumer extends ZhubAgentProvider implements IConsumer, Resourcable {
protected JsonConvert convert = JsonConvert.root();
@Resource(name = "APP_NAME")
protected String APP_NAME = "";
protected static String APP_NAME = "";
private Map<String, EventType> eventMap = new ConcurrentHashMap<>();
protected Map<String, EventType<?>> eventMap = new ConcurrentHashMap<>();
protected abstract String getGroupid();
@@ -32,6 +31,7 @@ public abstract class AbstractConsumer implements IConsumer {
return Set.of("-");
}
// pre-processing before a topic message is consumed
protected void accept(String topic, String value) {
EventType eventType = eventMap.get(topic);
@@ -45,6 +45,12 @@ public abstract class AbstractConsumer implements IConsumer {
eventType.accept(data);
}
// rpc callee side
protected <T> void rpcAccept(String topic, T value) {
EventType eventType = eventMap.get(topic);
eventType.accept(value);
}
protected final void removeEventType(String topic) {
eventMap.remove(topic);
}
@@ -73,4 +79,18 @@ public abstract class AbstractConsumer implements IConsumer {
}
// --------------
@Override
public String resourceName() {
return super.getName();
}
protected String toStr(Object v) {
if (v instanceof String) {
return (String) v;
} else if (v == null) {
return null;
}
return convert.convertTo(v);
}
}
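
For orientation, the accept/rpcAccept pair above implements a simple topic-dispatch pattern: accept receives the raw String payload from the wire and decodes it before invoking the subscriber, while rpcAccept (the local RPC path added in this change) hands an already-typed object straight through without serialization. Below is a minimal, self-contained sketch of that pattern; MiniEventType, the topic name, and the lambdas are hypothetical stand-ins, not the project's actual EventType class.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Consumer;
import java.util.function.Function;

// Hypothetical stand-in for the project's EventType: a decoder paired with a consumer.
final class MiniEventType<T> {
    private final Function<String, T> decoder; // e.g. JSON string -> T
    private final Consumer<T> consumer;

    MiniEventType(Function<String, T> decoder, Consumer<T> consumer) {
        this.decoder = decoder;
        this.consumer = consumer;
    }

    void acceptRaw(String raw) { consumer.accept(decoder.apply(raw)); } // remote path: decode first

    @SuppressWarnings("unchecked")
    void acceptTyped(Object value) { consumer.accept((T) value); }      // local path: pass through
}

public class DispatchSketch {
    private final Map<String, MiniEventType<?>> eventMap = new ConcurrentHashMap<>();

    <T> void subscribe(String topic, Function<String, T> decoder, Consumer<T> consumer) {
        eventMap.put(topic, new MiniEventType<>(decoder, consumer));
    }

    void accept(String topic, String rawValue) {    // mirrors AbstractConsumer.accept
        MiniEventType<?> et = eventMap.get(topic);
        if (et != null) et.acceptRaw(rawValue);
    }

    <T> void rpcAccept(String topic, T value) {      // mirrors AbstractConsumer.rpcAccept
        MiniEventType<?> et = eventMap.get(topic);
        if (et != null) et.acceptTyped(value);
    }

    public static void main(String[] args) {
        DispatchSketch hub = new DispatchSketch();
        hub.subscribe("greeting", String::toUpperCase, msg -> System.out.println("got " + msg));
        hub.accept("greeting", "hello");            // remote: raw string decoded before delivery
        hub.rpcAccept("greeting", "ALREADY-TYPED"); // local: object delivered as-is
    }
}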


@@ -1,4 +1,4 @@
package net.tccn;
package dev.zhub;
/**
* publish/subscribe event


@@ -1,4 +1,4 @@
package net.tccn;
package dev.zhub;
import org.redkale.util.TypeToken;


@@ -1,4 +1,4 @@
package net.tccn;
package dev.zhub;
import org.redkale.util.TypeToken;


@@ -1,4 +1,4 @@
package net.tccn;
package dev.zhub;
import java.util.logging.Logger;


@@ -1,4 +1,4 @@
package net.tccn;
package dev.zhub;
import org.redkale.util.TypeToken;


@@ -1,25 +1,173 @@
package org.redkalex.cache.redis;
package dev.zhub;
import org.redkale.convert.json.JsonFactory;
import org.junit.Test;
import org.redkale.net.AsyncIOGroup;
import org.redkale.util.AnyValue;
import org.redkale.util.ResourceFactory;
import org.redkalex.cache.redis.MyRedisCacheSource;
import java.util.Map;
import static org.redkale.boot.Application.RESNAME_APP_CLIENT_ASYNCGROUP;
import static org.redkale.source.AbstractCacheSource.*;
public class RedisTest {
static MyRedisCacheSource<String> source = new MyRedisCacheSource();
static MyRedisCacheSource source = new MyRedisCacheSource();
static {
AnyValue.DefaultAnyValue conf = new AnyValue.DefaultAnyValue();
/**
* 3
*/
@Test
public void keyTest() {
source.set("a", 3);
System.out.println(source.get("a"));
source.del("a");
}
/**
* ax:false
* ax:true
* ax:false
*/
@Test
public void bitTest() {
boolean ax = source.getBit("ax", 6);
System.out.println("ax:"+ ax); // false
source.setBit("ax", 6, true);
ax = source.getBit("ax", 6);
System.out.println("ax:"+ ax); // true
source.setBit("ax", 6, false);
ax = source.getBit("ax", 6);
System.out.println("ax:"+ ax); // false
source.del("ax");
}
@Test
public void setTest() {
source.del("setx");
source.sadd("setx", int.class, 1, 2, 3, 5, 6);
int setx = source.spop("setx", int.class);
System.out.println(setx);
setx = source.spop("setx", int.class);
System.out.println(setx);
source.del("setx");
source.srem("setx", int.class,213, 2312);
/*//source.sadd("setx", list.toArray(Integer[]::new));
List<Integer> list = List.of(2, 3, 5);
// source.sadd("setx", list.toArray(Integer[]::new));
source.sadd("setx", list.toArray(Integer[]::new));
source.sadd("setx", 12, 2312, 213);
source.sadd("setx", List.of(1011, 10222));*/
}
static { // redis://:*Zhong9307!@47.111.150.118:6064?db=2
AnyValue.DefaultAnyValue conf = new AnyValue.DefaultAnyValue().addValue(CACHE_SOURCE_MAXCONNS, "1");
conf.addValue(CACHE_SOURCE_NODES, "redis://:123456@127.0.0.1:6379?db=0");
final ResourceFactory factory = ResourceFactory.create();
final AsyncIOGroup asyncGroup = new AsyncIOGroup(8192, 16);
asyncGroup.start();
factory.register(RESNAME_APP_CLIENT_ASYNCGROUP, asyncGroup);
factory.inject(source);
//source.defaultConvert = JsonFactory.root().getConvert();
source.init(conf);
/*
source.lock("lockx", 5000);
*/
source.keysStartsWith("more-hot").forEach(x -> {
System.out.println(x);
source.del(x);
int i = (short) 3;
});
//--------------------- set ------------------------------
/*
*/
/*
Collection<String> setx1 = source.getCollection("setx", String.class);
System.out.println(setx1);
//source.getexLong()
source.setHms("hmx", Map.of("a", "5", "b", "51", "c", "ads"));
List<Serializable> hmget = source.hmget("hmx", int.class, "a");
System.out.println(hmget);
Integer hm = source.getHm("hmx", int.class, "ads");
System.out.println(hm);
Map<String, String> hms = source.getHms("hmx", "a", "b");
System.out.println("hmx:" + hms);
*//*System.out.println("======================================================");
System.out.println(source.incrHm("hmx", "a", -6.0));
hms = source.getHms("hmx", "a", "b");
System.out.println("hmx result after a+1: " + hms);*//*
System.out.println("======================================================");
source.setHm("hmx", "c", 12);
hms = source.getHms("hmx", "a", "b", "c", "d", "a");
System.out.println("hmx result after setting c=12: " + hms);
System.out.println("======================================================");
Double c = source.getHm("hmx", double.class, "c");
System.out.println("value of c in hmx: " + c);*/
/*Map<String, Object> hmx = source.getHmall("hmx");
System.out.println("Hmall" + hmx);*/
/*AnyValue.DefaultAnyValue conf = new AnyValue.DefaultAnyValue();
conf.addValue("node", new AnyValue.DefaultAnyValue().addValue("addr", "47.111.150.118").addValue("port", "6064").addValue("password", "*Zhong9307!").addValue("db", 2));
source.defaultConvert = JsonFactory.root().getConvert();
source.initValueType(String.class); //value用String类型
source.init(conf);
source.init(conf);*/
}
public static void main(String[] args) {
//source.setLong("a", 125);
/*long a = source.getLong("a", 0);
System.out.println(a);
List<String> keys = source.keys("farm*");
keys.forEach(x -> System.out.println(x));
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}*/
// ===========================================
//System.out.println(source.remove("a", "b"));
// bit
@@ -115,10 +263,10 @@ public class RedisTest {
System.out.println(source.getCollectionSize("sk")); // 2*/
Map<String, String> hms = source.getHms("supportusers", "5-kfeu0f", "xxxx", "3-0kbt7u8t", "95q- ");
/*Map<String, String> hms = source.getHms("supportusers", "5-kfeu0f", "xxxx", "3-0kbt7u8t", "95q- ");
hms.forEach((k, v) -> {
System.out.println(k + " : " + v);
});
});*/
/*MyRedisCacheSource<String> source2 = new MyRedisCacheSource();


@@ -0,0 +1,63 @@
package dev.zhub;
import org.redkale.boot.Application;
import org.redkale.boot.NodeServer;
import org.redkale.cluster.CacheClusterAgent;
import org.redkale.cluster.ClusterAgent;
import org.redkale.service.Service;
import org.redkale.util.ResourceEvent;
import java.net.InetSocketAddress;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
public abstract class ZhubAgentProvider extends ClusterAgent {
@Override
public void onResourceChange(ResourceEvent[] events) {
}
@Override
public void register(Application application) {
}
@Override
public void deregister(Application application) {
}
@Override
public CompletableFuture<Set<InetSocketAddress>> queryHttpAddress(String protocol, String module, String resname) {
return null;
}
@Override
public CompletableFuture<Set<InetSocketAddress>> querySncpAddress(String protocol, String restype, String resname) {
return null;
}
@Override
protected CompletableFuture<Set<InetSocketAddress>> queryAddress(ClusterEntry entry) {
return null;
}
@Override
protected ClusterEntry register(NodeServer ns, String protocol, Service service) {
deregister(ns, protocol, service);
ClusterEntry clusterEntry = new ClusterEntry(ns, protocol, service);
CacheClusterAgent.AddressEntry entry = new CacheClusterAgent.AddressEntry();
entry.addr = clusterEntry.address;
entry.resname = clusterEntry.resourceName;
entry.nodeid = this.nodeid;
entry.time = System.currentTimeMillis();
//source.hset(clusterEntry.serviceName, clusterEntry.serviceid, CacheClusterAgent.AddressEntry.class, entry);
return clusterEntry;
}
@Override
protected void deregister(NodeServer ns, String protocol, Service service) {
}
}


@@ -0,0 +1,21 @@
package dev.zhub;
import dev.zhub.client.ZHubClient;
import org.redkale.annotation.Priority;
import org.redkale.cluster.ClusterAgent;
import org.redkale.cluster.ClusterAgentProvider;
import org.redkale.util.AnyValue;
@Priority(1)
public class ZhubProvider implements ClusterAgentProvider {
@Override
public boolean acceptsConf(AnyValue config) {
return new ZHubClient().acceptsConf(config);
}
@Override
public ClusterAgent createInstance() {
return new ZHubClient();
}
}
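
ZhubProvider is wired up through the META-INF/services entry changed later in this diff (net.tccn.ZhubProvider becomes dev.zhub.ZhubProvider). A hedged sketch of how such an SPI provider is typically resolved, assuming plain java.util.ServiceLoader semantics; Redkale's actual bootstrap code may differ:

import java.util.ServiceLoader;

import org.redkale.cluster.ClusterAgent;
import org.redkale.cluster.ClusterAgentProvider;
import org.redkale.util.AnyValue;

public class ProviderLookupSketch {
    // Returns the first registered provider that accepts the given cluster config, if any.
    static ClusterAgent createAgent(AnyValue clusterConf) {
        for (ClusterAgentProvider provider : ServiceLoader.load(ClusterAgentProvider.class)) {
            if (provider.acceptsConf(clusterConf)) {
                return provider.createInstance(); // e.g. a ZHubClient for a zhub-style config
            }
        }
        return null; // nothing matching was registered under META-INF/services
    }
}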


@@ -1,4 +1,4 @@
package net.tccn.zhub;
package dev.zhub.client;
// ================================================== lock ==================================================
public class Lock {


@@ -1,8 +1,9 @@
package net.tccn.zhub;
package dev.zhub.client;
import org.redkale.convert.ConvertColumn;
import org.redkale.convert.json.JsonConvert;
import org.redkale.service.RetResult;
import org.redkale.util.TypeToken;
import org.redkale.util.Utility;
public class Rpc<T> {
private String ruk; // request unique key:
@@ -11,13 +12,15 @@ public class Rpc<T> {
private RpcResult rpcResult;
private TypeToken typeToken;
public Rpc() {
}
protected Rpc(String appname, String ruk, String topic, Object value) {
this.ruk = appname + "::" + ruk;
protected Rpc(String appname, String topic, T value) {
this.ruk = appname + "::" + Utility.uuid();
this.topic = topic;
this.value = (T) JsonConvert.root().convertTo(value);
this.value = value;
}
public String getRuk() {
@@ -53,6 +56,16 @@ public class Rpc<T> {
this.rpcResult = rpcResult;
}
@ConvertColumn(ignore = true)
public TypeToken getTypeToken() {
return typeToken;
}
@ConvertColumn(ignore = true)
public void setTypeToken(TypeToken typeToken) {
this.typeToken = typeToken;
}
@ConvertColumn(ignore = true)
public String getBackTopic() {
return ruk.split("::")[0];


@@ -1,4 +1,4 @@
package net.tccn.zhub;
package dev.zhub.client;
public class RpcResult<R> {
private String ruk;


@@ -1,10 +1,14 @@
package net.tccn.zhub;
package dev.zhub.client;
import net.tccn.*;
import net.tccn.timer.Timers;
import dev.zhub.*;
import dev.zhub.timer.Timers;
import org.redkale.annotation.AutoLoad;
import org.redkale.annotation.ResourceType;
import org.redkale.service.Local;
import org.redkale.service.Service;
import org.redkale.util.*;
import org.redkale.util.AnyValue;
import org.redkale.util.Comment;
import org.redkale.util.TypeToken;
import java.io.BufferedReader;
import java.io.IOException;
@@ -20,8 +24,6 @@ import java.util.function.Function;
import java.util.logging.Level;
import java.util.logging.Logger;
import static java.nio.charset.StandardCharsets.UTF_8;
@Local
@AutoLoad(false)
@ResourceType(ZHubClient.class)
@@ -38,21 +40,45 @@ public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer
private BufferedReader reader;
private final LinkedBlockingQueue<Timer> timerQueue = new LinkedBlockingQueue<>();
private final LinkedBlockingQueue<Event<String>> topicQueue = new LinkedBlockingQueue<>();
private final LinkedBlockingQueue<Event<String>> rpcBackQueue = new LinkedBlockingQueue<>(); // RPC BACK MSG
private final LinkedBlockingQueue<Event<String>> rpcCallQueue = new LinkedBlockingQueue<>(); // RPC CALL MSG
private final LinkedBlockingQueue<Event<String>> topicQueue = new LinkedBlockingQueue<>(); // [=> Object]
private final LinkedBlockingQueue<Event<Object>> rpcBackQueue = new LinkedBlockingQueue<>(); // RPC BACK MSG [=> Object]
private final LinkedBlockingQueue<Event<Object>> rpcCallQueue = new LinkedBlockingQueue<>(); // RPC CALL MSG [=> Object]
private final LinkedBlockingQueue<String> sendMsgQueue = new LinkedBlockingQueue<>(); // SEND MSG
/*private BiConsumer<Runnable, Integer> threadBuilder = (r, n) -> {
for (int i = 0; i < n; i++) {
new Thread(() -> r.run()).start();
}
};*/
private static Map<String, ZHubClient> mainHub = new HashMap<>(); // 127.0.0.1:1216 - ZHubClient
public ZHubClient() {
}
public ZHubClient(String name, Map<String, String> attr) {
this.APP_NAME = name;
this.addr = attr.get("addr");
this.groupid = attr.get("groupid");
this.auth = attr.get("auth");
this.initClient(null);
}
@Override
public void init(AnyValue config) {
APP_NAME = application.getName();
/*if (!preInit()) {
return;
}*/
if (config == null) {
initClient(null);
} else {
Map<String, AnyValue> nodes = getNodes(config);
for (String rsName : nodes.keySet()) {
ZHubClient client = new ZHubClient().initClient(nodes.get(rsName));
application.getResourceFactory().register(rsName, client);
}
}
}
private ZHubClient initClient(AnyValue config) {
// auto-injection
if (config != null) {
addr = config.getValue("addr", addr);
@@ -68,10 +94,8 @@ public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer
}
// the first instance to start becomes the main instance
synchronized (ZHubClient.class) {
if (!mainHub.containsKey(addr)) { // ensure this init logic runs synchronously
mainHub.put(addr, this);
}
if (!mainHub.containsKey(addr)) { // ensure this init logic runs synchronously
mainHub.put(addr, this);
}
CompletableFuture.runAsync(() -> {
@@ -113,7 +137,7 @@ public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer
String value = "";
do {
if (value.length() > 0) {
if (!value.isEmpty()) {
value += "\r\n";
}
String s = reader.readLine();
@@ -189,93 +213,88 @@ public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer
}).thenAcceptAsync(x -> {
// scheduled timer events; elapsed-time monitoring in place
new Thread(() -> {
ExecutorService executor = Executors.newSingleThreadExecutor();
ExecutorService pool = Executors.newFixedThreadPool(1);
while (true) {
Timer timer = null;
try {
timer = timerQueue.take();
long start = System.currentTimeMillis();
executor.submit(timer.runnable).get(5, TimeUnit.SECONDS);
pool.submit(timer.runnable).get(5, TimeUnit.SECONDS);
long end = System.currentTimeMillis();
logger.finest(String.format("timer [%s] : elapsed time %s ms", timer.name, end - start));
} catch (InterruptedException | ExecutionException | TimeoutException e) {
if (e instanceof TimeoutException) {
executor = Executors.newSingleThreadExecutor();
logger.log(Level.WARNING, "TimeoutException [" + timer.name + "]", e);
} else {
e.printStackTrace();
}
} catch (InterruptedException e) {
e.printStackTrace();
} catch (TimeoutException e) {
logger.log(Level.SEVERE, "timer [" + timer.name + "] time out: " + 5 + " S", e);
pool = Executors.newFixedThreadPool(1);
} catch (Exception e) {
logger.log(Level.WARNING, "timer [" + timer.name + "]", e);
}
}
}).start();
// topic messages; elapsed-time monitoring in place
new Thread(() -> {
ExecutorService executor = Executors.newSingleThreadExecutor();
ExecutorService pool = Executors.newFixedThreadPool(1);
while (true) {
Event<String> event = null;
try {
event = topicQueue.take();
logger.log(Level.FINE, "topic[" + event.topic + "] :" + event.value);
String topic = event.topic;
String value = event.value;
executor.submit(() -> accept(topic, value)).get(5, TimeUnit.SECONDS);
} catch (InterruptedException | ExecutionException | TimeoutException e) {
if (e instanceof TimeoutException) {
executor = Executors.newSingleThreadExecutor();
logger.log(Level.WARNING, "TimeoutException, topic[" + event.topic + "], value[" + event.value + "]", e);
} else if (event != null) {
logger.log(Level.WARNING, "topic[" + event.topic + "] event accept error :" + event.value, e);
}
pool.submit(() -> accept(topic, value)).get(5, TimeUnit.SECONDS);
} catch (InterruptedException e) {
e.printStackTrace();
} catch (TimeoutException e) {
logger.log(Level.SEVERE, "topic[" + event.topic + "] event deal time out: " + 5 + " S, value: " + toStr(event.value), e);
pool = Executors.newFixedThreadPool(1);
} catch (Exception e) {
logger.log(Level.WARNING, "topic[" + event.topic + "] event accept error :" + toStr(event.value), e);
}
}
}, "ZHub-topic-accept").start();
}).start();
// rpc back: data parsing only, no elapsed-time monitoring yet
new Thread(() -> {
ExecutorService executor = Executors.newSingleThreadExecutor();
while (true) {
Event<String> event = null;
Event<Object> event = null;
try {
event = rpcBackQueue.take();
logger.info(String.format("rpc-back:[%s]", event.value));
String value = event.value;
executor.submit(() -> rpcAccept(value)).get(5, TimeUnit.SECONDS);
} catch (InterruptedException | ExecutionException | TimeoutException e) {
if (e instanceof TimeoutException) {
executor = Executors.newSingleThreadExecutor();
logger.log(Level.WARNING, "rpc-back TimeoutException, topic[" + event.topic + "], value[" + event.value + "]", e);
} else if (event != null) {
logger.log(Level.WARNING, "rpc-back[" + event.value + "] event accept error :" + event.value, e);
}
//if (event)
logger.finest(String.format("rpc-back:[%s]: %s", event.topic, toStr(event.value)));
rpcAccept(event.value);
} catch (InterruptedException e) {
e.printStackTrace();
} catch (Exception e) {
logger.log(Level.WARNING, "rpc-back[" + event.topic + "] event accept error :" + toStr(event.value), e);
}
}
}, "ZHub-rpc-call").start();
}).start();
// rpc call; elapsed-time monitoring in place
new Thread(() -> {
ExecutorService executor = Executors.newSingleThreadExecutor();
ExecutorService pool = Executors.newFixedThreadPool(1);
while (true) {
Event<String> event = null;
Event<Object> event = null;
try {
event = rpcCallQueue.take();
logger.info(String.format("rpc-call:[%s] %s", event.topic, event.value));
logger.finest(String.format("rpc-call:[%s] %s", event.topic, toStr(event.value)));
String topic = event.topic;
String value = event.value;
executor.submit(() -> accept(topic, value)).get(5, TimeUnit.SECONDS);
} catch (InterruptedException | ExecutionException | TimeoutException e) {
if (e instanceof TimeoutException) {
executor = Executors.newSingleThreadExecutor();
logger.log(Level.WARNING, "rpc-call TimeoutException, topic[" + event.topic + "], value[" + event.value + "]", e);
} else if (event != null) {
logger.log(Level.WARNING, "rpc-call[" + event.topic + "] event accept error :" + event.value, e);
}
Object value = event.value;
pool.submit(() -> rpcAccept(topic, value)).get(5, TimeUnit.SECONDS);
} catch (InterruptedException e) {
e.printStackTrace();
} catch (TimeoutException e) {
logger.log(Level.SEVERE, "topic[" + event.topic + "] event deal time out: " + 5 + " S, value: " + toStr(event.value), e);
pool = Executors.newFixedThreadPool(1);
} catch (Exception e) {
logger.log(Level.WARNING, "rpc-call[" + event.topic + "] event accept error :" + toStr(event.value), e);
}
}
}, "ZHub-rpc-call").start();
}).start();
// send msg
new Thread(() -> {
@@ -283,23 +302,20 @@ public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer
String msg = null;
try {
msg = sendMsgQueue.take();
writer.write(msg.getBytes(UTF_8));
writer.flush();
} catch (InterruptedException | IOException e) {
logger.log(Level.WARNING, "send-msg[" + msg + "] event accept error :", e);
try {
Thread.sleep(5000);
assert msg != null;
writer.write(msg.getBytes(UTF_8));
writer.flush();
} catch (IOException | InterruptedException | NullPointerException ex) {
e.printStackTrace();
}
// logger.log(Level.FINEST, "send-msg: [" + msg + "]");
writer.write(msg.getBytes());
writer.flush();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (Exception e) {
logger.log(Level.WARNING, "send-msg[" + msg + "] event accept error :", e);
}
}
}).start();
});
return this;
}
public boolean acceptsConf(AnyValue config) {
@@ -371,15 +387,6 @@ public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer
return str.length();
}
private String toStr(Object v) {
if (v instanceof String) {
return (String) v;
} else if (v == null) {
return null;
}
return convert.convertTo(v);
}
protected boolean initSocket(int retry) {
for (int i = 0; i <= retry; i++) {
try {
@@ -392,7 +399,7 @@ public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer
client.setKeepAlive(true);
writer = client.getOutputStream();
reader = new BufferedReader(new InputStreamReader(client.getInputStream(), UTF_8));
reader = new BufferedReader(new InputStreamReader(client.getInputStream()));
String groupid = getGroupid();
if (groupid == null || groupid.isEmpty()) {
@@ -401,7 +408,7 @@ public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer
send("auth", auth);
send("groupid " + groupid);
StringBuffer buf = new StringBuffer("subscribe lock trylock");
StringBuilder buf = new StringBuilder("subscribe lock trylock");
if (mainHub.containsValue(this)) {
buf.append(" " + APP_NAME);
}
@@ -415,9 +422,9 @@ public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer
send("timer", name);
});
if (retry > 0) {
logger.warning(String.format("ZHubClient[%s][%s] %s Succeed", getGroupid(), i + 1, retry > 0 ? "reconnection" : "init"));
logger.warning(String.format("ZHubClient[%s][%s] %s Succeed", getGroupid(), i + 1, "reconnection"));
} else {
logger.fine(String.format("ZHubClient[%s] %s Succeed", getGroupid(), retry > 0 ? "reconnection" : "init"));
logger.fine(String.format("ZHubClient[%s] %s Succeed", getGroupid(), "init"));
}
return true;
} catch (Exception e) {
@@ -448,11 +455,15 @@ public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer
}
public boolean publish(String topic, Object v) {
/*if (eventMap.containsKey(topic)) { // local call
topicQueue.add(Event.of(topic, v));
return true;
}*/
return send("publish", topic, toStr(v));
}
public void broadcast(String topic, Object v) {
send("broadcast", topic, toStr(v));
send("broadcast", topic, toStr(v)); // broadcast must always go through the remote path
}
// send a publish topic message; if the same topic + "-" + value is sent repeatedly, the delay is reset
@@ -581,11 +592,10 @@ public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer
// ================================================== rpc ==================================================
// -- caller side --
private static Map<String, Rpc> rpcMap = new ConcurrentHashMap<>();
private static Map<String, TypeToken> rpcRetType = new ConcurrentHashMap<>();
@Comment("rpc call")
public RpcResult<String> rpc(String topic, Object v) {
return rpc(topic, v, IType.STRING);
public RpcResult<Void> rpc(String topic, Object v) {
return rpc(topic, v, null);
}
@Comment("rpc call")
@@ -595,14 +605,18 @@ public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer
@Comment("rpc call")
public <T, R> RpcResult<R> rpc(String topic, T v, TypeToken<R> typeToken, long timeout) {
Rpc rpc = new Rpc<>(APP_NAME, Utility.uuid(), topic, v);
Rpc rpc = new Rpc<>(APP_NAME, topic, v);
rpc.setTypeToken(typeToken);
String ruk = rpc.getRuk();
rpcMap.put(ruk, rpc);
if (typeToken != null) {
rpcRetType.put(ruk, typeToken);
}
try {
publish(topic, rpc); // send("rpc", topic, toStr(rpc));
if (eventMap.containsKey(topic)) { // local call
rpcCallQueue.add(Event.of(topic, rpc));
} else {
rpc.setValue(toStr(rpc.getValue()));
publish(topic, rpc); // send("rpc", topic, toStr(rpc));
}
synchronized (rpc) {
if (timeout <= 0) {
timeout = 1000 * 15;
@@ -634,8 +648,8 @@ public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer
return rpc.getRpcResult();
}
public <T> CompletableFuture<RpcResult<String>> rpcAsync(String topic, T v) {
return CompletableFuture.supplyAsync(() -> rpc(topic, v, IType.STRING));
public <T, R> CompletableFuture<RpcResult<R>> rpcAsync(String topic, T v) {
return CompletableFuture.supplyAsync(() -> rpc(topic, v, null));
}
public <T, R> CompletableFuture<RpcResult<R>> rpcAsync(String topic, T v, TypeToken<R> typeToken) {
@@ -652,16 +666,38 @@ public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer
// RpcResult: {ruk:xxx-xxxx, retcode:0}
@Comment("rpc call back consumer")
private void rpcAccept(String value) {
private <T> void rpcAccept(T value) {
// received an RpcResult returned by a local call
if (value instanceof RpcResult) {
String ruk = ((RpcResult) value).getRuk();
Rpc rpc = rpcMap.remove(ruk);
if (rpc == null) {
return;
}
// in local mode the returned object's type must match what the caller expects, otherwise a class-cast exception occurs; workaround: when they differ, convert the data
TypeToken typeToken = rpc.getTypeToken();
if (typeToken.getType() != ((RpcResult<?>) value).getResult().getClass()) {
Object result = convert.convertFrom(typeToken.getType(), toStr(((RpcResult<?>) value).getResult()));
((RpcResult<Object>) value).setResult(result);
}
rpc.setRpcResult((RpcResult) value);
synchronized (rpc) {
rpc.notify();
}
return;
}
RpcResult resp = convert.convertFrom(new TypeToken<RpcResult<String>>() {
}.getType(), value);
}.getType(), (String) value);
String ruk = resp.getRuk();
Rpc rpc = rpcMap.remove(ruk);
if (rpc == null) {
return;
}
TypeToken typeToken = rpcRetType.get(ruk);
TypeToken typeToken = rpc.getTypeToken();
Object result = resp.getResult();
if (result != null && typeToken != null && !"java.lang.String".equals(typeToken.getType().getTypeName()) && !"java.lang.Void".equals(typeToken.getType().getTypeName())) {
@@ -685,18 +721,29 @@ public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer
@Comment("rpc call consumer")
public <T, R> void rpcSubscribe(String topic, TypeToken<T> typeToken, Function<Rpc<T>, RpcResult<R>> fun) {
Consumer<String> consumer = v -> {
Consumer<T> consumer = v -> {
Rpc<T> rpc = null;
try {
rpc = convert.convertFrom(new TypeToken<Rpc<String>>() {
}.getType(), v);
if (v instanceof String) {
rpc = convert.convertFrom(new TypeToken<Rpc<String>>() {
}.getType(), (String) v);
} else {
rpc = (Rpc<T>) v;
}
// convert the parameters
T paras = convert.convertFrom(typeToken.getType(), (String) rpc.getValue());
rpc.setValue(paras);
if (rpc.getValue() instanceof String && !"java.lang.String".equals(typeToken.getType().getTypeName())) {
T paras = convert.convertFrom(typeToken.getType(), (String) rpc.getValue());
rpc.setValue(paras);
}
RpcResult result = fun.apply(rpc);
result.setResult(toStr(result.getResult()));
publish(rpc.getBackTopic(), result);
if (APP_NAME.equals(rpc.getBackTopic())) {
rpcBackQueue.add(Event.of(topic, result));
} else {
result.setResult(toStr(result.getResult())); // remote mode: convert the result
publish(rpc.getBackTopic(), result);
}
} catch (Exception e) {
logger.log(Level.WARNING, "rpc call consumer error: " + v, e);
publish(rpc.getBackTopic(), rpc.retError("Service call failed!"));
@@ -705,6 +752,6 @@ public class ZHubClient extends AbstractConsumer implements IConsumer, IProducer
};
rpcTopics.add(topic);
subscribe(topic, consumer);
subscribe(topic, typeToken, consumer);
}
}
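
Putting the caller side together, here is a hedged usage sketch of the API after this change; the topic, DTO classes, and field values are hypothetical, and the ZHubClient instance is assumed to be injected. With the new eventMap.containsKey(topic) branch, the same call is routed through rpcCallQueue when the handler lives in the same process and published remotely otherwise. The callee registers its handler via rpcSubscribe(topic, typeToken, fun) as shown above; building the reply RpcResult is omitted here because its construction API is not part of this diff.

import dev.zhub.client.RpcResult;
import dev.zhub.client.ZHubClient;
import org.redkale.util.TypeToken;

public class RpcUsageSketch {
    // Hypothetical request/response payloads, for illustration only.
    public static class PriceReq { public String sku = "A-100"; }
    public static class PriceResp { public String sku; public long cents; }

    static void call(ZHubClient zhub) {
        PriceReq req = new PriceReq();

        // Synchronous call: explicit result type plus a 3 s timeout.
        RpcResult<PriceResp> rr = zhub.rpc("price.query", req, new TypeToken<PriceResp>() {}, 3_000);
        System.out.println(rr.getResult());

        // Same call, asynchronously.
        zhub.rpcAsync("price.query", req, new TypeToken<PriceResp>() {})
                .thenAccept(r -> System.out.println(r.getResult()));
    }
}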


@@ -1,7 +1,7 @@
package net.tccn.timer;
package dev.zhub.timer;
import net.tccn.timer.queue.TimerQueue;
import net.tccn.timer.task.Task;
import dev.zhub.timer.queue.TimerQueue;
import dev.zhub.timer.task.Task;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@@ -24,7 +24,7 @@ public class TimerExecutor {
for (Task t : task) {
t.setTimerExecutor(this);
queue.push(t);
logger.finest("add new task : " + t.getName());
// logger.finest("add new task : " + t.getName());
}
}


@@ -1,8 +1,8 @@
package net.tccn.timer;
package dev.zhub.timer;
import net.tccn.timer.scheduled.Scheduled;
import net.tccn.timer.task.Job;
import net.tccn.timer.task.Task;
import dev.zhub.timer.scheduled.Scheduled;
import dev.zhub.timer.task.Job;
import dev.zhub.timer.task.Task;
import java.time.LocalDateTime;
import java.time.ZoneId;
@@ -93,10 +93,10 @@ public class TimerTask implements Task {
if (!isComplete) {
int count = execCount.incrementAndGet(); // 执行次数+1
long start = System.currentTimeMillis();
// long start = System.currentTimeMillis();
job.execute(this);
long end = System.currentTimeMillis();
logger.finest(String.format("task [%s] : not complete -> %s, time: %s ms, exec count: %s.", getName(), isComplete ? "had complete" : "not complete", end - start, count));
// long end = System.currentTimeMillis();
// logger.finest(String.format("task [%s] : not complete -> %s, time: %s ms, exec count: %s.", getName(), isComplete ? "had complete" : "not complete", end - start, count));
if (!isComplete) {
timerExecutor.add(this, true);


@@ -1,6 +1,6 @@
package net.tccn.timer;
package dev.zhub.timer;
import net.tccn.timer.scheduled.ScheduledCycle;
import dev.zhub.timer.scheduled.ScheduledCycle;
import org.redkale.util.Utility;
import java.util.function.Supplier;


@@ -1,6 +1,6 @@
package net.tccn.timer.queue;
package dev.zhub.timer.queue;
import net.tccn.timer.task.Task;
import dev.zhub.timer.task.Task;
import java.util.LinkedList;
import java.util.concurrent.TimeUnit;


@@ -1,4 +1,4 @@
package net.tccn.timer.scheduled;
package dev.zhub.timer.scheduled;
import java.time.LocalDateTime;


@@ -1,4 +1,4 @@
package net.tccn.timer.scheduled;
package dev.zhub.timer.scheduled;
import java.time.LocalDateTime;
import java.time.temporal.ChronoUnit;


@@ -1,4 +1,4 @@
package net.tccn.timer.scheduled;
package dev.zhub.timer.scheduled;
import java.time.LocalDate;
import java.time.LocalDateTime;


@@ -1,4 +1,4 @@
package net.tccn.timer.task;
package dev.zhub.timer.task;
/**
* @author: liangxianyou at 2018/12/8 17:24.


@@ -1,7 +1,7 @@
package net.tccn.timer.task;
package dev.zhub.timer.task;
import net.tccn.timer.TimerExecutor;
import net.tccn.timer.scheduled.Scheduled;
import dev.zhub.timer.TimerExecutor;
import dev.zhub.timer.scheduled.Scheduled;
/**
* @author: liangxianyou at 2018/8/5 19:32.


@@ -1,50 +0,0 @@
package net.tccn;
import net.tccn.zhub.ZHubClient;
import org.redkale.boot.Application;
import org.redkale.boot.ApplicationListener;
import org.redkale.service.Service;
import org.redkale.util.AnyValue;
import org.redkale.util.RedkaleClassLoader;
import org.redkale.util.ResourceFactory;
import java.lang.reflect.InvocationTargetException;
import java.util.concurrent.CompletableFuture;
/**
* Service listener
*
* @author: liangxy.
*/
public class ZhubListener implements ApplicationListener {
@Override
public void preStart(Application application) {
CompletableFuture.runAsync(() -> {
ResourceFactory resourceFactory = application.getResourceFactory();
RedkaleClassLoader classLoader = application.getClassLoader();
AnyValue appConfig = application.getAppConfig();
AnyValue zhubs = appConfig.getAnyValue("zhubs");
AnyValue[] values = zhubs.getAnyValues("zhub");
for (AnyValue zhub : values) {
String className = zhub.getValue("value", ZHubClient.class.getCanonicalName());
try {
Class<?> clazz = classLoader.loadClass(className);
Service obj = (Service) clazz.getDeclaredConstructor().newInstance();
application.getResourceFactory().inject(obj);
obj.init(zhub);
resourceFactory.register(zhub.get("name"), clazz, obj);
} catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException | ClassNotFoundException e) {
e.printStackTrace();
}
}
});
}
@Override
public void preShutdown(Application application) {
}
}


@@ -1,430 +0,0 @@
package org.redkalex.cache.redis;
import org.redkale.convert.Convert;
import org.redkale.service.Local;
import org.redkale.source.CacheSource;
import org.redkale.util.AutoLoad;
import org.redkale.util.ResourceType;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Stream;
@Local
@AutoLoad(false)
@ResourceType(CacheSource.class)
public class MyRedisCacheSource<V extends Object> extends RedisCacheSource<V> {
//--------------------- oth ------------------------------
public boolean setnx(String key, Object v) {
byte[][] bytes = Stream.of(key, v).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
Serializable rs = send("SETNX", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
return rs == null ? false : (long) rs == 1;
}
//--------------------- oth ------------------------------
//--------------------- bit ------------------------------
public boolean getBit(String key, int offset) {
byte[][] bytes = Stream.of(key, offset).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
Serializable v = send("GETBIT", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
return v == null ? false : (long) v == 1;
}
public void setBit(String key, int offset, boolean bool) {
byte[][] bytes = Stream.of(key, offset, bool ? 1 : 0).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
send("SETBIT", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
}
//--------------------- bit ------------------------------
//--------------------- lock ------------------------------
// try to acquire the lock; returns 0 on success, otherwise the remaining milliseconds of the existing lock
public int tryLock(String key, int millis) {
byte[][] bytes = Stream.of("" +
"if (redis.call('exists',KEYS[1]) == 0) then " +
"redis.call('psetex', KEYS[1], ARGV[1], 1) " +
"return 0; " +
"else " +
"return redis.call('PTTL', KEYS[1]); " +
"end; ", 1, key, millis).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
int n = (int) send("EVAL", CacheEntryType.OBJECT, (Type) null, null, bytes).join();
return n;
}
// acquire the lock
public void lock(String key, int millis) {
int i;
do {
i = tryLock(key, millis);
try {
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
} while (i > 0);
}
// release the lock
public void unlock(String key) {
remove(key);
}
//--------------------- key ------------------------------
public long getTtl(String key) {
return (long) send("TTL", CacheEntryType.OBJECT, (Type) null, key, key.getBytes(StandardCharsets.UTF_8)).join();
}
public long getPttl(String key) {
return (long) send("PTTL", CacheEntryType.OBJECT, (Type) null, key, key.getBytes(StandardCharsets.UTF_8)).join();
}
public int remove(String... keys) {
if (keys == null || keys.length == 0) {
return 0;
}
List<String> para = new ArrayList<>();
para.add("" +
" local args = ARGV;" +
" local x = 0;" +
" for i,v in ipairs(args) do" +
" local inx = redis.call('del', v);" +
" if(inx > 0) then" +
" x = x + 1;" +
" end" +
" end" +
" return x;");
para.add("0");
for (Object field : keys) {
para.add(String.valueOf(field));
}
byte[][] bytes = para.stream().map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
return (int) send("EVAL", CacheEntryType.OBJECT, (Type) null, null, bytes).join();
}
//--------------------- hmget ------------------------------
public <T extends Object> V getHm(String key, T field) {
// return (V) send("HMGET", CacheEntryType.OBJECT, (Type) null, key, key.getBytes(StandardCharsets.UTF_8), field.getBytes(StandardCharsets.UTF_8)).join();
Map<Object, V> map = getHms(key, field);
return map.get(field);
}
public <T extends Object> Map<T, V> getHms(String key, T... field) {
if (field == null || field.length == 0) {
return new HashMap<>();
}
byte[][] bytes = Stream.concat(Stream.of(key), Stream.of(field)).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
Map<T, V> result = new HashMap<>();
List<V> vs = (List) send("HMGET", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
for (int i = 0; i < field.length; i++) { // /*vs != null && vs.size() > i &&*/
if (vs.get(i) == null) {
continue;
}
result.put(field[i], vs.get(i));
}
return result;
}
public Map<String, V> getHmall(String key) {
List<V> vs = (List) send("HGETALL", CacheEntryType.OBJECT, (Type) null, key, key.getBytes(StandardCharsets.UTF_8)).join();
Map<String, V> result = new HashMap<>(vs.size() / 2);
for (int i = 0; i < vs.size(); i += 2) {
result.put(String.valueOf(vs.get(i)), vs.get(i + 1));
}
return result;
}
//--------------------- hmset、hmdel、incr ------------------------------
public <T> void setHm(String key, T field, V value) {
byte[][] bytes = Stream.of(key, field, value).map(x -> x.toString().getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
send("HMSET", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
}
public <T> void setHms(String key, Map<T, V> kv) {
List<String> args = new ArrayList();
args.add(key);
kv.forEach((k, v) -> {
args.add(String.valueOf(k));
args.add(String.valueOf(v));
});
byte[][] bytes = args.stream().map(x -> x.getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
send("HMSET", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
}
public <T> Long incrHm(String key, T field, long n) {
byte[][] bytes = Stream.of(key, String.valueOf(field), String.valueOf(n)).map(x -> x.getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
return (Long) send("HINCRBY", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
}
public <T> Double incrHm(String key, T field, double n) {
byte[][] bytes = Stream.of(key, String.valueOf(field), String.valueOf(n)).map(x -> x.getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
Serializable v = send("HINCRBYFLOAT", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
if (v == null) {
return null;
}
return Double.parseDouble(String.valueOf(v));
}
public <T> void hdel(String key, T... field) {
byte[][] bytes = Stream.concat(Stream.of(key), Stream.of(field)).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
send("HDEL", null, (Type) null, key, bytes).join();
}
public <T> List<T> zexists(String key, T... fields) {
if (fields == null || fields.length == 0) {
return new ArrayList<>();
}
List<String> para = new ArrayList<>();
para.add("" +
" local key = KEYS[1];" +
" local args = ARGV;" +
" local result = {};" +
" for i,v in ipairs(args) do" +
" local inx = redis.call('ZREVRANK', key, v);" +
" if(inx) then" +
" table.insert(result,1,v);" +
" end" +
" end" +
" return result;");
para.add("1");
para.add(key);
for (Object field : fields) {
para.add(String.valueOf(field));
}
byte[][] bytes = para.stream().map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
return (List<T>) send("EVAL", CacheEntryType.OBJECT, (Type) null, null, bytes).join();
}
//--------------------- set ------------------------------
public <T> T srandomItem(String key) {
byte[][] bytes = Stream.of(key, 1).map(x -> formatValue(CacheEntryType.OBJECT, (Convert) null, (Type) null, x)).toArray(byte[][]::new);
List<T> list = (List) send("SRANDMEMBER", null, (Type) null, key, bytes).join();
return list != null && !list.isEmpty() ? list.get(0) : null;
}
public <T> List<T> srandomItems(String key, int n) {
byte[][] bytes = Stream.of(key, n).map(x -> formatValue(CacheEntryType.OBJECT, (Convert) null, (Type) null, x)).toArray(byte[][]::new);
return (List) send("SRANDMEMBER", null, (Type) null, key, bytes).join();
}
//--------------------- list ------------------------------
public CompletableFuture<Void> appendListItemsAsync(String key, V... values) {
byte[][] bytes = Stream.concat(Stream.of(key), Stream.of(values)).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
return (CompletableFuture) send("RPUSH", null, (Type) null, key, bytes);
}
public CompletableFuture<Void> lpushListItemAsync(String key, V value) {
return (CompletableFuture) send("LPUSH", null, (Type) null, key, key.getBytes(StandardCharsets.UTF_8), formatValue(CacheEntryType.OBJECT, (Convert) null, (Type) null, value));
}
public void lpushListItem(String key, V value) {
lpushListItemAsync(key, value).join();
}
public void appendListItems(String key, V... values) {
appendListItemsAsync(key, values).join();
}
public void appendSetItems(String key, V... values) {
// todo:
for (V v : values) {
appendSetItem(key, v);
}
}
// 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
public CompletableFuture<Collection<V>> getCollectionAsync(String key, int offset, int limit) {
return (CompletableFuture) send("OBJECT", null, (Type) null, key, "ENCODING".getBytes(StandardCharsets.UTF_8), key.getBytes(StandardCharsets.UTF_8)).thenCompose(t -> {
if (t == null) return CompletableFuture.completedFuture(null);
if (new String((byte[]) t).contains("list")) { //list
return send("LRANGE", CacheEntryType.OBJECT, (Type) null, false, key, key.getBytes(StandardCharsets.UTF_8), String.valueOf(offset).getBytes(StandardCharsets.UTF_8), String.valueOf(offset + limit - 1).getBytes(StandardCharsets.UTF_8));
} else {
return send("SMEMBERS", CacheEntryType.OBJECT, (Type) null, true, key, key.getBytes(StandardCharsets.UTF_8));
}
});
}
public Collection<V> getCollection(String key, int offset, int limit) {
return getCollectionAsync(key, offset, limit).join();
}
public V brpop(String key, int seconds) {
byte[][] bytes = Stream.concat(Stream.of(key), Stream.of(seconds)).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
return (V) send("BRPOP", null, (Type) null, key, bytes).join();
}
//--------------------- zset ------------------------------
public <N extends Number> void zadd(String key, Map<V, N> kv) {
if (kv == null || kv.isEmpty()) {
return;
}
List<String> args = new ArrayList();
args.add(key);
kv.forEach((k, v) -> {
args.add(String.valueOf(v));
args.add(String.valueOf(k));
});
byte[][] bytes = args.stream().map(x -> x.getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
send("ZADD", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
}
public <N extends Number> double zincr(String key, Object number, N n) {
byte[][] bytes = Stream.of(key, n, number).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
Serializable v = send("ZINCRBY", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
return Double.parseDouble(String.valueOf(v));
}
public void zrem(String key, V... vs) {
List<String> args = new ArrayList();
args.add(key);
for (V v : vs) {
args.add(String.valueOf(v));
}
byte[][] bytes = args.stream().map(x -> x.getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
send("ZREM", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
}
public int getZrank(String key, V v) {
byte[][] bytes = Stream.of(key, v).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
Long t = (Long) send("ZRANK", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
return t == null ? -1 : (int) (long) t;
}
public int getZrevrank(String key, V v) {
byte[][] bytes = Stream.of(key, v).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
Long t = (Long) send("ZREVRANK", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
return t == null ? -1 : (int) (long) t;
}
//ZRANGE/ZREVRANGE key start stop
public List<V> getZset(String key) {
byte[][] bytes = Stream.of(key, 0, -1).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
List<V> vs = (List<V>) send("ZREVRANGE", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
List<V> vs2 = new ArrayList(vs.size());
for (int i = 0; i < vs.size(); ++i) {
if (i % 2 == 1) {
vs2.add(this.convert.convertFrom(this.objValueType, String.valueOf(vs.get(i))));
} else {
vs2.add(vs.get(i));
}
}
return vs2;
}
public List<V> getZset(String key, int offset, int limit) {
byte[][] bytes = Stream.of(key, offset, offset + limit - 1).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
List<V> vs = (List<V>) send("ZREVRANGE", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
List<V> vs2 = new ArrayList(vs.size());
for (int i = 0; i < vs.size(); ++i) {
if (i % 2 == 1) {
vs2.add(this.convert.convertFrom(this.objValueType, String.valueOf(vs.get(i))));
} else {
vs2.add(vs.get(i));
}
}
return vs2;
}
public LinkedHashMap<V, Long> getZsetLongScore(String key) {
LinkedHashMap<V, Double> map = getZsetDoubleScore(key);
if (map.isEmpty()) {
return new LinkedHashMap<>();
}
LinkedHashMap<V, Long> map2 = new LinkedHashMap<>(map.size());
map.forEach((k, v) -> map2.put(k, (long) (double) v));
return map2;
}
public LinkedHashMap<V, Long> getZsetItemsLongScore(String key) {
byte[][] bytes = Stream.of(key, 0, -1, "WITHSCORES").map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
List vs = (List) send("ZRANGE", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
LinkedHashMap<V, Long> map = new LinkedHashMap<>();
for (int i = 0; i < vs.size(); i += 2) {
map.put((V) vs.get(i), (long) Double.parseDouble((String) vs.get(i + 1)));
}
return map;
}
public Long getZsetLongScore(String key, V v) {
Double score = getZsetDoubleScore(key, v);
if (score == null) {
return null;
}
return (long) (double) score;
}
public LinkedHashMap<V, Double> getZsetDoubleScore(String key) {
byte[][] bytes = Stream.of(key, 0, -1, "WITHSCORES").map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
List vs = (List) send("ZREVRANGE", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
LinkedHashMap<V, Double> map = new LinkedHashMap<>();
for (int i = 0; i < vs.size(); i += 2) {
map.put((V) vs.get(i), Double.parseDouble((String) vs.get(i + 1)));
}
return map;
}
public Double getZsetDoubleScore(String key, V v) {
byte[][] bytes = Stream.of(key, v).map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
Serializable zscore = send("ZSCORE", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
if (zscore == null) {
return null;
}
return Double.parseDouble(String.valueOf(zscore));
}
public LinkedHashMap<V, Long> getZsetLongScore(String key, int offset, int limit) {
byte[][] bytes = Stream.of(key, offset, offset + limit - 1, "WITHSCORES").map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
List vs = (List) send("ZREVRANGE", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
LinkedHashMap<V, Long> map = new LinkedHashMap<>();
for (int i = 0; i < vs.size(); i += 2) {
map.put((V) vs.get(i), (long) Double.parseDouble((String) vs.get(i + 1)));
}
return map;
}
public LinkedHashMap<V, Double> getZsetDoubleScore(String key, int offset, int limit) {
byte[][] bytes = Stream.of(key, offset, offset + limit - 1, "WITHSCORES").map(x -> String.valueOf(x).getBytes(StandardCharsets.UTF_8)).toArray(byte[][]::new);
List vs = (List) send("ZREVRANGE", CacheEntryType.OBJECT, (Type) null, key, bytes).join();
LinkedHashMap<V, Double> map = new LinkedHashMap<>();
for (int i = 0; i < vs.size(); i += 2) {
map.put((V) vs.get(i), Double.parseDouble(vs.get(i + 1) + ""));
}
return map;
}
// ----------
protected byte[] formatValue(CacheEntryType cacheType, Convert convert0, Type resultType, Object value) {
if (value == null) return "null".getBytes(StandardCharsets.UTF_8);
if (convert0 == null) convert0 = convert;
if (cacheType == CacheEntryType.LONG || cacheType == CacheEntryType.ATOMIC)
return String.valueOf(value).getBytes(StandardCharsets.UTF_8);
if (cacheType == CacheEntryType.STRING) return convert0.convertToBytes(String.class, value);
if (value instanceof String) return String.valueOf(value).getBytes(StandardCharsets.UTF_8);
if (value instanceof Number) return String.valueOf(value).getBytes(StandardCharsets.UTF_8);
return convert0.convertToBytes(resultType == null ? objValueType : resultType, value);
}
}
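
Although this helper class is deleted in this changeset, the lock trio it provided is worth spelling out: tryLock runs a Lua script that atomically PSETEXes the key when absent and otherwise returns the remaining PTTL, lock spins on tryLock with a 10 ms sleep, and unlock simply removes the key. A hedged usage sketch against that (now removed) API, with a hypothetical lock key:

import org.redkalex.cache.redis.MyRedisCacheSource;

public class LockUsageSketch {
    // Usage sketch for the removed lock/tryLock/unlock helpers above; the key name is hypothetical.
    static void withLock(MyRedisCacheSource<String> source) {
        String key = "lock:order-42";
        source.lock(key, 5_000);   // spins until the Lua script returns 0; the lock auto-expires after 5 s
        try {
            // ... critical section ...
        } finally {
            source.unlock(key);    // remove(key) under the hood, releasing the lock for other waiters
        }
    }
}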

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -1 +1 @@
net.tccn.ZhubProvider
dev.zhub.ZhubProvider


@@ -1,7 +1,7 @@
package net.tccn.mq;
import net.tccn.Event;
import net.tccn.timer.Timers;
import dev.zhub.timer.Timers;
import org.junit.Test;
import org.redkale.convert.json.JsonConvert;


@@ -27,15 +27,15 @@ public class HelloService implements Service {
@Resource(name = "vvvvhub2")
private ZHubClient zhub2;*/
//private net.tccn.zhub.ZHubClient zhubx = null;
//private dev.zhub.client.ZHubClient zhubx = null;
@Override
public void init(AnyValue config) {
/*CompletableFuture.runAsync(() -> {
zhubx = new net.tccn.zhub.ZHubClient("127.0.0.1", 1216, "g-dev", "DEV-LOCAL");
//zhubx = new net.tccn.zhub.ZHubClient("47.111.150.118", 6066, "g-dev", "DEV-LOCAL");
zhubx = new dev.zhub.client.ZHubClient("127.0.0.1", 1216, "g-dev", "DEV-LOCAL");
//zhubx = new dev.zhub.client.ZHubClient("47.111.150.118", 6066, "g-dev", "DEV-LOCAL");
});*/
// Function<Rpc<T>, RpcResult<R>> fun