63 Commits

Author SHA1 Message Date
Redkale
6b59c54087 Redkale 2.0.0.rc4 结束 2020-02-02 10:48:54 +08:00
Redkale
58f5ee999a 2020-01-30 12:04:56 +08:00
Redkale
32e8c033ea 2020-01-30 12:02:42 +08:00
Redkale
84a96f750f 2020-01-30 11:53:17 +08:00
Redkale
6560e71e48 2020-01-30 11:44:46 +08:00
Redkale
ee8a18a528 2020-01-30 11:35:05 +08:00
Redkale
c36fc36964 2020-01-29 12:31:11 +08:00
Redkale
a34f85bfc9 2020-01-29 11:35:49 +08:00
Redkale
6f00efa077 2020-01-29 11:28:24 +08:00
Redkale
469dff8478 2020-01-27 18:39:11 +08:00
Redkale
c50eb79b1d 2020-01-27 15:57:13 +08:00
Redkale
985bf6ed25 2020-01-27 15:56:41 +08:00
Redkale
bd51696e08 2020-01-27 14:03:53 +08:00
Redkale
2764d59a4f 2020-01-27 13:01:43 +08:00
Redkale
c1509bb712 2020-01-27 12:32:22 +08:00
Redkale
b14f32deb3 2020-01-27 12:31:34 +08:00
Redkale
62145a2aad 2020-01-27 12:23:33 +08:00
Redkale
8fb88a57b2 2020-01-26 20:20:17 +08:00
Redkale
44f12ae003 2020-01-26 15:51:05 +08:00
Redkale
bf97ef3a08 2020-01-26 14:03:49 +08:00
Redkale
aeefc3b8a2 2020-01-17 14:43:33 +08:00
Redkale
5763718816 WebSocketRunner加入写队列 2020-01-17 14:39:19 +08:00
Redkale
3b142b7504 PoolSource的默认超时时间从3秒改成6秒 2020-01-17 14:04:15 +08:00
Redkale
0f52d32424 TcpAioAsyncConnection去掉队列写 2020-01-17 13:57:41 +08:00
Redkale
f5f3c48f38 2020-01-16 17:24:46 +08:00
Redkale
9f9f5aa000 HttpResponse增加retResultHandler属性 2020-01-16 16:59:37 +08:00
Redkale
be4ca0287b HttpResponse增加retResultHandler属性 2020-01-16 11:11:34 +08:00
Redkale
a84ed72f28 2020-01-14 16:16:28 +08:00
Redkale
8eb5f56f42 2020-01-14 10:59:45 +08:00
Redkale
df1aa2b379 2020-01-11 22:02:42 +08:00
Redkale
da2befcb97 2020-01-11 21:22:34 +08:00
Redkale
92d0f7e796 2020-01-11 21:17:11 +08:00
Redkale
6aa3949d05 DataSource增加可group by的queryColumnMap系列方法,缓存EntityCache部分未实现 2020-01-11 13:32:31 +08:00
Redkale
fa833d9224 2020-01-10 20:30:08 +08:00
Redkale
c9261f8475 增加ofArray方法 2020-01-10 19:25:17 +08:00
Redkale
a1df62af08 2020-01-10 19:20:32 +08:00
Redkale
aa12413f4e 2020-01-10 19:15:37 +08:00
Redkale
2ccd9ba10f ColumnExpress增加减法DEC 2020-01-10 19:10:10 +08:00
Redkale
81ae68c571 ColumnExpress增加减法DEC 2020-01-10 19:08:50 +08:00
Redkale
51b45f4713 DataSource.insert增加Collection、Stream方法 2020-01-08 13:36:59 +08:00
Redkale
66e1f58879 2020-01-08 10:42:08 +08:00
Redkale
06bb5180cf 【不兼容】WebSocket中onConnected、onClose方法的返回值由void改成CompletableFuture 2020-01-08 10:36:54 +08:00
Redkale
d83d7f879c 2020-01-07 16:32:03 +08:00
Redkale
b8e92c949e 2020-01-07 16:31:32 +08:00
Redkale
91548a0ca9 2020-01-07 16:31:02 +08:00
Redkale
dbca25cd54 2020-01-07 16:29:38 +08:00
Redkale
539ea15ae5 2020-01-07 16:28:11 +08:00
Redkale
ad1d9f33d4 WebSocket增加getUserSet方法 2020-01-07 13:07:04 +08:00
Redkale
df98c1a58e 优化querySet和queryColumnSet系列方法 2020-01-07 11:48:30 +08:00
Redkale
56d1969c96 2020-01-07 11:46:34 +08:00
Redkale
4b341436af 2020-01-07 11:01:08 +08:00
Redkale
4d3d73b4c1 WebSocketRange增加几个小方法 2020-01-07 10:21:01 +08:00
Redkale
55ab279e7f 2020-01-02 09:51:37 +08:00
Redkale
82ab994608 2019-12-19 10:55:11 +08:00
Redkale
1c4035e677 2019-12-18 23:50:29 +08:00
Redkale
afdc9e7207 2019-12-18 23:00:23 +08:00
Redkale
9b83abb06a Attribute增加subclass参数用于识别泛型的子类 2019-12-18 22:06:54 +08:00
Redkale
26d1a10bd0 2019-12-18 19:51:35 +08:00
Redkale
7e55dcc46d FilterNode兼容String的 >= > < <= 2019-12-11 19:01:45 +08:00
Redkale
45802d2403 修复2019.6.20改动时remoteAddrHeader带来的bug 2019-12-07 21:02:13 +08:00
Redkale
3660a2a4e5 2019-12-07 19:43:43 +08:00
Redkale
bed81bd93d 修复WriteMoreCompletionHandler的bug 2019-11-30 11:14:44 +08:00
Redkale
131855cdc5 2019-11-30 09:04:01 +08:00
40 changed files with 1892 additions and 380 deletions

View File

@@ -46,19 +46,19 @@ public class LogFileHandler extends Handler {
private static final String format = "%1$tY-%1$tm-%1$td %1$tH:%1$tM:%1$tS.%tL %4$s %2$s\r\n%5$s%6$s\r\n";
@Override
public String format(LogRecord record) {
public String format(LogRecord log) {
String source;
if (record.getSourceClassName() != null) {
source = record.getSourceClassName();
if (record.getSourceMethodName() != null) {
source += " " + record.getSourceMethodName();
if (log.getSourceClassName() != null) {
source = log.getSourceClassName();
if (log.getSourceMethodName() != null) {
source += " " + log.getSourceMethodName();
}
} else {
source = record.getLoggerName();
source = log.getLoggerName();
}
String message = formatMessage(record);
String message = formatMessage(log);
String throwable = "";
if (record.getThrown() != null) {
if (log.getThrown() != null) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw) {
@Override
@@ -67,22 +67,22 @@ public class LogFileHandler extends Handler {
}
};
pw.println();
record.getThrown().printStackTrace(pw);
log.getThrown().printStackTrace(pw);
pw.close();
throwable = sw.toString();
}
return String.format(format,
System.currentTimeMillis(),
source,
record.getLoggerName(),
record.getLevel().getName(),
log.getLoggerName(),
log.getLevel().getName(),
message,
throwable);
}
}
protected final LinkedBlockingQueue<LogRecord> records = new LinkedBlockingQueue();
protected final LinkedBlockingQueue<LogRecord> logqueue = new LinkedBlockingQueue();
private String pattern;
@@ -144,9 +144,9 @@ public class LogFileHandler extends Handler {
public void run() {
while (true) {
try {
LogRecord record = records.take();
LogRecord log = logqueue.take();
final boolean bigger = (limit > 0 && limit <= loglength.get());
final boolean changeday = tomorrow <= record.getMillis();
final boolean changeday = tomorrow <= log.getMillis();
if (bigger || changeday) {
updateTomorrow();
if (logstream != null) {
@@ -193,12 +193,12 @@ public class LogFileHandler extends Handler {
logunusualstream = new FileOutputStream(logunusualfile, append);
}
//----------------------写日志-------------------------
String message = getFormatter().format(record);
String message = getFormatter().format(log);
String encoding = getEncoding();
byte[] bytes = encoding == null ? message.getBytes() : message.getBytes(encoding);
logstream.write(bytes);
loglength.addAndGet(bytes.length);
if (unusual != null && (record.getLevel() == Level.WARNING || record.getLevel() == Level.SEVERE)) {
if (unusual != null && (log.getLevel() == Level.WARNING || log.getLevel() == Level.SEVERE)) {
logunusualstream.write(bytes);
logunusuallength.addAndGet(bytes.length);
}
@@ -310,21 +310,21 @@ public class LogFileHandler extends Handler {
}
@Override
public void publish(LogRecord record) {
final String sourceClassName = record.getSourceClassName();
public void publish(LogRecord log) {
final String sourceClassName = log.getSourceClassName();
if (sourceClassName == null || true) {
StackTraceElement[] ses = new Throwable().getStackTrace();
for (int i = 2; i < ses.length; i++) {
if (ses[i].getClassName().startsWith("java.util.logging")) continue;
record.setSourceClassName('[' + Thread.currentThread().getName() + "] " + ses[i].getClassName());
record.setSourceMethodName(ses[i].getMethodName());
log.setSourceClassName('[' + Thread.currentThread().getName() + "] " + ses[i].getClassName());
log.setSourceMethodName(ses[i].getMethodName());
break;
}
} else {
record.setSourceClassName('[' + Thread.currentThread().getName() + "] " + sourceClassName);
log.setSourceClassName('[' + Thread.currentThread().getName() + "] " + sourceClassName);
}
if (denyreg != null && denyreg.matcher(record.getMessage()).find()) return;
records.offer(record);
if (denyreg != null && denyreg.matcher(log.getMessage()).find()) return;
logqueue.offer(log);
}
@Override

View File

@@ -303,7 +303,7 @@ public abstract class NodeServer {
//NodeServer.this.watchFactory.inject(src);
if (source instanceof Service && needinit) ((Service) source).init(sourceConf);
} catch (Exception e) {
logger.log(Level.SEVERE, "DataSource inject error", e);
logger.log(Level.SEVERE, "[" + Thread.currentThread().getName() + "] DataSource inject error", e);
}
}, DataSource.class);

View File

@@ -129,9 +129,9 @@ public abstract class AsyncConnection implements ReadableByteChannel, WritableBy
public abstract <A> void write(ByteBuffer[] srcs, int offset, int length, A attachment, CompletionHandler<Integer, ? super A> handler);
public void setReadBuffer(Buffer buffer) {
public void setReadBuffer(ByteBuffer buffer) {
if (this.readBuffer != null) throw new RuntimeException("repeat AsyncConnection.setReadBuffer");
this.readBuffer = (ByteBuffer) buffer;
this.readBuffer = buffer;
}
public ByteBuffer pollReadBuffer() {
@@ -147,25 +147,25 @@ public abstract class AsyncConnection implements ReadableByteChannel, WritableBy
return bufferSupplier.get();
}
public void offerBuffer(Buffer buffer) {
public void offerBuffer(ByteBuffer buffer) {
if (buffer == null) return;
// Thread thread = Thread.currentThread();
// if (thread instanceof IOThread) {
// ((IOThread) thread).getBufferPool().accept((ByteBuffer) buffer);
// return;
// }
bufferConsumer.accept((ByteBuffer) buffer);
bufferConsumer.accept(buffer);
}
public void offerBuffer(Buffer... buffers) {
public void offerBuffer(ByteBuffer... buffers) {
if (buffers == null) return;
Consumer<ByteBuffer> consumer = this.bufferConsumer;
// Thread thread = Thread.currentThread();
// if (thread instanceof IOThread) {
// consumer = ((IOThread) thread).getBufferPool();
// }
for (Buffer buffer : buffers) {
consumer.accept((ByteBuffer) buffer);
for (ByteBuffer buffer : buffers) {
consumer.accept(buffer);
}
}

View File

@@ -125,7 +125,7 @@ public abstract class Server<K extends Serializable, C extends Context, R extend
this.aliveTimeoutSeconds = config.getIntValue("aliveTimeoutSeconds", 30);
this.readTimeoutSeconds = config.getIntValue("readTimeoutSeconds", 0);
this.writeTimeoutSeconds = config.getIntValue("writeTimeoutSeconds", 0);
this.backlog = parseLenth(config.getValue("backlog"), 8 * 1024);
this.backlog = parseLenth(config.getValue("backlog"), 1024);
this.maxbody = parseLenth(config.getValue("maxbody"), 64 * 1024);
int bufCapacity = parseLenth(config.getValue("bufferCapacity"), "UDP".equalsIgnoreCase(protocol) ? 1350 : 32 * 1024);
this.bufferCapacity = "UDP".equalsIgnoreCase(protocol) ? bufCapacity : (bufCapacity < 8 * 1024 ? 8 * 1024 : bufCapacity);

View File

@@ -24,7 +24,7 @@ import javax.net.ssl.SSLContext;
*/
public class TcpAioAsyncConnection extends AsyncConnection {
private final Semaphore semaphore = new Semaphore(1);
//private final Semaphore semaphore = new Semaphore(1);
private int readTimeoutSeconds;
@@ -103,35 +103,35 @@ public class TcpAioAsyncConnection extends AsyncConnection {
}
}
private <A> void nextWrite(Throwable exc, A attachment) {
BlockingQueue<WriteEntry> queue = this.writeQueue;
if (queue != null && exc != null && !isOpen()) {
WriteEntry entry;
while ((entry = queue.poll()) != null) {
try {
entry.writeHandler.failed(exc, entry.writeAttachment);
} catch (Throwable e) {
e.printStackTrace(System.err);
}
}
return;
}
WriteEntry entry = queue == null ? null : queue.poll();
if (entry != null) {
try {
if (entry.writeOneBuffer == null) {
write(false, entry.writeBuffers, entry.writeOffset, entry.writeLength, entry.writeAttachment, entry.writeHandler);
} else {
write(false, entry.writeOneBuffer, entry.writeAttachment, entry.writeHandler);
}
} catch (Exception e) {
entry.writeHandler.failed(e, entry.writeAttachment);
}
} else {
semaphore.release();
}
}
// private <A> void nextWrite(Throwable exc, A attachment) {
// BlockingQueue<WriteEntry> queue = this.writeQueue;
// if (queue != null && exc != null && !isOpen()) {
// WriteEntry entry;
// while ((entry = queue.poll()) != null) {
// try {
// entry.writeHandler.failed(exc, entry.writeAttachment);
// } catch (Throwable e) {
// e.printStackTrace(System.err);
// }
// }
// return;
// }
// WriteEntry entry = queue == null ? null : queue.poll();
//
// if (entry != null) {
// try {
// if (entry.writeOneBuffer == null) {
// write(false, entry.writeBuffers, entry.writeOffset, entry.writeLength, entry.writeAttachment, entry.writeHandler);
// } else {
// write(false, entry.writeOneBuffer, entry.writeAttachment, entry.writeHandler);
// }
// } catch (Exception e) {
// entry.writeHandler.failed(e, entry.writeAttachment);
// }
// } else {
// semaphore.release();
// }
// }
@Override
public <A> void write(ByteBuffer src, A attachment, CompletionHandler<Integer, ? super A> handler) {
@@ -139,17 +139,17 @@ public class TcpAioAsyncConnection extends AsyncConnection {
}
private <A> void write(boolean acquire, ByteBuffer src, A attachment, CompletionHandler<Integer, ? super A> handler) {
if (acquire && !semaphore.tryAcquire()) {
if (this.writeQueue == null) {
synchronized (semaphore) {
if (this.writeQueue == null) {
this.writeQueue = new LinkedBlockingDeque<>();
}
}
}
this.writeQueue.add(new WriteEntry(src, attachment, handler));
return;
}
// if (acquire && !semaphore.tryAcquire()) {
// if (this.writeQueue == null) {
// synchronized (semaphore) {
// if (this.writeQueue == null) {
// this.writeQueue = new LinkedBlockingDeque<>();
// }
// }
// }
// this.writeQueue.add(new WriteEntry(src, attachment, handler));
// return;
// }
WriteOneCompletionHandler newHandler = new WriteOneCompletionHandler(src, handler);
if (!channel.isOpen()) {
newHandler.failed(new ClosedChannelException(), attachment);
@@ -173,17 +173,17 @@ public class TcpAioAsyncConnection extends AsyncConnection {
}
private <A> void write(boolean acquire, ByteBuffer[] srcs, int offset, int length, A attachment, final CompletionHandler<Integer, ? super A> handler) {
if (acquire && !semaphore.tryAcquire()) {
if (this.writeQueue == null) {
synchronized (semaphore) {
if (this.writeQueue == null) {
this.writeQueue = new LinkedBlockingDeque<>();
}
}
}
this.writeQueue.add(new WriteEntry(srcs, offset, length, attachment, handler));
return;
}
// if (acquire && !semaphore.tryAcquire()) {
// if (this.writeQueue == null) {
// synchronized (semaphore) {
// if (this.writeQueue == null) {
// this.writeQueue = new LinkedBlockingDeque<>();
// }
// }
// }
// this.writeQueue.add(new WriteEntry(srcs, offset, length, attachment, handler));
// return;
// }
WriteMoreCompletionHandler newHandler = new WriteMoreCompletionHandler(srcs, offset, length, handler);
if (!channel.isOpen()) {
newHandler.failed(new ClosedChannelException(), attachment);
@@ -300,16 +300,16 @@ public class TcpAioAsyncConnection extends AsyncConnection {
if (result >= 0) {
writeCount += result;
try {
int index = -1;
int incre = -1;
for (int i = writeOffset; i < (writeOffset + writeLength); i++) {
if (writeBuffers[i].hasRemaining()) {
index = i;
incre = i - writeOffset;
break;
}
}
if (index >= 0) {
writeOffset += index;
writeLength -= index;
if (incre >= 0) {
writeOffset += incre;
writeLength -= incre;
channel.write(writeBuffers, writeOffset, writeLength, writeTimeoutSeconds > 0 ? writeTimeoutSeconds : 60, TimeUnit.SECONDS, attachment, this);
return;
}
@@ -317,27 +317,27 @@ public class TcpAioAsyncConnection extends AsyncConnection {
failed(e, attachment);
return;
}
try {
// try {
writeHandler.completed(writeCount, attachment);
} finally {
nextWrite(null, attachment);
}
// } finally {
// nextWrite(null, attachment);
// }
} else {
try {
// try {
writeHandler.completed(result.intValue(), attachment);
} finally {
nextWrite(null, attachment);
}
// } finally {
// nextWrite(null, attachment);
// }
}
}
@Override
public void failed(Throwable exc, A attachment) {
try {
// try {
writeHandler.failed(exc, attachment);
} finally {
nextWrite(exc, attachment);
}
// } finally {
// nextWrite(exc, attachment);
// }
}
}
@@ -364,21 +364,21 @@ public class TcpAioAsyncConnection extends AsyncConnection {
failed(e, attachment);
return;
}
try {
// try {
writeHandler.completed(result, attachment);
} finally {
nextWrite(null, attachment);
}
// } finally {
// nextWrite(null, attachment);
// }
}
@Override
public void failed(Throwable exc, A attachment) {
try {
// try {
writeHandler.failed(exc, attachment);
} finally {
nextWrite(exc, attachment);
}
// } finally {
// nextWrite(exc, attachment);
// }
}
}

View File

@@ -32,6 +32,7 @@ public class HttpContext extends Context {
public HttpContext(HttpContextConfig config) {
super(config);
this.remoteAddrHeader = config.remoteAddrHeader;
random.setSeed(Math.abs(System.nanoTime()));
}

View File

@@ -16,15 +16,15 @@ import java.util.*;
import java.util.logging.Level;
import org.redkale.convert.json.JsonConvert;
import org.redkale.net.*;
import org.redkale.util.*;
import org.redkale.util.AnyValue.DefaultAnyValue;
import org.redkale.util.*;
/**
* Http请求包 与javax.servlet.http.HttpServletRequest 基本类似。 <br>
* 同时提供json的解析接口: public Object getJsonParameter(Type type, String name) <br>
* Redkale提倡带简单的参数的GET请求采用类似REST风格, 因此提供了 getRequstURIPath 系列接口。 <br>
* 例如简单的翻页查询 <br>
* /pipes/record/query/offset:0/limit:20 <br>
* /pipes/user/query/offset:0/limit:20 <br>
* 获取页号: int offset = request.getRequstURIPath("offset:", 0); <br>
* 获取行数: int limit = request.getRequstURIPath("limit:", 10); <br>
* <p>
@@ -109,40 +109,44 @@ public class HttpRequest extends Request<HttpContext> {
@Override
protected int readHeader(final ByteBuffer buffer) {
if (!readLine(buffer, array)) return -1;
ByteArray bytes = array;
if (!readLine(buffer, bytes)) return -1;
Charset charset = this.context.getCharset();
int index = 0;
int offset = array.find(index, ' ');
int offset = bytes.find(index, ' ');
if (offset <= 0) return -1;
this.method = array.toString(index, offset, charset).trim();
this.method = bytes.toString(index, offset, charset);
index = ++offset;
offset = array.find(index, ' ');
offset = bytes.find(index, ' ');
if (offset <= 0) return -1;
int off = array.find(index, '#');
int off = bytes.find(index, '#');
if (off > 0) offset = off;
int qst = array.find(index, offset, (byte) '?');
int qst = bytes.find(index, offset, (byte) '?');
if (qst > 0) {
this.requestURI = array.toDecodeString(index, qst - index, charset).trim();
this.queryBytes = array.getBytes(qst + 1, offset - qst - 1);
this.requestURI = bytes.toDecodeString(index, qst - index, charset);
this.queryBytes = bytes.getBytes(qst + 1, offset - qst - 1);
try {
addParameter(array, qst + 1, offset - qst - 1);
addParameter(bytes, qst + 1, offset - qst - 1);
} catch (Exception e) {
this.context.getLogger().log(Level.WARNING, "HttpRequest.addParameter error: " + array.toString(), e);
this.context.getLogger().log(Level.WARNING, "HttpRequest.addParameter error: " + bytes.toString(), e);
}
} else {
this.requestURI = array.toDecodeString(index, offset - index, charset).trim();
this.requestURI = bytes.toDecodeString(index, offset - index, charset);
this.queryBytes = new byte[0];
}
index = ++offset;
this.protocol = array.toString(index, array.size() - index, charset).trim();
while (readLine(buffer, array)) {
if (array.size() < 2) break;
this.protocol = bytes.toString(index, bytes.size() - index, charset);
//header
while (readLine(buffer, bytes)) {
if (bytes.size() < 2) break;
index = 0;
offset = array.find(index, ':');
offset = bytes.find(index, ':');
if (offset <= 0) return -1;
String name = array.toString(index, offset, charset).trim();
String name = bytes.toString(index, offset, charset);
index = offset + 1;
String value = array.toString(index, array.size() - index, charset).trim();
//Upgrade: websocket 前面有空格所以需要trim()
String value = bytes.toString(index, bytes.size() - index, charset).trim();
switch (name) {
case "Content-Type":
case "content-type":
@@ -181,14 +185,14 @@ public class HttpRequest extends Request<HttpContext> {
if (this.contentType != null && this.contentType.contains("boundary=")) this.boundary = true;
if (this.boundary) this.keepAlive = false; //文件上传必须设置keepAlive为false因为文件过大时用户不一定会skip掉多余的数据
array.clear();
bytes.clear();
if (this.contentLength > 0 && (this.contentType == null || !this.boundary)) {
if (this.contentLength > context.getMaxbody()) return -1;
array.write(buffer, Math.min((int) this.contentLength, buffer.remaining()));
int lr = (int) this.contentLength - array.size();
bytes.write(buffer, Math.min((int) this.contentLength, buffer.remaining()));
int lr = (int) this.contentLength - bytes.size();
return lr > 0 ? lr : 0;
}
if (buffer.hasRemaining() && (this.boundary || !this.keepAlive)) array.write(buffer, buffer.remaining()); //文件上传、HTTP1.0或Connection:close
if (buffer.hasRemaining() && (this.boundary || !this.keepAlive)) bytes.write(buffer, buffer.remaining()); //文件上传、HTTP1.0或Connection:close
//暂不考虑是keep-alive且存在body却没有指定Content-Length的情况
return 0;
}
@@ -700,7 +704,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL最后的一个/后面的部分的short值 <br>
* 例如请求URL /pipes/record/query/2 <br>
* 例如请求URL /pipes/user/query/2 <br>
* 获取type参数: short type = request.getRequstURILastPath((short)0); //type = 2
*
* @param defvalue 默认short值
@@ -719,7 +723,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL最后的一个/后面的部分的short值 <br>
* 例如请求URL /pipes/record/query/2 <br>
* 例如请求URL /pipes/user/query/2 <br>
* 获取type参数: short type = request.getRequstURILastPath(16, (short)0); //type = 2
*
* @param radix 进制数
@@ -739,7 +743,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL最后的一个/后面的部分的int值 <br>
* 例如请求URL /pipes/record/query/2 <br>
* 例如请求URL /pipes/user/query/2 <br>
* 获取type参数: int type = request.getRequstURILastPath(0); //type = 2
*
* @param defvalue 默认int值
@@ -757,7 +761,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL最后的一个/后面的部分的int值 <br>
* 例如请求URL /pipes/record/query/2 <br>
* 例如请求URL /pipes/user/query/2 <br>
* 获取type参数: int type = request.getRequstURILastPath(16, 0); //type = 2
*
* @param radix 进制数
@@ -776,7 +780,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL最后的一个/后面的部分的float值 <br>
* 例如请求URL /pipes/record/query/2 <br>
* 例如请求URL /pipes/user/query/2 <br>
* 获取type参数: float type = request.getRequstURILastPath(0.0f); //type = 2.0f
*
* @param defvalue 默认float值
@@ -794,7 +798,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL最后的一个/后面的部分的int值 <br>
* 例如请求URL /pipes/record/query/2 <br>
* 例如请求URL /pipes/user/query/2 <br>
* 获取type参数: long type = request.getRequstURILastPath(0L); //type = 2
*
* @param defvalue 默认long值
@@ -812,7 +816,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL最后的一个/后面的部分的int值 <br>
* 例如请求URL /pipes/record/query/2 <br>
* 例如请求URL /pipes/user/query/2 <br>
* 获取type参数: long type = request.getRequstURILastPath(16, 0L); //type = 2
*
* @param radix 进制数
@@ -831,7 +835,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL最后的一个/后面的部分的double值 <br>
* 例如请求URL /pipes/record/query/2 <br>
* 例如请求URL /pipes/user/query/2 <br>
* 获取type参数: double type = request.getRequstURILastPath(0.0); //type = 2.0
*
* @param defvalue 默认double值
@@ -862,7 +866,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL分段中含prefix段的值 <br>
* 例如请求URL /pipes/record/query/name:hello <br>
* 例如请求URL /pipes/user/query/name:hello <br>
* 获取name参数: String name = request.getRequstURIPath("name:", "none");
*
* @param prefix prefix段前缀
@@ -881,7 +885,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL分段中含prefix段的short值 <br>
* 例如请求URL /pipes/record/query/type:10 <br>
* 例如请求URL /pipes/user/query/type:10 <br>
* 获取type参数: short type = request.getRequstURIPath("type:", (short)0);
*
* @param prefix prefix段前缀
@@ -900,7 +904,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL分段中含prefix段的short值 <br>
* 例如请求URL /pipes/record/query/type:a <br>
* 例如请求URL /pipes/user/query/type:a <br>
* 获取type参数: short type = request.getRequstURIPath(16, "type:", (short)0); //type = 10
*
* @param radix 进制数
@@ -920,7 +924,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL分段中含prefix段的int值 <br>
* 例如请求URL /pipes/record/query/offset:0/limit:50 <br>
* 例如请求URL /pipes/user/query/offset:0/limit:50 <br>
* 获取offset参数: int offset = request.getRequstURIPath("offset:", 0); <br>
* 获取limit参数: int limit = request.getRequstURIPath("limit:", 20); <br>
*
@@ -940,7 +944,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL分段中含prefix段的int值 <br>
* 例如请求URL /pipes/record/query/offset:0/limit:50 <br>
* 例如请求URL /pipes/user/query/offset:0/limit:50 <br>
* 获取offset参数: int offset = request.getRequstURIPath("offset:", 0); <br>
* 获取limit参数: int limit = request.getRequstURIPath(16, "limit:", 20); // limit = 16 <br>
*
@@ -961,7 +965,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL分段中含prefix段的float值 <br>
* 例如请求URL /pipes/record/query/point:40.0 <br>
* 例如请求URL /pipes/user/query/point:40.0 <br>
* 获取time参数: float point = request.getRequstURIPath("point:", 0.0f);
*
* @param prefix prefix段前缀
@@ -980,7 +984,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL分段中含prefix段的long值 <br>
* 例如请求URL /pipes/record/query/time:1453104341363/id:40 <br>
* 例如请求URL /pipes/user/query/time:1453104341363/id:40 <br>
* 获取time参数: long time = request.getRequstURIPath("time:", 0L);
*
* @param prefix prefix段前缀
@@ -999,7 +1003,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL分段中含prefix段的long值 <br>
* 例如请求URL /pipes/record/query/time:1453104341363/id:40 <br>
* 例如请求URL /pipes/user/query/time:1453104341363/id:40 <br>
* 获取time参数: long time = request.getRequstURIPath(16, "time:", 0L);
*
* @param radix 进制数
@@ -1019,7 +1023,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL分段中含prefix段的double值 <br>
* 例如请求URL /pipes/record/query/point:40.0 <br>
* 例如请求URL /pipes/user/query/point:40.0 <br>
* 获取time参数: double point = request.getRequstURIPath("point:", 0.0);
*
* @param prefix prefix段前缀
@@ -1550,8 +1554,8 @@ public class HttpRequest extends Request<HttpContext> {
}
/**
* 获取翻页对象 https://redkale.org/pipes/records/list/offset:0/limit:20/sort:createtime%20ASC <br>
* https://redkale.org/pipes/records/list?flipper={'offset':0,'limit':20, 'sort':'createtime ASC'} <br>
* 获取翻页对象 https://redkale.org/pipes/users/list/offset:0/limit:20/sort:createtime%20ASC <br>
* https://redkale.org/pipes/users/list?flipper={'offset':0,'limit':20, 'sort':'createtime ASC'} <br>
* 以上两种接口都可以获取到翻页对象
*
*

View File

@@ -123,6 +123,8 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
private BiFunction<HttpResponse, ByteBuffer[], ByteBuffer[]> bufferHandler;
private BiFunction<HttpRequest, org.redkale.service.RetResult, org.redkale.service.RetResult> retResultHandler;
private Supplier<ByteBuffer> bodyBufferSupplier;
//------------------------------------------------
@@ -203,6 +205,7 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
this.headLenPos = -1;
this.header.clear();
this.bufferHandler = null;
this.retResultHandler = null;
return super.recycle();
}
@@ -393,8 +396,11 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
*
* @param ret RetResult输出对象
*/
public void finishJson(final org.redkale.service.RetResult ret) {
public void finishJson(org.redkale.service.RetResult ret) {
this.contentType = this.jsonContentType;
if (this.retResultHandler != null) {
ret = this.retResultHandler.apply(this.request, ret);
}
if (this.recycleListener != null) this.output = ret;
if (ret != null && !ret.isSuccess()) {
this.header.addValue("retcode", String.valueOf(ret.getRetcode()));
@@ -411,8 +417,11 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
* @param convert 指定的JsonConvert
* @param ret RetResult输出对象
*/
public void finishJson(final JsonConvert convert, final org.redkale.service.RetResult ret) {
public void finishJson(final JsonConvert convert, org.redkale.service.RetResult ret) {
this.contentType = this.jsonContentType;
if (this.retResultHandler != null) {
ret = this.retResultHandler.apply(this.request, ret);
}
if (this.recycleListener != null) this.output = ret;
if (ret != null && !ret.isSuccess()) {
this.header.addValue("retcode", String.valueOf(ret.getRetcode()));
@@ -482,7 +491,7 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
* @param obj 输出对象
*/
@SuppressWarnings("unchecked")
public void finish(final Convert convert, final Type type, final Object obj) {
public void finish(final Convert convert, final Type type, Object obj) {
if (obj == null) {
finish("null");
} else if (obj instanceof CompletableFuture) {
@@ -549,6 +558,10 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
if (this.recycleListener != null) this.output = obj;
if (obj instanceof org.redkale.service.RetResult) {
org.redkale.service.RetResult ret = (org.redkale.service.RetResult) obj;
if (this.retResultHandler != null) {
ret = this.retResultHandler.apply(this.request, ret);
obj = ret;
}
if (!ret.isSuccess()) {
this.header.addValue("retcode", String.valueOf(ret.getRetcode())).addValue("retinfo", ret.getRetinfo());
}
@@ -1161,6 +1174,24 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
this.bufferHandler = bufferHandler;
}
/**
* 获取输出RetResult时的拦截器
*
* @return 拦截器
*/
protected BiFunction<HttpRequest, org.redkale.service.RetResult, org.redkale.service.RetResult> getRetResultHandler() {
return retResultHandler;
}
/**
* 设置输出RetResult时的拦截器
*
* @param retResultHandler 拦截器
*/
public void retResultHandler(BiFunction<HttpRequest, org.redkale.service.RetResult, org.redkale.service.RetResult> retResultHandler) {
this.retResultHandler = retResultHandler;
}
protected final class TransferFileHandler implements CompletionHandler<Integer, ByteBuffer> {
private final File file;

View File

@@ -453,10 +453,10 @@ public class HttpServer extends Server<String, HttpContext, HttpRequest, HttpRes
@Override
protected ObjectPool<ByteBuffer> createBufferPool(AtomicLong createCounter, AtomicLong cycleCounter, int bufferPoolSize) {
AtomicLong createBufferCounter = new AtomicLong();
AtomicLong cycleBufferCounter = new AtomicLong();
if (createCounter == null) createCounter = new AtomicLong();
if (cycleCounter == null) cycleCounter = new AtomicLong();
final int rcapacity = this.bufferCapacity;
ObjectPool<ByteBuffer> bufferPool = new ObjectPool<>(createBufferCounter, cycleBufferCounter, bufferPoolSize,
ObjectPool<ByteBuffer> bufferPool = new ObjectPool<>(createCounter, cycleCounter, bufferPoolSize,
(Object... params) -> ByteBuffer.allocateDirect(rcapacity), null, (e) -> {
if (e == null || e.isReadOnly() || e.capacity() != rcapacity) return false;
e.clear();

View File

@@ -1555,7 +1555,7 @@ public final class Rest {
RestUploadFile ru = field.getAnnotation(RestUploadFile.class);
RestURI ri = field.getAnnotation(RestURI.class);
if (rh == null && rc == null && ra == null && rb == null && rs == null && ru == null && ri == null) continue;
if (rh != null && field.getType() != String.class) throw new RuntimeException("@RestHeader must on String Field in " + field);
if (rh != null && field.getType() != String.class && field.getType() != InetSocketAddress.class) throw new RuntimeException("@RestHeader must on String Field in " + field);
if (rc != null && field.getType() != String.class) throw new RuntimeException("@RestCookie must on String Field in " + field);
if (rs != null && field.getType() != String.class) throw new RuntimeException("@RestSessionid must on String Field in " + field);
if (ra != null && field.getType() != String.class) throw new RuntimeException("@RestAddress must on String Field in " + field);
@@ -1569,7 +1569,7 @@ public final class Rest {
String attrFieldName;
String restname = "";
if (rh != null) {
attrFieldName = "_redkale_attr_header_" + restAttributes.size();
attrFieldName = "_redkale_attr_header_" + (field.getType() != String.class ? "json_" : "") + restAttributes.size();
restname = rh.name();
} else if (rc != null) {
attrFieldName = "_redkale_attr_cookie_" + restAttributes.size();
@@ -1630,6 +1630,14 @@ public final class Rest {
mv.visitMethodInsn(INVOKEVIRTUAL, reqInternalName, "getConnection", "()Ljava/lang/String;", false);
} else if ("Method".equalsIgnoreCase(headerkey)) {
mv.visitMethodInsn(INVOKEVIRTUAL, reqInternalName, "getMethod", "()Ljava/lang/String;", false);
} else if (en.getKey().contains("_header_json_")) {
String typefieldname = "_redkale_body_jsontype_" + bodyTypes.size();
bodyTypes.put(typefieldname, (java.lang.reflect.Type) en.getValue()[2]);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, newDynName, typefieldname, "Ljava/lang/reflect/Type;");
mv.visitLdcInsn(headerkey);
mv.visitMethodInsn(INVOKEVIRTUAL, reqInternalName, "getJsonHeader", "(Ljava/lang/reflect/Type;Ljava/lang/String;)Ljava/lang/Object;", false);
mv.visitTypeInsn(CHECKCAST, Type.getInternalName((Class) en.getValue()[1]));
} else {
mv.visitLdcInsn(headerkey);
mv.visitLdcInsn("");

View File

@@ -740,8 +740,11 @@ public abstract class WebSocket<G extends Serializable, T> {
/**
* WebSocket连接成功后的回调方法
*
* @return Future 可以为null
*/
public void onConnected() {
public CompletableFuture onConnected() {
return null;
}
/**
@@ -805,8 +808,11 @@ public abstract class WebSocket<G extends Serializable, T> {
*
* @param code 结果码非0表示非正常关闭
* @param reason 关闭原因
*
* @return Future 可以为null
*/
public void onClose(int code, String reason) {
public CompletableFuture onClose(int code, String reason) {
return null;
}
/**

View File

@@ -452,6 +452,11 @@ public class WebSocketEngine {
return (int) websockets2.values().stream().mapToInt(sublist -> sublist.size()).count();
}
@Comment("获取当前在线用户的ID集合")
public Set<Serializable> getLocalUserSet() {
    // Defensive snapshot: callers receive a copy, not a live view of the engine's internal key sets.
    // single-user-per-connection mode reads websockets; otherwise the multi-map websockets2 is used.
    return single ? new LinkedHashSet<>(websockets.keySet()) : new LinkedHashSet<>(websockets2.keySet());
}
@Comment("获取当前用户总数")
public int getLocalUserSize() {
return single ? websockets.size() : websockets2.size();

View File

@@ -11,7 +11,7 @@ import java.net.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.logging.*;
import java.util.stream.Stream;
import java.util.stream.*;
import javax.annotation.*;
import org.redkale.boot.*;
import org.redkale.convert.*;
@@ -208,6 +208,22 @@ public abstract class WebSocketNode {
return rs;
}
/**
* 获取所有在线用户的ID集合
*
* @return CompletableFuture 在线用户ID集合
*/
// Returns the ids of all online users as strings.
public CompletableFuture<Set<String>> getUserSet() {
// Single-node deployment (no SNCP cluster source): answer directly from the local engine.
if (this.localEngine != null && this.sncpNodeAddresses == null) {
return CompletableFuture.completedFuture(new LinkedHashSet<>(this.localEngine.getLocalUserSet().stream().map(x -> String.valueOf(x)).collect(Collectors.toList())));
}
// Cluster mode: throttle concurrent cluster queries, then list the keys stored under
// SOURCE_SNCP_USERID_PREFIX and strip the prefix to recover the raw user ids.
tryAcquireSemaphore();
CompletableFuture<Set<String>> rs = this.sncpNodeAddresses.queryKeysStartsWithAsync(SOURCE_SNCP_USERID_PREFIX).thenApply(v -> new LinkedHashSet<>(v.stream().map(x -> x.substring(SOURCE_SNCP_USERID_PREFIX.length())).collect(Collectors.toList())));
// Release the permit only when one was actually acquired (semaphore may be unconfigured).
if (semaphore != null) rs.whenComplete((r, e) -> releaseSemaphore());
return rs;
}
/**
* 判断指定用户是否WebSocket在线
*

View File

@@ -18,35 +18,47 @@ import org.redkale.convert.json.JsonConvert;
* @author zhangjx
*/
public class WebSocketRange implements Serializable {
protected String wskey;
protected Map<String, String> attach;
/** No-arg constructor — presumably required by the serialization/convert framework; TODO confirm. */
public WebSocketRange() {
}
/**
 * Creates a range with the given websocket key and no attachments.
 *
 * @param wskey websocket group key
 */
public WebSocketRange(String wskey) {
this.wskey = wskey;
}
/**
 * Creates a range with the given websocket key and attachment map.
 *
 * @param wskey  websocket group key
 * @param attach key/value attachments, may be null
 */
public WebSocketRange(String wskey, Map<String, String> attach) {
this.wskey = wskey;
this.attach = attach;
}
/**
 * Tells whether an attachment entry exists for the given key.
 *
 * @param key attachment key
 *
 * @return true if the attachment map is present and contains the key
 */
public boolean containsAttach(String key) {
    Map<String, String> map = this.attach;
    return map != null && map.containsKey(key);
}
/**
 * Looks up an attachment value.
 *
 * @param key attachment key
 *
 * @return the attached value, or null when absent or no map is set
 */
public String getAttach(String key) {
    if (this.attach == null) {
        return null;
    }
    return this.attach.get(key);
}
/**
 * Looks up an attachment value, falling back to a default.
 *
 * @param key    attachment key
 * @param defval value returned when the key is absent or no map is set
 *
 * @return the attached value or the default
 */
public String getAttach(String key, String defval) {
    if (this.attach == null) {
        return defval;
    }
    return this.attach.getOrDefault(key, defval);
}
/** @return the websocket group key */
public String getWskey() {
return wskey;
}
/** @param wskey the websocket group key */
public void setWskey(String wskey) {
this.wskey = wskey;
}
/** @return the attachment map, may be null */
public Map<String, String> getAttach() {
return attach;
}
/** @param attach the attachment map, may be null */
public void setAttach(Map<String, String> attach) {
this.attach = attach;
}

View File

@@ -34,6 +34,10 @@ class WebSocketRunner implements Runnable {
protected final boolean mergemsg;
protected final Semaphore writeSemaphore = new Semaphore(1);
protected final LinkedBlockingQueue<WriteEntry> writeQueue = new LinkedBlockingQueue();
volatile boolean closed = false;
FrameType currSeriesMergeFrameType;
@@ -59,7 +63,8 @@ class WebSocketRunner implements Runnable {
final boolean debug = context.getLogger().isLoggable(Level.FINEST);
final WebSocketRunner self = this;
try {
webSocket.onConnected();
CompletableFuture connectfFuture = webSocket.onConnected();
if (connectfFuture != null) connectfFuture.join();
webSocket._channel.setReadTimeoutSeconds(300); //读取超时5分钟
if (webSocket._channel.isOpen()) {
final int wsmaxbody = webSocket._engine.wsmaxbody;
@@ -227,9 +232,7 @@ class WebSocketRunner implements Runnable {
try {
ByteBuffer[] buffers = packet.sendBuffers != null ? packet.duplicateSendBuffers() : packet.encode(webSocket._channel.getBufferSupplier(), webSocket._channel.getBufferConsumer(), webSocket._engine.cryptor);
//if (debug) context.getLogger().log(Level.FINEST, "wsrunner.sending websocket message: " + packet);
this.lastSendTime = System.currentTimeMillis();
webSocket._channel.write(buffers, buffers, new CompletionHandler<Integer, ByteBuffer[]>() {
CompletionHandler<Integer, ByteBuffer[]> handler = new CompletionHandler<Integer, ByteBuffer[]>() {
private CompletableFuture<Integer> future = futureResult;
@@ -284,7 +287,14 @@ class WebSocketRunner implements Runnable {
}
}
});
};
this.lastSendTime = System.currentTimeMillis();
if (writeSemaphore.tryAcquire()) {
webSocket._channel.write(buffers, buffers, handler);
} else {
writeQueue.add(new WriteEntry(buffers, handler));
}
} catch (Exception t) {
futureResult.complete(RETCODE_SENDEXCEPTION);
closeRunner(RETCODE_SENDEXCEPTION, "websocket send message failed on channel.write");
@@ -293,7 +303,14 @@ class WebSocketRunner implements Runnable {
}
}
return futureResult;
return futureResult.whenComplete((r, t) -> {
WriteEntry entry = writeQueue.poll();
if (entry != null) {
webSocket._channel.write(entry.writeBuffers, entry.writeBuffers, entry.writeHandler);
} else {
writeSemaphore.release();
}
});
}
public boolean isClosed() {
@@ -307,9 +324,22 @@ class WebSocketRunner implements Runnable {
closed = true;
CompletableFuture<Void> future = engine.removeLocalThenClose(webSocket);
webSocket._channel.dispose();
webSocket.onClose(code, reason);
return future;
CompletableFuture closeFuture = webSocket.onClose(code, reason);
if (closeFuture == null) return future;
return CompletableFuture.allOf(future, closeFuture);
}
}
/**
 * Immutable pairing of the buffers to write with the completion handler to
 * notify once the channel write finishes; queued while a write is in flight.
 * Fields are final: they are assigned once here and only read by the runner.
 */
private static class WriteEntry {

    final ByteBuffer[] writeBuffers;

    final CompletionHandler writeHandler;

    public WriteEntry(ByteBuffer[] writeBuffers, CompletionHandler writeHandler) {
        this.writeBuffers = writeBuffers;
        this.writeHandler = writeHandler;
    }
}
}

View File

@@ -21,13 +21,13 @@ import org.redkale.util.*;
*/
public final class SncpResponse extends Response<SncpContext, SncpRequest> {
public static final int RETCODE_ILLSERVICEID = (1 << 10); //无效serviceid
public static final int RETCODE_ILLSERVICEID = (1 << 1); //无效serviceid
public static final int RETCODE_ILLSERVICEVER = (1 << 11); //无效serviceversion
public static final int RETCODE_ILLSERVICEVER = (1 << 2); //无效serviceversion
public static final int RETCODE_ILLACTIONID = (1 << 15); //无效actionid
public static final int RETCODE_ILLACTIONID = (1 << 3); //无效actionid
public static final int RETCODE_THROWEXCEPTION = (1 << 30); //内部异常
public static final int RETCODE_THROWEXCEPTION = (1 << 4); //内部异常
public static ObjectPool<Response> createPool(AtomicLong creatCounter, AtomicLong cycleCounter, int max, Creator<Response> creator) {
return new ObjectPool<>(creatCounter, cycleCounter, max, creator, (x) -> ((SncpResponse) x).prepare(), (x) -> ((SncpResponse) x).recycle());

View File

@@ -5,10 +5,10 @@
*/
package org.redkale.net.sncp;
import java.nio.*;
import java.util.*;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.atomic.*;
import org.redkale.convert.bson.*;
import org.redkale.convert.bson.BsonFactory;
import org.redkale.net.*;
import org.redkale.net.sncp.SncpContext.SncpContextConfig;
import org.redkale.service.Service;
@@ -122,10 +122,10 @@ public class SncpServer extends Server<DLong, SncpContext, SncpRequest, SncpResp
@Override
protected ObjectPool<ByteBuffer> createBufferPool(AtomicLong createCounter, AtomicLong cycleCounter, int bufferPoolSize) {
AtomicLong createBufferCounter = new AtomicLong();
AtomicLong cycleBufferCounter = new AtomicLong();
if (createCounter == null) createCounter = new AtomicLong();
if (cycleCounter == null) cycleCounter = new AtomicLong();
final int rcapacity = this.bufferCapacity;
ObjectPool<ByteBuffer> bufferPool = new ObjectPool<>(createBufferCounter, cycleBufferCounter, bufferPoolSize,
ObjectPool<ByteBuffer> bufferPool = new ObjectPool<>(createCounter, cycleCounter, bufferPoolSize,
(Object... params) -> ByteBuffer.allocateDirect(rcapacity), null, (e) -> {
if (e == null || e.isReadOnly() || e.capacity() != rcapacity) return false;
e.clear();

View File

@@ -22,6 +22,10 @@ public enum ColumnExpress {
* 加值 col = col + val
*/
INC,
/**
* 减值 col = col - val
*/
DEC,
/**
* 乘值 col = col * val
*/

View File

@@ -0,0 +1,83 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkale.source;
import java.io.Serializable;
/**
 * Combined with ColumnNodeValue to build complex column expressions.
 * A String value is treated as a column name.
 *
 * <p>
 * See: https://redkale.org
 *
 * @author zhangjx
 * @since 2.0.0
 */
public class ColumnFuncNode implements ColumnNode {

    protected FilterFunc func;

    protected Serializable value; // invariant: only String or ColumnNodeValue

    /** No-arg constructor — presumably required by the serialization/convert framework; TODO confirm. */
    public ColumnFuncNode() {
    }

    /**
     * Creates a function node over a column name or a nested node expression.
     *
     * @param func aggregate function to apply
     * @param node a String column name or a ColumnNodeValue expression
     */
    public ColumnFuncNode(FilterFunc func, Serializable node) {
        checkValue(node);
        this.func = func;
        this.value = node;
    }

    public static ColumnFuncNode create(FilterFunc func, Serializable node) {
        return new ColumnFuncNode(func, node);
    }

    public static ColumnFuncNode avg(Serializable node) {
        return new ColumnFuncNode(FilterFunc.AVG, node);
    }

    public static ColumnFuncNode count(Serializable node) {
        return new ColumnFuncNode(FilterFunc.COUNT, node);
    }

    public static ColumnFuncNode distinctCount(Serializable node) {
        return new ColumnFuncNode(FilterFunc.DISTINCTCOUNT, node);
    }

    public static ColumnFuncNode max(Serializable node) {
        return new ColumnFuncNode(FilterFunc.MAX, node);
    }

    public static ColumnFuncNode min(Serializable node) {
        return new ColumnFuncNode(FilterFunc.MIN, node);
    }

    public static ColumnFuncNode sum(Serializable node) {
        return new ColumnFuncNode(FilterFunc.SUM, node);
    }

    public FilterFunc getFunc() {
        return func;
    }

    public void setFunc(FilterFunc func) {
        this.func = func;
    }

    public Serializable getValue() {
        return value;
    }

    /**
     * Sets the node value; validates like the constructor so the
     * String/ColumnNodeValue invariant cannot be bypassed via the setter.
     *
     * @param value a String column name or a ColumnNodeValue expression
     */
    public void setValue(Serializable value) {
        checkValue(value);
        this.value = value;
    }

    // Enforces the documented invariant shared by constructor and setter.
    private static void checkValue(Serializable node) {
        if (!(node instanceof String) && !(node instanceof ColumnNodeValue)) {
            throw new IllegalArgumentException("value must be String or ColumnNodeValue");
        }
    }

    @Override
    public String toString() {
        return "{\"func\":\"" + func + "\", \"value\":" + ((value instanceof CharSequence) ? ("\"" + value + "\"") : value) + "}";
    }
}

View File

@@ -0,0 +1,21 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkale.source;
import java.io.Serializable;
/**
 * Marker interface shared by ColumnFuncNode and ColumnNodeValue so both can be
 * used interchangeably wherever a column expression node is expected.
 *
 * <p>
 * See: https://redkale.org
 *
 * @author zhangjx
 * @since 2.0.0
 */
public interface ColumnNode extends Serializable {
}

View File

@@ -13,21 +13,33 @@ import static org.redkale.source.ColumnExpress.*;
* String 视为 字段名
* Number 视为 数值
*
* <p>
* 详情见: https://redkale.org
*
* @author zhangjx
* @since 2.0.0
*/
public class ColumnNodeValue implements Serializable {
public class ColumnNodeValue implements ColumnNode {
private Serializable left;//类型只能是String、Number、ColumnNode
protected Serializable left;//类型只能是String、Number、ColumnNodeValue
private ColumnExpress express; //不能是MOV
protected ColumnExpress express; //不能是MOV
private Serializable right;//类型只能是String、Number、ColumnNode
protected Serializable right;//类型只能是String、Number、ColumnNodeValue
public ColumnNodeValue() {
}
public ColumnNodeValue(Serializable left, ColumnExpress express, Serializable right) {
if (express == null || express == ColumnExpress.MOV) throw new IllegalArgumentException("express cannot be null or MOV");
if (express == null || express == ColumnExpress.MOV) {
throw new IllegalArgumentException("express cannot be null or MOV");
}
if (!(left instanceof String) && !(left instanceof Number) && !(left instanceof ColumnNodeValue) && !(left instanceof ColumnFuncNode)) {
throw new IllegalArgumentException("left value must be String, Number, ColumnFuncNode or ColumnNodeValue");
}
if (!(right instanceof String) && !(right instanceof Number) && !(right instanceof ColumnNodeValue) && !(right instanceof ColumnFuncNode)) {
throw new IllegalArgumentException("right value must be String, Number, ColumnFuncNode or ColumnNodeValue");
}
this.left = left;
this.express = express;
this.right = right;
@@ -41,6 +53,10 @@ public class ColumnNodeValue implements Serializable {
return new ColumnNodeValue(left, INC, right);
}
public static ColumnNodeValue dec(Serializable left, Serializable right) {
return new ColumnNodeValue(left, DEC, right);
}
public static ColumnNodeValue mul(Serializable left, Serializable right) {
return new ColumnNodeValue(left, MUL, right);
}
@@ -65,6 +81,10 @@ public class ColumnNodeValue implements Serializable {
return any(INC, right);
}
public ColumnNodeValue dec(Serializable right) {
return any(DEC, right);
}
public ColumnNodeValue mul(Serializable right) {
return any(MUL, right);
}

View File

@@ -74,6 +74,18 @@ public class ColumnValue {
return new ColumnValue(column, INC, value);
}
/**
* 返回 {column} = {column} - {value} 操作
*
* @param column 字段名
* @param value 字段值
*
* @return ColumnValue
*/
public static ColumnValue dec(String column, Serializable value) {
return new ColumnValue(column, DEC, value);
}
/**
* 返回 {column} = {column} * {value} 操作
*

View File

@@ -174,9 +174,11 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
prestmt.setObject(++i, ((AtomicInteger) val).get());
} else if (val instanceof AtomicLong) {
prestmt.setObject(++i, ((AtomicLong) val).get());
} else if (val != null && !(val instanceof Number) && !(val instanceof CharSequence) && !(entity instanceof java.util.Date)
} else if (val != null && !(val instanceof Number) && !(val instanceof CharSequence) && !(val instanceof java.util.Date)
&& !val.getClass().getName().startsWith("java.sql.") && !val.getClass().getName().startsWith("java.time.")) {
prestmt.setObject(++i, info.jsonConvert.convertTo(attr.genericType(), val));
} else if (val == null && info.isNotNullJson(attr)) {
prestmt.setObject(++i, "");
} else {
prestmt.setObject(++i, val);
}
@@ -207,7 +209,7 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
}
@Override
protected <T> CompletableFuture<Integer> clearTableDB(EntityInfo<T> info, String sql) {
protected <T> CompletableFuture<Integer> clearTableDB(EntityInfo<T> info, final String table, String sql) {
Connection conn = null;
try {
conn = writePool.poll();
@@ -228,7 +230,7 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
}
@Override
protected <T> CompletableFuture<Integer> dropTableDB(EntityInfo<T> info, String sql) {
protected <T> CompletableFuture<Integer> dropTableDB(EntityInfo<T> info, final String table, String sql) {
Connection conn = null;
try {
conn = writePool.poll();
@@ -237,6 +239,10 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
final Statement stmt = conn.createStatement();
int c = stmt.executeUpdate(sql);
stmt.close();
if (info.getTableStrategy() != null) {
String tablekey = table.indexOf('.') > 0 ? table : (conn.getCatalog() + '.' + table);
info.removeDisTable(tablekey);
}
return CompletableFuture.completedFuture(c);
} catch (SQLException e) {
if (info.isTableNotExist(e)) return CompletableFuture.completedFuture(-1);
@@ -421,6 +427,48 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
}
}
@Override
// Runs a GROUP BY aggregate query and maps each group-key tuple to its aggregate values.
// Result keys follow groupByColumns order; values follow funcNodes order.
protected <T, K extends Serializable, N extends Number> CompletableFuture<Map<K[], N[]>> queryColumnMapDB(EntityInfo<T> info, String sql, final ColumnNode[] funcNodes, final String[] groupByColumns) {
Connection conn = null;
Map rs = new LinkedHashMap<>();
try {
conn = readPool.poll();
//conn.setReadOnly(true);
final Statement stmt = conn.createStatement();
ResultSet set = stmt.executeQuery(sql);
ResultSetMetaData rsd = set.getMetaData();
boolean[] smallints = null;
while (set.next()) {
int index = 0;
Serializable[] keys = new Serializable[groupByColumns.length];
// Lazily detect, from the first row's metadata, which key columns are SQL SMALLINT.
if (smallints == null) {
smallints = new boolean[keys.length];
for (int i = 0; i < keys.length; i++) {
smallints[i] = rsd == null ? false : rsd.getColumnType(i + 1) == Types.SMALLINT;
}
}
for (int i = 0; i < keys.length; i++) {
// NOTE(review): the "index == 0" guard means only the FIRST key column is read via
// getShort; later SMALLINT key columns fall through to getObject — confirm intended.
keys[i] = (Serializable) ((smallints[i] && index == 0) ? set.getShort(++index) : set.getObject(++index));
}
// Aggregate values follow the key columns in the SELECT list, one per funcNode.
Number[] vals = new Number[funcNodes.length];
for (int i = 0; i < vals.length; i++) {
vals[i] = (Number) set.getObject(++index);
}
rs.put(keys, vals);
}
set.close();
stmt.close();
return CompletableFuture.completedFuture(rs);
} catch (SQLException e) {
// A missing distributed table is treated as an empty result, not an error.
if (info.getTableStrategy() != null && info.isTableNotExist(e)) return CompletableFuture.completedFuture(rs);
CompletableFuture future = new CompletableFuture();
future.completeExceptionally(e);
return future;//return CompletableFuture.failedFuture(e);
} finally {
// Always return the pooled connection, even on failure.
if (conn != null) readPool.offerConnection(conn);
}
}
@Override
protected <T> CompletableFuture<T> findDB(EntityInfo<T> info, String sql, boolean onlypk, SelectColumn selects) {
Connection conn = null;
@@ -495,7 +543,7 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
}
@Override
protected <T> CompletableFuture<Sheet<T>> querySheetDB(EntityInfo<T> info, final boolean readcache, boolean needtotal, SelectColumn selects, Flipper flipper, FilterNode node) {
protected <T> CompletableFuture<Sheet<T>> querySheetDB(EntityInfo<T> info, final boolean readcache, boolean needtotal, final boolean distinct, SelectColumn selects, Flipper flipper, FilterNode node) {
Connection conn = null;
try {
conn = readPool.poll();
@@ -507,7 +555,7 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
final CharSequence where = node == null ? null : node.createSQLExpress(info, joinTabalis);
final String dbtype = this.readPool.getDbtype();
if ("mysql".equals(dbtype) || "postgresql".equals(dbtype)) {
final String listsql = "SELECT " + info.getQueryColumns("a", selects) + " FROM " + info.getTable(node) + " a" + (join == null ? "" : join)
final String listsql = "SELECT " + (distinct ? "DISTINCT " : "") + info.getQueryColumns("a", selects) + " FROM " + info.getTable(node) + " a" + (join == null ? "" : join)
+ ((where == null || where.length() == 0) ? "" : (" WHERE " + where)) + createSQLOrderby(info, flipper) + (flipper == null || flipper.getLimit() < 1 ? "" : (" LIMIT " + flipper.getLimit() + " OFFSET " + flipper.getOffset()));
if (readcache && info.isLoggable(logger, Level.FINEST, listsql)) {
logger.finest(info.getType().getSimpleName() + " query sql=" + listsql);
@@ -521,7 +569,7 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
ps.close();
long total = list.size();
if (needtotal) {
final String countsql = "SELECT COUNT(*) FROM " + info.getTable(node) + " a" + (join == null ? "" : join) + ((where == null || where.length() == 0) ? "" : (" WHERE " + where));
final String countsql = "SELECT " + (distinct ? "DISTINCT COUNT(" + info.getQueryColumns("a", selects) + ")" : "COUNT(*)") + " FROM " + info.getTable(node) + " a" + (join == null ? "" : join) + ((where == null || where.length() == 0) ? "" : (" WHERE " + where));
if (readcache && info.isLoggable(logger, Level.FINEST, countsql)) {
logger.finest(info.getType().getSimpleName() + " query countsql=" + countsql);
}
@@ -533,7 +581,7 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
}
return CompletableFuture.completedFuture(new Sheet<>(total, list));
}
final String sql = "SELECT " + info.getQueryColumns("a", selects) + " FROM " + info.getTable(node) + " a" + (join == null ? "" : join)
final String sql = "SELECT " + (distinct ? "DISTINCT " : "") + info.getQueryColumns("a", selects) + " FROM " + info.getTable(node) + " a" + (join == null ? "" : join)
+ ((where == null || where.length() == 0) ? "" : (" WHERE " + where)) + info.createSQLOrderby(flipper);
if (readcache && info.isLoggable(logger, Level.FINEST, sql)) {
logger.finest(info.getType().getSimpleName() + " query sql=" + sql + (flipper == null || flipper.getLimit() < 1 ? "" : (" LIMIT " + flipper.getLimit() + " OFFSET " + flipper.getOffset())));
@@ -559,7 +607,7 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
ps.close();
return CompletableFuture.completedFuture(new Sheet<>(total, list));
} catch (SQLException e) {
if (info.getTableStrategy() != null && info.isTableNotExist(e)) return CompletableFuture.completedFuture(new Sheet<>());
if (info.getTableStrategy() != null && info.isTableNotExist(e)) return CompletableFuture.completedFuture(new Sheet<>(0, new ArrayList()));
CompletableFuture future = new CompletableFuture();
future.completeExceptionally(e);
return future;//return CompletableFuture.failedFuture(e);

View File

@@ -94,12 +94,12 @@ public class DataMemorySource extends DataSqlSource<Void> {
}
@Override
protected <T> CompletableFuture<Integer> clearTableDB(EntityInfo<T> info, String sql) {
protected <T> CompletableFuture<Integer> clearTableDB(EntityInfo<T> info, final String table, String sql) {
return CompletableFuture.completedFuture(0);
}
@Override
protected <T> CompletableFuture<Integer> dropTableDB(EntityInfo<T> info, String sql) {
protected <T> CompletableFuture<Integer> dropTableDB(EntityInfo<T> info, final String table, String sql) {
return CompletableFuture.completedFuture(0);
}
@@ -128,6 +128,11 @@ public class DataMemorySource extends DataSqlSource<Void> {
return CompletableFuture.completedFuture(null);
}
@Override
protected <T, K extends Serializable, N extends Number> CompletableFuture<Map<K[], N[]>> queryColumnMapDB(final EntityInfo<T> info, final String sql, final ColumnNode[] funcNodes, final String[] groupByColumns) {
return CompletableFuture.completedFuture(null);
}
@Override
protected <T> CompletableFuture<T> findDB(EntityInfo<T> info, String sql, boolean onlypk, SelectColumn selects) {
return CompletableFuture.completedFuture(null);
@@ -144,8 +149,8 @@ public class DataMemorySource extends DataSqlSource<Void> {
}
@Override
protected <T> CompletableFuture<Sheet<T>> querySheetDB(EntityInfo<T> info, final boolean readcache, boolean needtotal, SelectColumn selects, Flipper flipper, FilterNode node) {
return CompletableFuture.completedFuture(new Sheet<>());
protected <T> CompletableFuture<Sheet<T>> querySheetDB(EntityInfo<T> info, final boolean readcache, boolean needtotal, final boolean distinct, SelectColumn selects, Flipper flipper, FilterNode node) {
return CompletableFuture.completedFuture(new Sheet<>(0, new ArrayList()));
}
}

View File

@@ -43,6 +43,26 @@ public interface DataSource {
*/
public <T> int insert(final T... entitys);
/**
* 新增记录, 多对象必须是同一个Entity类且必须在同一张表中 <br>
*
* @param <T> 泛型
* @param entitys Entity对象
*
* @return 影响的记录条数
*/
public <T> int insert(final Collection<T> entitys);
/**
* 新增记录, 多对象必须是同一个Entity类且必须在同一张表中 <br>
*
* @param <T> 泛型
* @param entitys Entity对象
*
* @return 影响的记录条数
*/
public <T> int insert(final Stream<T> entitys);
/**
* 新增记录, 多对象必须是同一个Entity类且必须在同一张表中 <br>
*
@@ -53,6 +73,26 @@ public interface DataSource {
*/
public <T> CompletableFuture<Integer> insertAsync(final T... entitys);
/**
* 新增记录, 多对象必须是同一个Entity类且必须在同一张表中 <br>
*
* @param <T> 泛型
* @param entitys Entity对象
*
* @return CompletableFuture
*/
public <T> CompletableFuture<Integer> insertAsync(final Collection<T> entitys);
/**
* 新增记录, 多对象必须是同一个Entity类且必须在同一张表中 <br>
*
* @param <T> 泛型
* @param entitys Entity对象
*
* @return CompletableFuture
*/
public <T> CompletableFuture<Integer> insertAsync(final Stream<T> entitys);
//-------------------------deleteAsync--------------------------
/**
* 删除指定主键值的记录, 多对象必须是同一个Entity类且必须在同一张表中 <br>
@@ -662,7 +702,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回null <br>
* 等价SQL: SELECT FUNC{column} FROM {table} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.COUNT, null) 等价于: SELECT COUNT(*) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.COUNT, null) 等价于: SELECT COUNT(*) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -675,7 +715,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回null <br>
* 等价SQL: SELECT FUNC{column} FROM {table} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.COUNT, null) 等价于: SELECT COUNT(*) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.COUNT, null) 等价于: SELECT COUNT(*) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -688,7 +728,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回null <br>
* 等价SQL: SELECT FUNC{column} FROM {table} WHERE {filter bean} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.COUNT, null, (FilterBean)null) 等价于: SELECT COUNT(*) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.COUNT, null, (FilterBean)null) 等价于: SELECT COUNT(*) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -702,7 +742,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回null <br>
* 等价SQL: SELECT FUNC{column} FROM {table} WHERE {filter bean} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.COUNT, null, (FilterBean)null) 等价于: SELECT COUNT(*) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.COUNT, null, (FilterBean)null) 等价于: SELECT COUNT(*) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -716,7 +756,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回null <br>
* 等价SQL: SELECT FUNC{column} FROM {table} WHERE {filter node} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -730,7 +770,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回null <br>
* 等价SQL: SELECT FUNC{column} FROM {table} WHERE {filter node} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -744,7 +784,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回默认值 <br>
* 等价SQL: SELECT FUNC{column} FROM {table} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.MAX, "createtime") 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.MAX, "createtime") 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -758,7 +798,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回默认值 <br>
* 等价SQL: SELECT FUNC{column} FROM {table} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.MAX, "createtime") 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.MAX, "createtime") 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -772,7 +812,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回默认值 <br>
* 等价SQL: SELECT FUNC{column} FROM {table} WHERE {filter bean} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -787,7 +827,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回默认值 <br>
* 等价SQL: SELECT FUNC{column} FROM {table} WHERE {filter bean} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -802,7 +842,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回默认值 <br>
* 等价SQL: SELECT FUNC{column} FROM {table} WHERE {filter node} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -817,7 +857,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回默认值 <br>
* 等价SQL: SELECT FUNC{column} FROM {table} WHERE {filter node} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -832,7 +872,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果Map <br>
* 等价SQL: SELECT FUNC1{column1}, FUNC2{column2}, &#183;&#183;&#183; FROM {table} <br>
* 如 getNumberMapAsync(Record.class, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberMapAsync(User.class, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param <N> Number
* @param entityClass Entity类
@@ -845,7 +885,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果Map <br>
* 等价SQL: SELECT FUNC1{column1}, FUNC2{column2}, &#183;&#183;&#183; FROM {table} <br>
* 如 getNumberMapAsync(Record.class, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberMapAsync(User.class, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param <N> Number
* @param entityClass Entity类
@@ -858,7 +898,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果Map <br>
* 等价SQL: SELECT FUNC1{column1}, FUNC2{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter bean} <br>
* 如 getNumberMapAsync(Record.class, (FilterBean)null, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberMapAsync(User.class, (FilterBean)null, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param <N> Number
* @param entityClass Entity类
@@ -872,7 +912,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果Map <br>
* 等价SQL: SELECT FUNC1{column1}, FUNC2{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter bean} <br>
* 如 getNumberMapAsync(Record.class, (FilterBean)null, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberMapAsync(User.class, (FilterBean)null, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param <N> Number
* @param entityClass Entity类
@@ -886,7 +926,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果Map <br>
* 等价SQL: SELECT FUNC1{column1}, FUNC2{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter node} <br>
* 如 getNumberMapAsync(Record.class, (FilterNode)null, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberMapAsync(User.class, (FilterNode)null, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param <N> Number
* @param entityClass Entity类
@@ -900,7 +940,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果Map <br>
* 等价SQL: SELECT FUNC1{column1}, FUNC2{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter node} <br>
* 如 getNumberMapAsync(Record.class, (FilterNode)null, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberMapAsync(User.class, (FilterNode)null, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param <N> Number
* @param entityClass Entity类
@@ -914,7 +954,7 @@ public interface DataSource {
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT keyColumn, FUNC{funcColumn} FROM {table} GROUP BY {keyColumn} <br>
* 如 queryColumnMapAsync(Record.class, "name", FilterFunc.MAX, "createtime") 等价于: SELECT name, MAX(createtime) FROM record GROUP BY name<br>
* 如 queryColumnMapAsync(User.class, "name", FilterFunc.MAX, "createtime") 等价于: SELECT name, MAX(createtime) FROM user GROUP BY name<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
@@ -931,7 +971,7 @@ public interface DataSource {
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT keyColumn, FUNC{funcColumn} FROM {table} GROUP BY {keyColumn} <br>
* 如 queryColumnMapAsync(Record.class, "name", FilterFunc.MAX, "createtime") 等价于: SELECT name, MAX(createtime) FROM record GROUP BY name<br>
* 如 queryColumnMapAsync(User.class, "name", FilterFunc.MAX, "createtime") 等价于: SELECT name, MAX(createtime) FROM user GROUP BY name<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
@@ -948,7 +988,7 @@ public interface DataSource {
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT keyColumn, FUNC{funcColumn} FROM {table} WHERE {filter bean} GROUP BY {keyColumn} <br>
* 如 queryColumnMapAsync(Record.class, "name", FilterFunc.MAX, "createtime", (FilterBean)null) 等价于: SELECT name, MAX(createtime) FROM record GROUP BY name<br>
* 如 queryColumnMapAsync(User.class, "name", FilterFunc.MAX, "createtime", (FilterBean)null) 等价于: SELECT name, MAX(createtime) FROM user GROUP BY name<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
@@ -966,7 +1006,7 @@ public interface DataSource {
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT keyColumn, FUNC{funcColumn} FROM {table} WHERE {filter bean} GROUP BY {keyColumn} <br>
* 如 queryColumnMapAsync(Record.class, "name", FilterFunc.MAX, "createtime", (FilterBean)null) 等价于: SELECT name, MAX(createtime) FROM record GROUP BY name<br>
* 如 queryColumnMapAsync(User.class, "name", FilterFunc.MAX, "createtime", (FilterBean)null) 等价于: SELECT name, MAX(createtime) FROM user GROUP BY name<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
@@ -984,7 +1024,7 @@ public interface DataSource {
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT keyColumn, FUNC{funcColumn} FROM {table} WHERE {filter node} GROUP BY {keyColumn} <br>
* 如 queryColumnMapAsync(Record.class, "name", FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT name, MAX(createtime) FROM record GROUP BY name<br>
* 如 queryColumnMapAsync(User.class, "name", FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT name, MAX(createtime) FROM user GROUP BY name<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
@@ -1002,7 +1042,7 @@ public interface DataSource {
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT keyColumn, FUNC{funcColumn} FROM {table} WHERE {filter node} GROUP BY {keyColumn} <br>
* 如 queryColumnMapAsync(Record.class, "name", FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT name, MAX(createtime) FROM record GROUP BY name<br>
* 如 queryColumnMapAsync(User.class, "name", FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT name, MAX(createtime) FROM user GROUP BY name<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
@@ -1017,6 +1057,218 @@ public interface DataSource {
*/
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K, N>> queryColumnMapAsync(final Class<T> entityClass, final String keyColumn, final FilterFunc func, final String funcColumn, final FilterNode node);
/**
 * Queries the GROUP BY aggregation result Map of records. <br>
 * Equivalent SQL: SELECT col1, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} GROUP BY {col1} <br>
 * e.g. queryColumnMap(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), "targetid")
 * is equivalent to: SELECT targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY targetid<br>
 *
 * @param <T>           Entity generic type
 * @param <K>           data type of the GROUP BY key column
 * @param <N>           Number
 * @param entityClass   Entity class
 * @param funcNodes     ColumnNode[]
 * @param groupByColumn GROUP BY column
 *
 * @return aggregation result Map
 */
public <T, K extends Serializable, N extends Number> Map<K, N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn);
/**
 * Queries the GROUP BY aggregation result Map of records. <br>
 * Equivalent SQL: SELECT col1, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} GROUP BY {col1} <br>
 * e.g. queryColumnMapAsync(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), "targetid")
 * is equivalent to: SELECT targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY targetid<br>
 *
 * @param <T>           Entity generic type
 * @param <K>           data type of the GROUP BY key column
 * @param <N>           Number
 * @param entityClass   Entity class
 * @param funcNodes     ColumnNode[]
 * @param groupByColumn GROUP BY column
 *
 * @return aggregation result Map CompletableFuture
 */
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K, N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn);
/**
 * Queries the GROUP BY aggregation result Map of records matching the filter bean. <br>
 * Equivalent SQL: SELECT col1, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} WHERE {filter bean} GROUP BY {col1} <br>
 * e.g. queryColumnMap(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), "targetid", (FilterBean)null)
 * is equivalent to: SELECT targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY targetid<br>
 *
 * @param <T>           Entity generic type
 * @param <K>           data type of the GROUP BY key column
 * @param <N>           Number
 * @param entityClass   Entity class
 * @param funcNodes     ColumnNode[]
 * @param groupByColumn GROUP BY column
 * @param bean          filter condition
 *
 * @return aggregation result Map
 */
public <T, K extends Serializable, N extends Number> Map<K, N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn, final FilterBean bean);
/**
 * Queries the GROUP BY aggregation result Map of records matching the filter bean. <br>
 * Equivalent SQL: SELECT col1, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} WHERE {filter bean} GROUP BY {col1} <br>
 * e.g. queryColumnMapAsync(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), "targetid", (FilterBean)null)
 * is equivalent to: SELECT targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY targetid<br>
 *
 * @param <T>           Entity generic type
 * @param <K>           data type of the GROUP BY key column
 * @param <N>           Number
 * @param entityClass   Entity class
 * @param funcNodes     ColumnNode[]
 * @param groupByColumn GROUP BY column
 * @param bean          filter condition
 *
 * @return aggregation result Map CompletableFuture
 */
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K, N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn, final FilterBean bean);
/**
 * Queries the GROUP BY aggregation result Map of records matching the filter node. <br>
 * Equivalent SQL: SELECT col1, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} WHERE {filter node} GROUP BY {col1} <br>
 * e.g. queryColumnMap(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), "targetid", (FilterNode)null)
 * is equivalent to: SELECT targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY targetid<br>
 *
 * @param <T>           Entity generic type
 * @param <K>           data type of the GROUP BY key column
 * @param <N>           Number
 * @param entityClass   Entity class
 * @param funcNodes     ColumnNode[]
 * @param groupByColumn GROUP BY column
 * @param node          filter condition
 *
 * @return aggregation result Map
 */
public <T, K extends Serializable, N extends Number> Map<K, N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn, final FilterNode node);
/**
 * Queries the GROUP BY aggregation result Map of records matching the filter node. <br>
 * Equivalent SQL: SELECT col1, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} WHERE {filter node} GROUP BY {col1} <br>
 * e.g. queryColumnMapAsync(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), "targetid", (FilterNode)null)
 * is equivalent to: SELECT targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY targetid<br>
 *
 * @param <T>           Entity generic type
 * @param <K>           data type of the GROUP BY key column
 * @param <N>           Number
 * @param entityClass   Entity class
 * @param funcNodes     ColumnNode[]
 * @param groupByColumn GROUP BY column
 * @param node          filter condition
 *
 * @return aggregation result Map CompletableFuture
 */
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K, N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn, final FilterNode node);
/**
 * Queries the multi-column GROUP BY aggregation result Map of records. <br>
 * Equivalent SQL: SELECT col1, col2, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} GROUP BY {col1}, {col2} <br>
 * e.g. queryColumnMap(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), Utility.ofArray("fromid", "targetid"))
 * is equivalent to: SELECT fromid, targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY fromid, targetid<br>
 *
 * @param <T>            Entity generic type
 * @param <K>            data type of the GROUP BY key columns
 * @param <N>            Number
 * @param entityClass    Entity class
 * @param funcNodes      ColumnNode[]
 * @param groupByColumns GROUP BY columns
 *
 * @return aggregation result Map
 */
public <T, K extends Serializable, N extends Number> Map<K[], N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns);
/**
 * Queries the multi-column GROUP BY aggregation result Map of records. <br>
 * Equivalent SQL: SELECT col1, col2, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} GROUP BY {col1}, {col2} <br>
 * e.g. queryColumnMapAsync(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), Utility.ofArray("fromid", "targetid"))
 * is equivalent to: SELECT fromid, targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY fromid, targetid<br>
 *
 * @param <T>            Entity generic type
 * @param <K>            data type of the GROUP BY key columns
 * @param <N>            Number
 * @param entityClass    Entity class
 * @param funcNodes      ColumnNode[]
 * @param groupByColumns GROUP BY columns
 *
 * @return aggregation result Map CompletableFuture
 */
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K[], N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns);
/**
 * Queries the multi-column GROUP BY aggregation result Map of records matching the filter bean. <br>
 * Equivalent SQL: SELECT col1, col2, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} WHERE {filter bean} GROUP BY {col1}, {col2} <br>
 * e.g. queryColumnMap(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), Utility.ofArray("fromid", "targetid"), (FilterBean)null)
 * is equivalent to: SELECT fromid, targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY fromid, targetid<br>
 *
 * @param <T>            Entity generic type
 * @param <K>            data type of the GROUP BY key columns
 * @param <N>            Number
 * @param entityClass    Entity class
 * @param funcNodes      ColumnNode[]
 * @param groupByColumns GROUP BY columns
 * @param bean           filter condition
 *
 * @return aggregation result Map
 */
public <T, K extends Serializable, N extends Number> Map<K[], N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns, final FilterBean bean);
/**
 * Queries the multi-column GROUP BY aggregation result Map of records matching the filter bean. <br>
 * Equivalent SQL: SELECT col1, col2, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} WHERE {filter bean} GROUP BY {col1}, {col2} <br>
 * e.g. queryColumnMapAsync(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), Utility.ofArray("fromid", "targetid"), (FilterBean)null)
 * is equivalent to: SELECT fromid, targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY fromid, targetid<br>
 *
 * @param <T>            Entity generic type
 * @param <K>            data type of the GROUP BY key columns
 * @param <N>            Number
 * @param entityClass    Entity class
 * @param funcNodes      ColumnNode[]
 * @param groupByColumns GROUP BY columns
 * @param bean           filter condition
 *
 * @return aggregation result Map CompletableFuture
 */
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K[], N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns, final FilterBean bean);
/**
 * Queries the multi-column GROUP BY aggregation result Map of records matching the filter node. <br>
 * Equivalent SQL: SELECT col1, col2, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} WHERE {filter node} GROUP BY {col1}, {col2} <br>
 * e.g. queryColumnMap(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), Utility.ofArray("fromid", "targetid"), (FilterNode)null)
 * is equivalent to: SELECT fromid, targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY fromid, targetid<br>
 *
 * @param <T>            Entity generic type
 * @param <K>            data type of the GROUP BY key columns
 * @param <N>            Number
 * @param entityClass    Entity class
 * @param funcNodes      ColumnNode[]
 * @param groupByColumns GROUP BY columns
 * @param node           filter condition
 *
 * @return aggregation result Map
 */
public <T, K extends Serializable, N extends Number> Map<K[], N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns, final FilterNode node);
/**
 * Queries the multi-column GROUP BY aggregation result Map of records matching the filter node. <br>
 * Equivalent SQL: SELECT col1, col2, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} WHERE {filter node} GROUP BY {col1}, {col2} <br>
 * e.g. queryColumnMapAsync(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), Utility.ofArray("fromid", "targetid"), (FilterNode)null)
 * is equivalent to: SELECT fromid, targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY fromid, targetid<br>
 *
 * @param <T>            Entity generic type
 * @param <K>            data type of the GROUP BY key columns
 * @param <N>            Number
 * @param entityClass    Entity class
 * @param funcNodes      ColumnNode[]
 * @param groupByColumns GROUP BY columns
 * @param node           filter condition
 *
 * @return aggregation result Map CompletableFuture
 */
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K[], N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns, final FilterNode node);
//-----------------------findAsync----------------------------
/**
* 获取指定主键值的单个记录, 返回null表示不存在值 <br>
@@ -1431,7 +1683,7 @@ public interface DataSource {
//-----------------------list set----------------------------
/**
* 查询符合过滤条件记录的某个字段Set集合 <br>
* 等价SQL: SELECT {selectedColumn} FROM {table} WHERE {column} = {key} <br>
* 等价SQL: SELECT DISTINCT {selectedColumn} FROM {table} WHERE {column} = {key} <br>
*
* @param <T> Entity泛型
* @param <V> 字段类型
@@ -1442,11 +1694,11 @@ public interface DataSource {
*
* @return 字段值的集合
*/
public <T, V extends Serializable> HashSet<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final String column, final Serializable colval);
public <T, V extends Serializable> Set<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final String column, final Serializable colval);
/**
* 查询符合过滤条件记录的某个字段Set集合 <br>
* 等价SQL: SELECT {selectedColumn} FROM {table} WHERE {column} = {key} <br>
* 等价SQL: SELECT DISTINCT {selectedColumn} FROM {table} WHERE {column} = {key} <br>
*
* @param <T> Entity泛型
* @param <V> 字段类型
@@ -1457,11 +1709,11 @@ public interface DataSource {
*
* @return 字段值的集合CompletableFuture
*/
public <T, V extends Serializable> CompletableFuture<HashSet<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final String column, final Serializable colval);
public <T, V extends Serializable> CompletableFuture<Set<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final String column, final Serializable colval);
/**
* 查询符合过滤条件记录的某个字段Set集合 <br>
* 等价SQL: SELECT {selectedColumn} FROM {table} WHERE {filter bean} <br>
* 等价SQL: SELECT DISTINCT {selectedColumn} FROM {table} WHERE {filter bean} <br>
*
* @param <T> Entity泛型
* @param <V> 字段类型
@@ -1471,11 +1723,11 @@ public interface DataSource {
*
* @return 字段值的集合
*/
public <T, V extends Serializable> HashSet<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final FilterBean bean);
public <T, V extends Serializable> Set<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final FilterBean bean);
/**
* 查询符合过滤条件记录的某个字段Set集合 <br>
* 等价SQL: SELECT {selectedColumn} FROM {table} WHERE {filter bean} <br>
* 等价SQL: SELECT DISTINCT {selectedColumn} FROM {table} WHERE {filter bean} <br>
*
* @param <T> Entity泛型
* @param <V> 字段类型
@@ -1485,11 +1737,11 @@ public interface DataSource {
*
* @return 字段值的集合CompletableFuture
*/
public <T, V extends Serializable> CompletableFuture<HashSet<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final FilterBean bean);
public <T, V extends Serializable> CompletableFuture<Set<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final FilterBean bean);
/**
* 查询符合过滤条件记录的某个字段Set集合 <br>
* 等价SQL: SELECT {selectedColumn} FROM {table} WHERE {filter node} <br>
* 等价SQL: SELECT DISTINCT {selectedColumn} FROM {table} WHERE {filter node} <br>
*
* @param <T> Entity泛型
* @param <V> 字段类型
@@ -1499,11 +1751,11 @@ public interface DataSource {
*
* @return 字段值的集合
*/
public <T, V extends Serializable> HashSet<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final FilterNode node);
public <T, V extends Serializable> Set<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final FilterNode node);
/**
* 查询符合过滤条件记录的某个字段Set集合 <br>
* 等价SQL: SELECT {selectedColumn} FROM {table} WHERE {filter node} <br>
* 等价SQL: SELECT DISTINCT {selectedColumn} FROM {table} WHERE {filter node} <br>
*
* @param <T> Entity泛型
* @param <V> 字段类型
@@ -1513,7 +1765,67 @@ public interface DataSource {
*
* @return 字段值的集合CompletableFuture
*/
public <T, V extends Serializable> CompletableFuture<HashSet<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final FilterNode node);
public <T, V extends Serializable> CompletableFuture<Set<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final FilterNode node);
/**
* 查询符合过滤条件记录的某个字段Set集合 <br>
* 等价SQL: SELECT DISTINCT {selectedColumn} FROM {table} WHERE {filter bean} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
*
* @param <T> Entity泛型
* @param <V> 字段类型
* @param selectedColumn 指定字段
* @param clazz Entity类
* @param flipper 翻页对象
* @param bean 过滤条件
*
* @return 字段值的集合
*/
public <T, V extends Serializable> Set<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final Flipper flipper, final FilterBean bean);
/**
* 查询符合过滤条件记录的某个字段Set集合 <br>
* 等价SQL: SELECT DISTINCT {selectedColumn} FROM {table} WHERE {filter bean} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
*
* @param <T> Entity泛型
* @param <V> 字段类型
* @param selectedColumn 指定字段
* @param clazz Entity类
* @param flipper 翻页对象
* @param bean 过滤条件
*
* @return 字段值的集合CompletableFuture
*/
public <T, V extends Serializable> CompletableFuture<Set<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final Flipper flipper, final FilterBean bean);
/**
* 查询符合过滤条件记录的某个字段Set集合 <br>
* 等价SQL: SELECT DISTINCT {selectedColumn} FROM {table} WHERE {filter node} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
*
* @param <T> Entity泛型
* @param <V> 字段类型
* @param selectedColumn 指定字段
* @param clazz Entity类
* @param flipper 翻页对象
* @param node 过滤条件
*
* @return 字段值的集合
*/
public <T, V extends Serializable> Set<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final Flipper flipper, final FilterNode node);
/**
* 查询符合过滤条件记录的某个字段Set集合 <br>
* 等价SQL: SELECT DISTINCT {selectedColumn} FROM {table} WHERE {filter node} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
*
* @param <T> Entity泛型
* @param <V> 字段类型
* @param selectedColumn 指定字段
* @param clazz Entity类
* @param flipper 翻页对象
* @param node 过滤条件
*
* @return 字段值的集合CompletableFuture
*/
public <T, V extends Serializable> CompletableFuture<Set<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final Flipper flipper, final FilterNode node);
/**
* 查询符合过滤条件记录的某个字段List集合 <br>
@@ -1883,6 +2195,296 @@ public interface DataSource {
*/
public <K extends Serializable, T> CompletableFuture<Map<K, T>> queryMapAsync(final Class<T> clazz, final SelectColumn selects, final FilterNode node);
/**
 * Queries the Set of records matching the given column value. <br>
 * Equivalent SQL: SELECT DISTINCT * FROM {table} WHERE {column} = {key} <br>
 *
 * @param <T>    Entity generic type
 * @param clazz  Entity class
 * @param column filter column name
 * @param colval filter column value
 *
 * @return Set of Entity
 */
public <T> Set<T> querySet(final Class<T> clazz, final String column, final Serializable colval);
/**
 * Queries the Set of records matching the given column value. <br>
 * Equivalent SQL: SELECT DISTINCT * FROM {table} WHERE {column} = {key} <br>
 *
 * @param <T>    Entity generic type
 * @param clazz  Entity class
 * @param column filter column name
 * @param colval filter column value
 *
 * @return Set of Entity CompletableFuture
 */
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final String column, final Serializable colval);
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT * FROM {table} WHERE {filter bean} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param bean 过滤条件
*
* @return Entity的集合
*/
public <T> Set<T> querySet(final Class<T> clazz, final FilterBean bean);
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT * FROM {table} WHERE {filter bean} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param bean 过滤条件
*
* @return Entity的集合CompletableFuture
*/
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final FilterBean bean);
/**
* 查询记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT * FROM {table} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
*
* @return Entity的集合
*/
default <T> Set<T> querySet(final Class<T> clazz) {
return querySet(clazz, (FilterNode) null);
}
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT * FROM {table} WHERE {filter node} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param node 过滤条件
*
* @return Entity的集合
*/
public <T> Set<T> querySet(final Class<T> clazz, final FilterNode node);
/**
* 查询记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT * FROM {table} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
*
* @return Entity的集合CompletableFuture
*/
default <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz) {
return querySetAsync(clazz, (FilterNode) null);
}
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT * FROM {table} WHERE {filter node} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param node 过滤条件
*
* @return Entity的集合CompletableFuture
*/
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final FilterNode node);
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT {column1},{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter bean} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param selects 指定字段
* @param bean 过滤条件
*
* @return Entity的集合
*/
public <T> Set<T> querySet(final Class<T> clazz, final SelectColumn selects, final FilterBean bean);
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT {column1},{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter bean} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param selects 指定字段
* @param bean 过滤条件
*
* @return Entity的集合CompletableFuture
*/
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final SelectColumn selects, final FilterBean bean);
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT {column1},{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter node} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param selects 指定字段
* @param node 过滤条件
*
* @return Entity的集合
*/
public <T> Set<T> querySet(final Class<T> clazz, final SelectColumn selects, final FilterNode node);
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT {column1},{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter node} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param selects 指定字段
* @param node 过滤条件
*
* @return Entity的集合CompletableFuture
*/
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final SelectColumn selects, final FilterNode node);
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT * FROM {table} WHERE {column} = {key} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param flipper 翻页对象
* @param column 过滤字段名
* @param colval 过滤字段值
*
* @return Entity的集合
*/
public <T> Set<T> querySet(final Class<T> clazz, final Flipper flipper, final String column, final Serializable colval);
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT * FROM {table} WHERE {column} = {key} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param flipper 翻页对象
* @param column 过滤字段名
* @param colval 过滤字段值
*
* @return Entity的集合CompletableFuture
*/
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final Flipper flipper, final String column, final Serializable colval);
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT * FROM {table} WHERE {filter bean} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param flipper 翻页对象
* @param bean 过滤条件
*
* @return Entity的集合
*/
public <T> Set<T> querySet(final Class<T> clazz, final Flipper flipper, final FilterBean bean);
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT * FROM {table} WHERE {filter bean} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param flipper 翻页对象
* @param bean 过滤条件
*
* @return Entity的集合CompletableFuture
*/
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final Flipper flipper, final FilterBean bean);
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT * FROM {table} WHERE {filter node} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param flipper 翻页对象
* @param node 过滤条件
*
* @return Entity的集合
*
*/
public <T> Set<T> querySet(final Class<T> clazz, final Flipper flipper, final FilterNode node);
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT * FROM {table} WHERE {filter node} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param flipper 翻页对象
* @param node 过滤条件
*
* @return Entity的集合
*
*/
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final Flipper flipper, final FilterNode node);
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT {column1},{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter bean} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param selects 指定字段
* @param flipper 翻页对象
* @param bean 过滤条件
*
* @return Entity的集合
*/
public <T> Set<T> querySet(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterBean bean);
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT {column1},{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter bean} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param selects 指定字段
* @param flipper 翻页对象
* @param bean 过滤条件
*
* @return Entity的集合CompletableFuture
*/
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterBean bean);
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT {column1},{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter node} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param selects 指定字段
* @param flipper 翻页对象
* @param node 过滤条件
*
* @return Entity的集合
*/
public <T> Set<T> querySet(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterNode node);
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT {column1},{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter node} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param selects 指定字段
* @param flipper 翻页对象
* @param node 过滤条件
*
* @return Entity的集合CompletableFuture
*/
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterNode node);
/**
* 查询符合过滤条件记录的List集合 <br>
* 等价SQL: SELECT * FROM {table} WHERE {column} = {key} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>

View File

@@ -65,7 +65,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
if (t != null) logger.log(Level.SEVERE, "CompletableFuture complete error", (Throwable) t);
};
protected final BiFunction<DataSource, Class, List> fullloader = (s, t) -> ((Sheet) querySheetCompose(false, false, t, null, null, (FilterNode) null).join()).list(true);
protected final BiFunction<DataSource, Class, List> fullloader = (s, t) -> ((Sheet) querySheetCompose(false, false, false, t, null, null, (FilterNode) null).join()).list(true);
@SuppressWarnings({"OverridableMethodCallInConstructor", "LeakingThisInConstructor"})
public DataSqlSource(String unitName, URL persistxml, Properties readprop, Properties writeprop) {
@@ -91,7 +91,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
} else if (s.length() == 2) {
s = "0" + s;
}
t.setName("Redkale-"+cname + "-Thread-" + s);
t.setName("Redkale-" + cname + "-Thread-" + s);
t.setUncaughtExceptionHandler(ueh);
return t;
});
@@ -137,10 +137,10 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
protected abstract <T> CompletableFuture<Integer> deleteDB(final EntityInfo<T> info, Flipper flipper, final String sql);
//清空表
protected abstract <T> CompletableFuture<Integer> clearTableDB(final EntityInfo<T> info, final String sql);
protected abstract <T> CompletableFuture<Integer> clearTableDB(final EntityInfo<T> info, final String table, final String sql);
//删除表
protected abstract <T> CompletableFuture<Integer> dropTableDB(final EntityInfo<T> info, final String sql);
protected abstract <T> CompletableFuture<Integer> dropTableDB(final EntityInfo<T> info, final String table, final String sql);
//更新纪录
protected abstract <T> CompletableFuture<Integer> updateDB(final EntityInfo<T> info, T... entitys);
@@ -157,6 +157,9 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
//查询Map数据
protected abstract <T, K extends Serializable, N extends Number> CompletableFuture<Map<K, N>> queryColumnMapDB(final EntityInfo<T> info, final String sql, final String keyColumn);
//查询Map数据
protected abstract <T, K extends Serializable, N extends Number> CompletableFuture<Map<K[], N[]>> queryColumnMapDB(final EntityInfo<T> info, final String sql, final ColumnNode[] funcNodes, final String[] groupByColumns);
//查询单条记录
protected abstract <T> CompletableFuture<T> findDB(final EntityInfo<T> info, final String sql, final boolean onlypk, final SelectColumn selects);
@@ -167,7 +170,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
protected abstract <T> CompletableFuture<Boolean> existsDB(final EntityInfo<T> info, final String sql, final boolean onlypk);
//查询一页数据
protected abstract <T> CompletableFuture<Sheet<T>> querySheetDB(final EntityInfo<T> info, final boolean readcache, final boolean needtotal, final SelectColumn selects, final Flipper flipper, final FilterNode node);
protected abstract <T> CompletableFuture<Sheet<T>> querySheetDB(final EntityInfo<T> info, final boolean readcache, final boolean needtotal, final boolean distinct, final SelectColumn selects, final Flipper flipper, final FilterNode node);
protected <T> T getEntityValue(EntityInfo<T> info, final SelectColumn sels, final ResultSet set) throws SQLException {
return info.getEntityValue(sels, set);
@@ -328,6 +331,18 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
}).join();
}
@Override
public final <T> int insert(final Collection<T> entitys) {
    // Delegates to the varargs insert; a null or empty collection inserts nothing.
    return (entitys == null || entitys.isEmpty()) ? 0 : insert(entitys.toArray());
}
@Override
public final <T> int insert(final Stream<T> entitys) {
    // A null stream is treated as "no records"; otherwise materialize it and delegate.
    return entitys == null ? 0 : insert(entitys.toArray());
}
@Override
public <T> CompletableFuture<Integer> insertAsync(@RpcCall(DataCallArrayAttribute.class) T... entitys) {
if (entitys.length == 0) return CompletableFuture.completedFuture(0);
@@ -353,6 +368,18 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
});
}
@Override
public final <T> CompletableFuture<Integer> insertAsync(final Collection<T> entitys) {
    // Null/empty input completes immediately with 0 inserted rows.
    return (entitys == null || entitys.isEmpty())
        ? CompletableFuture.completedFuture(0)
        : insertAsync(entitys.toArray());
}
@Override
public final <T> CompletableFuture<Integer> insertAsync(final Stream<T> entitys) {
    // A null stream completes immediately with 0; otherwise materialize and delegate.
    return entitys == null
        ? CompletableFuture.completedFuture(0)
        : insertAsync(entitys.toArray());
}
protected <T> int insertCache(final EntityInfo<T> info, T... entitys) {
final EntityCache<T> cache = info.getCache();
if (cache == null) return 0;
@@ -581,9 +608,10 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
}
protected <T> CompletableFuture<Integer> clearTableCompose(final EntityInfo<T> info, final FilterNode node) {
String sql = "TRUNCATE TABLE " + info.getTable(node);
final String table = info.getTable(node);
String sql = "TRUNCATE TABLE " + table;
if (info.isLoggable(logger, Level.FINEST, sql)) logger.finest(info.getType().getSimpleName() + " clearTable sql=" + sql);
return clearTableDB(info, sql);
return clearTableDB(info, table, sql);
}
//----------------------------- dropTableCompose -----------------------------
@@ -633,9 +661,10 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
}
protected <T> CompletableFuture<Integer> dropTableCompose(final EntityInfo<T> info, final FilterNode node) {
String sql = "DROP TABLE " + info.getTable(node);
final String table = info.getTable(node);
String sql = "DROP TABLE " + table;
if (info.isLoggable(logger, Level.FINEST, sql)) logger.finest(info.getType().getSimpleName() + " dropTable sql=" + sql);
return dropTableDB(info, sql);
return dropTableDB(info, table, sql);
}
protected <T> int clearTableCache(final EntityInfo<T> info, FilterNode node) {
@@ -1152,7 +1181,9 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
blobs.add((byte[]) val);
setsql.append(" = ").append(prepareParamSign(++index));
} else {
setsql.append(" = ").append(info.formatSQLValue(val, sqlFormatter));
CharSequence sqlval = info.formatSQLValue(val, sqlFormatter);
if (sqlval == null && info.isNotNullJson(attr)) sqlval = "''";
setsql.append(" = ").append(sqlval);
}
}
if (neednode) {
@@ -1279,7 +1310,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
String column = info.getPrimary().field();
int c = 0;
for (Serializable id : pks) {
Sheet<T> sheet = querySheetCompose(false, true, clazz, null, FLIPPER_ONE, FilterNode.create(column, id)).join();
Sheet<T> sheet = querySheetCompose(false, true, false, clazz, null, FLIPPER_ONE, FilterNode.create(column, id)).join();
T value = sheet.isEmpty() ? null : sheet.list().get(0);
if (value != null) c += cache.update(value);
}
@@ -1508,6 +1539,118 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
return queryColumnMapDB(info, sql, keyColumn);
}
@Override
public <T, K extends Serializable, N extends Number> Map<K, N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn) {
return queryColumnMap(entityClass, funcNodes, groupByColumn, (FilterNode) null);
}
@Override
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K, N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn) {
return queryColumnMapAsync(entityClass, funcNodes, groupByColumn, (FilterNode) null);
}
@Override
public <T, K extends Serializable, N extends Number> Map<K, N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn, final FilterBean bean) {
return queryColumnMap(entityClass, funcNodes, groupByColumn, FilterNodeBean.createFilterNode(bean));
}
@Override
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K, N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn, final FilterBean bean) {
return queryColumnMapAsync(entityClass, funcNodes, groupByColumn, FilterNodeBean.createFilterNode(bean));
}
@Override
public <T, K extends Serializable, N extends Number> Map<K, N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn, final FilterNode node) {
    // Run the multi-column GROUP BY variant with a one-element key array,
    // then unwrap each single-element key into the flat result map.
    final Map<K[], N[]> grouped = queryColumnMap(entityClass, funcNodes, Utility.ofArray(groupByColumn), node);
    final Map<K, N[]> result = new LinkedHashMap<>();
    for (Map.Entry<K[], N[]> entry : grouped.entrySet()) {
        result.put(entry.getKey()[0], entry.getValue());
    }
    return result;
}
@Override
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K, N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn, final FilterNode node) {
    // Same single-key unwrapping as the synchronous variant, applied when the future completes.
    final CompletableFuture<Map<K[], N[]>> grouped = queryColumnMapAsync(entityClass, funcNodes, Utility.ofArray(groupByColumn), node);
    return grouped.thenApply(map -> {
        final Map<K, N[]> result = new LinkedHashMap<>();
        for (Map.Entry<K[], N[]> entry : map.entrySet()) {
            result.put(entry.getKey()[0], entry.getValue());
        }
        return result;
    });
}
@Override
public <T, K extends Serializable, N extends Number> Map<K[], N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns) {
return queryColumnMap(entityClass, funcNodes, groupByColumns, (FilterNode) null);
}
@Override
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K[], N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns) {
return queryColumnMapAsync(entityClass, funcNodes, groupByColumns, (FilterNode) null);
}
@Override
public <T, K extends Serializable, N extends Number> Map<K[], N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns, final FilterBean bean) {
return queryColumnMap(entityClass, funcNodes, groupByColumns, FilterNodeBean.createFilterNode(bean));
}
@Override
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K[], N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns, final FilterBean bean) {
return queryColumnMapAsync(entityClass, funcNodes, groupByColumns, FilterNodeBean.createFilterNode(bean));
}
@Override
// Aggregates funcNodes grouped by groupByColumns, answering from the entity cache
// when it is authoritative (cache-only source or fully loaded) and the filter can
// be evaluated in memory; otherwise composes and runs a SQL GROUP BY query.
public <T, K extends Serializable, N extends Number> Map<K[], N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns, final FilterNode node) {
final EntityInfo info = loadEntityInfo(entityClass);
final EntityCache cache = info.getCache();
if (cache != null && (isOnlyCache(info) || cache.isFullLoaded())) {
// Only use the cache when the filter node is expressible as an in-memory predicate.
if (node == null || node.isCacheUseable(this)) {
return cache.queryColumnMap(funcNodes, groupByColumns, node);
}
}
// Synchronous fallback: block on the composed async DB query.
return (Map) queryColumnMapCompose(info, funcNodes, groupByColumns, node).join();
}
@Override
// Async counterpart of queryColumnMap(entityClass, funcNodes, groupByColumns, node):
// cache hits complete immediately; otherwise the query is composed against the DB.
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K[], N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns, final FilterNode node) {
final EntityInfo info = loadEntityInfo(entityClass);
final EntityCache cache = info.getCache();
if (cache != null && (isOnlyCache(info) || cache.isFullLoaded())) {
if (node == null || node.isCacheUseable(this)) {
return CompletableFuture.completedFuture(cache.queryColumnMap(funcNodes, groupByColumns, node));
}
}
// A natively-async source composes directly; a blocking source is shifted onto
// the configured executor so the caller's thread is never blocked by join().
if (isAsync()) return queryColumnMapCompose(info, funcNodes, groupByColumns, node);
return CompletableFuture.supplyAsync(() -> (Map) queryColumnMapCompose(info, funcNodes, groupByColumns, node).join(), getExecutor());
}
// Builds and executes "SELECT <groupBy cols>, <aggregate exprs> FROM <table> a
// [<joins>] [WHERE ...] [GROUP BY ...]" for the given aggregation request.
// The table alias is fixed to "a"; joins/WHERE come from the FilterNode when present.
protected <T, K extends Serializable, N extends Number> CompletableFuture<Map<K[], N[]>> queryColumnMapCompose(final EntityInfo<T> info, final ColumnNode[] funcNodes, final String[] groupByColumns, final FilterNode node) {
// Comma-joined list of the GROUP BY columns (empty when no grouping requested).
final StringBuilder groupBySqlColumns = new StringBuilder();
if (groupByColumns != null && groupByColumns.length > 0) {
for (int i = 0; i < groupByColumns.length; i++) {
if (groupBySqlColumns.length() > 0) groupBySqlColumns.append(", ");
groupBySqlColumns.append(info.getSQLColumn("a", groupByColumns[i]));
}
}
// Comma-joined list of the aggregate expressions, one per requested ColumnNode;
// formatting of each expression is delegated to EntityInfo.formatSQLValue.
final StringBuilder funcSqlColumns = new StringBuilder();
for (int i = 0; i < funcNodes.length; i++) {
if (funcSqlColumns.length() > 0) funcSqlColumns.append(", ");
if (funcNodes[i] instanceof ColumnFuncNode) {
funcSqlColumns.append(info.formatSQLValue((Attribute) null, "a", (ColumnFuncNode) funcNodes[i], sqlFormatter));
} else {
funcSqlColumns.append(info.formatSQLValue((Attribute) null, "a", (ColumnNodeValue) funcNodes[i], sqlFormatter));
}
}
// Join clauses and WHERE expression are derived from the filter node, if any.
final Map<Class, String> joinTabalis = node == null ? null : node.getJoinTabalis();
final Set<String> haset = new HashSet<>();
final CharSequence join = node == null ? null : node.createSQLJoin(this, false, joinTabalis, haset, info);
final CharSequence where = node == null ? null : node.createSQLExpress(info, joinTabalis);
String sql = "SELECT ";
if (groupBySqlColumns.length() > 0) sql += groupBySqlColumns + ", ";
sql += funcSqlColumns + " FROM " + info.getTable(node) + " a" + (join == null ? "" : join) + ((where == null || where.length() == 0) ? "" : (" WHERE " + where));
// The same column list is reused verbatim for the GROUP BY clause.
if (groupBySqlColumns.length() > 0) sql += " GROUP BY " + groupBySqlColumns;
if (info.isLoggable(logger, Level.FINEST, sql)) logger.finest(info.getType().getSimpleName() + " querycolumnmap sql=" + sql);
return queryColumnMapDB(info, sql, funcNodes, groupByColumns);
}
//----------------------------- findCompose -----------------------------
/**
* 根据主键获取对象
@@ -1801,33 +1944,70 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
//-----------------------list set----------------------------
@Override
public <T, V extends Serializable> HashSet<V> queryColumnSet(final String selectedColumn, Class<T> clazz, String column, Serializable colval) {
return new LinkedHashSet<>(queryColumnList(selectedColumn, clazz, null, FilterNode.create(column, colval)));
public <T, V extends Serializable> Set<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final String column, final Serializable colval) {
return queryColumnSet(selectedColumn, clazz, null, FilterNode.create(column, colval));
}
@Override
public <T, V extends Serializable> CompletableFuture<HashSet<V>> queryColumnSetAsync(final String selectedColumn, Class<T> clazz, String column, Serializable colval) {
return queryColumnListAsync(selectedColumn, clazz, null, FilterNode.create(column, colval)).thenApply((list) -> new LinkedHashSet(list));
public <T, V extends Serializable> CompletableFuture<Set<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final String column, final Serializable colval) {
return queryColumnSetAsync(selectedColumn, clazz, null, FilterNode.create(column, colval));
}
@Override
public <T, V extends Serializable> HashSet<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final FilterBean bean) {
return new LinkedHashSet<>(queryColumnList(selectedColumn, clazz, null, FilterNodeBean.createFilterNode(bean)));
public <T, V extends Serializable> Set<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final FilterBean bean) {
return queryColumnSet(selectedColumn, clazz, null, FilterNodeBean.createFilterNode(bean));
}
@Override
public <T, V extends Serializable> CompletableFuture<HashSet<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final FilterBean bean) {
return queryColumnListAsync(selectedColumn, clazz, null, FilterNodeBean.createFilterNode(bean)).thenApply((list) -> new LinkedHashSet(list));
public <T, V extends Serializable> CompletableFuture<Set<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final FilterBean bean) {
return queryColumnSetAsync(selectedColumn, clazz, null, FilterNodeBean.createFilterNode(bean));
}
@Override
public <T, V extends Serializable> HashSet<V> queryColumnSet(String selectedColumn, Class<T> clazz, FilterNode node) {
return new LinkedHashSet<>(queryColumnList(selectedColumn, clazz, null, node));
public <T, V extends Serializable> Set<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final FilterNode node) {
return queryColumnSet(selectedColumn, clazz, null, node);
}
@Override
public <T, V extends Serializable> CompletableFuture<HashSet<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final FilterNode node) {
return queryColumnListAsync(selectedColumn, clazz, null, node).thenApply((list) -> new LinkedHashSet(list));
public <T, V extends Serializable> CompletableFuture<Set<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final FilterNode node) {
return queryColumnSetAsync(selectedColumn, clazz, null, node);
}
@Override
public <T, V extends Serializable> Set<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final Flipper flipper, final FilterBean bean) {
return queryColumnSet(selectedColumn, clazz, flipper, FilterNodeBean.createFilterNode(bean));
}
@Override
public <T, V extends Serializable> CompletableFuture<Set<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final Flipper flipper, final FilterBean bean) {
return queryColumnSetAsync(selectedColumn, clazz, flipper, FilterNodeBean.createFilterNode(bean));
}
@Override
public <T, V extends Serializable> Set<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final Flipper flipper, final FilterNode node) {
    // Load only the requested column for the matching page, then project each
    // entity onto that column's value, preserving encounter order.
    final Set<T> entities = querySet(clazz, SelectColumn.includes(selectedColumn), flipper, node);
    final Set<V> values = new LinkedHashSet<>();
    if (!entities.isEmpty()) {
        final Attribute<T, V> attr = (Attribute<T, V>) loadEntityInfo(clazz).getAttribute(selectedColumn);
        for (T entity : entities) {
            values.add(attr.get(entity));
        }
    }
    return values;
}
@Override
// Async projection of a single column over the entities matching (flipper, node):
// fetch the page with only the selected column populated, then map each entity
// to that column's value once the future completes.
public <T, V extends Serializable> CompletableFuture<Set<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final Flipper flipper, final FilterNode node) {
return querySetAsync(clazz, SelectColumn.includes(selectedColumn), flipper, node).thenApply((Set<T> list) -> {
// LinkedHashSet keeps the query's encounter order while deduplicating values.
final Set<V> rs = new LinkedHashSet<>();
if (list.isEmpty()) return rs;
final EntityInfo<T> info = loadEntityInfo(clazz);
final Attribute<T, V> selected = (Attribute<T, V>) info.getAttribute(selectedColumn);
for (T t : list) {
rs.add(selected.get(t));
}
return rs;
});
}
@Override
@@ -2121,6 +2301,145 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
});
}
/**
* Queries the set of entity objects matching the given column value.
*
* @param <T>    the entity type
* @param clazz  the entity class
* @param column the filter column name
* @param colval the filter column value
*
* @return the set of matching entity objects
*/
@Override
public <T> Set<T> querySet(final Class<T> clazz, final String column, final Serializable colval) {
return querySet(clazz, (SelectColumn) null, null, FilterNode.create(column, colval));
}
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final String column, final Serializable colval) {
return querySetAsync(clazz, (SelectColumn) null, null, FilterNode.create(column, colval));
}
@Override
public <T> Set<T> querySet(final Class<T> clazz) {
return querySet(clazz, (SelectColumn) null, null, (FilterNode) null);
}
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz) {
return querySetAsync(clazz, (SelectColumn) null, null, (FilterNode) null);
}
/**
* 根据过滤对象FilterBean查询对象集合
*
* @param <T> Entity类的泛型
* @param clazz Entity类
* @param bean 过滤Bean
*
* @return Entity对象集合
*/
@Override
public <T> Set<T> querySet(final Class<T> clazz, final FilterBean bean) {
return querySet(clazz, (SelectColumn) null, null, FilterNodeBean.createFilterNode(bean));
}
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final FilterBean bean) {
return querySetAsync(clazz, (SelectColumn) null, null, FilterNodeBean.createFilterNode(bean));
}
@Override
public <T> Set<T> querySet(final Class<T> clazz, final FilterNode node) {
return querySet(clazz, (SelectColumn) null, null, node);
}
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final FilterNode node) {
return querySetAsync(clazz, (SelectColumn) null, null, node);
}
/**
* 根据过滤对象FilterBean查询对象集合 对象只填充或排除SelectField指定的字段
*
* @param <T> Entity类的泛型
* @param clazz Entity类
* @param selects 收集的字段
* @param bean 过滤Bean
*
* @return Entity对象的集合
*/
@Override
public <T> Set<T> querySet(final Class<T> clazz, final SelectColumn selects, final FilterBean bean) {
return querySet(clazz, selects, null, FilterNodeBean.createFilterNode(bean));
}
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, SelectColumn selects, final FilterBean bean) {
return querySetAsync(clazz, selects, null, FilterNodeBean.createFilterNode(bean));
}
@Override
public <T> Set<T> querySet(final Class<T> clazz, final SelectColumn selects, final FilterNode node) {
return querySet(clazz, selects, null, node);
}
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, SelectColumn selects, final FilterNode node) {
return querySetAsync(clazz, selects, null, node);
}
@Override
public <T> Set<T> querySet(final Class<T> clazz, final Flipper flipper, final String column, final Serializable colval) {
return querySet(clazz, null, flipper, FilterNode.create(column, colval));
}
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final Flipper flipper, final String column, final Serializable colval) {
return querySetAsync(clazz, null, flipper, FilterNode.create(column, colval));
}
@Override
public <T> Set<T> querySet(final Class<T> clazz, final Flipper flipper, final FilterBean bean) {
return querySet(clazz, null, flipper, FilterNodeBean.createFilterNode(bean));
}
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final Flipper flipper, final FilterBean bean) {
return querySetAsync(clazz, null, flipper, FilterNodeBean.createFilterNode(bean));
}
@Override
public <T> Set<T> querySet(final Class<T> clazz, final Flipper flipper, final FilterNode node) {
return querySet(clazz, null, flipper, node);
}
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final Flipper flipper, final FilterNode node) {
return querySetAsync(clazz, null, flipper, node);
}
@Override
public <T> Set<T> querySet(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterBean bean) {
return querySet(clazz, selects, flipper, FilterNodeBean.createFilterNode(bean));
}
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterBean bean) {
return querySetAsync(clazz, selects, flipper, FilterNodeBean.createFilterNode(bean));
}
@Override
public <T> Set<T> querySet(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterNode node) {
return new LinkedHashSet<>(querySheetCompose(true, false, true, clazz, selects, flipper, node).join().list(true));
}
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterNode node) {
return querySheetCompose(true, false, true, clazz, selects, flipper, node).thenApply((rs) -> new LinkedHashSet<>(rs.list(true)));
}
/**
* 根据指定字段值查询对象集合
*
@@ -2252,12 +2571,12 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
@Override
public <T> List<T> queryList(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterNode node) {
return querySheetCompose(true, false, clazz, selects, flipper, node).join().list(true);
return querySheetCompose(true, false, false, clazz, selects, flipper, node).join().list(true);
}
@Override
public <T> CompletableFuture<List<T>> queryListAsync(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterNode node) {
return querySheetCompose(true, false, clazz, selects, flipper, node).thenApply((rs) -> rs.list(true));
return querySheetCompose(true, false, false, clazz, selects, flipper, node).thenApply((rs) -> rs.list(true));
}
//-----------------------sheet----------------------------
@@ -2314,24 +2633,24 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
@Override
public <T> Sheet<T> querySheet(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterNode node) {
return querySheetCompose(true, true, clazz, selects, flipper, node).join();
return querySheetCompose(true, true, false, clazz, selects, flipper, node).join();
}
@Override
public <T> CompletableFuture<Sheet<T>> querySheetAsync(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterNode node) {
if (isAsync()) return querySheetCompose(true, true, clazz, selects, flipper, node);
return CompletableFuture.supplyAsync(() -> querySheetCompose(true, true, clazz, selects, flipper, node).join(), getExecutor());
if (isAsync()) return querySheetCompose(true, true, false, clazz, selects, flipper, node);
return CompletableFuture.supplyAsync(() -> querySheetCompose(true, true, false, clazz, selects, flipper, node).join(), getExecutor());
}
protected <T> CompletableFuture<Sheet<T>> querySheetCompose(final boolean readcache, final boolean needtotal, final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterNode node) {
protected <T> CompletableFuture<Sheet<T>> querySheetCompose(final boolean readcache, final boolean needtotal, final boolean distinct, final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterNode node) {
final EntityInfo<T> info = loadEntityInfo(clazz);
final EntityCache<T> cache = info.getCache();
if (readcache && cache != null && cache.isFullLoaded()) {
if (node == null || node.isCacheUseable(this)) {
if (info.isLoggable(logger, Level.FINEST, " cache query predicate = ")) logger.finest(clazz.getSimpleName() + " cache query predicate = " + (node == null ? null : node.createPredicate(cache)));
return CompletableFuture.completedFuture(cache.querySheet(needtotal, selects, flipper, node));
return CompletableFuture.completedFuture(cache.querySheet(needtotal, distinct, selects, flipper, node));
}
}
return querySheetDB(info, readcache, needtotal, selects, flipper, node);
return querySheetDB(info, readcache, needtotal, distinct, selects, flipper, node);
}
}

View File

@@ -310,106 +310,201 @@ public final class EntityCache<T> {
return rs;
}
public <V> Number getNumberResult(final FilterFunc func, final Number defResult, final String column, final FilterNode node) {
final Attribute<T, Serializable> attr = column == null ? null : info.getAttribute(column);
// In-memory GROUP BY over the cached entity list: groups entities by the values of
// groupByColumns, then computes the funcNodes aggregates per group.
// NOTE: aggregation currently throws UnsupportedOperationException via
// queryColumnNumbers (not implemented yet), so only the grouping path is live.
public Map<Serializable[], Number[]> queryColumnMap(final ColumnNode[] funcNodes, final String[] groupByColumns, FilterNode node) {
final Predicate<T> filter = node == null ? null : node.createPredicate(this);
Stream<T> stream = this.list.stream();
if (filter != null) stream = stream.filter(filter);
final Attribute<T, Serializable>[] attrs = new Attribute[groupByColumns.length];
for (int i = 0; i < groupByColumns.length; i++) {
attrs[i] = info.getAttribute(groupByColumns[i]);
}
// Arrays compare by identity when used as HashMap keys, so valmap canonicalizes:
// equal value tuples (encoded as a (char)20-separated string) map to the SAME
// Serializable[] instance, making groupingBy collapse them into one group.
final Map<String, Serializable[]> valmap = new HashMap<>();
Function<T, Serializable[]> func = t -> {
StringBuilder sb = new StringBuilder();
final Serializable[] vals = new Serializable[attrs.length];
for (int i = 0; i < attrs.length; i++) {
vals[i] = attrs[i].get(t);
// (char)20 separator prevents collisions between concatenated values.
sb.append((char) 20).append(vals[i]);
}
final String key = sb.toString();
if (!valmap.containsKey(key)) valmap.put(key, vals);
return valmap.get(key);
};
Map<Serializable[], List<T>> listmap = stream.collect(Collectors.groupingBy(func));
final Map<Serializable[], Number[]> rsmap = new HashMap<>(listmap.size());
listmap.forEach((k, l) -> rsmap.put(k, queryColumnNumbers(l, funcNodes)));
return rsmap;
}
// Computes one aggregate Number per funcNode over the given group of entities.
// NOT IMPLEMENTED YET: the "if (true) throw" guard marks this as unfinished while
// keeping the code below it compilable (a bare throw would make the rest an
// unreachable-statement compile error).
private Number[] queryColumnNumbers(final List<T> list, final ColumnNode[] funcNodes) {
if (true) throw new UnsupportedOperationException("Not supported yet.");
Number[] rs = new Number[funcNodes.length];
for (int i = 0; i < rs.length; i++) {
rs[i] = queryColumnNumber(list, funcNodes[i]);
}
return rs;
}
// Dispatches an aggregation node to the matching typed overload; node types other
// than ColumnFuncNode / ColumnNodeValue yield null.
private Number queryColumnNumber(final List<T> list, final ColumnNode funcNode) {
    if (funcNode instanceof ColumnFuncNode) return queryColumnNumber(list, (ColumnFuncNode) funcNode);
    if (funcNode instanceof ColumnNodeValue) return queryColumnNumber(list, (ColumnNodeValue) funcNode);
    return null;
}
// Evaluates a function node (e.g. SUM/AVG over a column) against a group of entities.
// A String value names a column directly; nested ColumnFuncNode / ColumnNodeValue
// values are evaluated recursively.
private Number queryColumnNumber(final List<T> list, final ColumnFuncNode funcNode) {
if (funcNode.getValue() instanceof String) {
final Attribute<T, Serializable> attr = info.getAttribute((String) funcNode.getValue());
final Function<T, Number> attrFunc = x -> (Number) attr.get(x);
// Delegate to the shared aggregator with no extra filter and no default value.
return getNumberResult(list, funcNode.getFunc(), null, attr.type(), attrFunc, (FilterNode) null);
}
Number num = null;
if (funcNode.getValue() instanceof ColumnFuncNode) {
num = queryColumnNumber(list, (ColumnFuncNode) funcNode.getValue());
} else if (funcNode.getValue() instanceof ColumnNodeValue) {
num = queryColumnNumber(list, (ColumnNodeValue) funcNode.getValue());
}
// Unrecognized nested value types fall through as null.
return num;
}
// Stub: ColumnNodeValue aggregation in the cache is not implemented yet and
// always yields null (consistent with queryColumnNumbers throwing upstream).
private Number queryColumnNumber(final List<T> list, final ColumnNodeValue nodeValue) {
return null;
}
private <V> Number getNumberResult(final Collection<T> entityList, final FilterFunc func, final Number defResult, final Class attrType, final Function<T, Number> attrFunc, final FilterNode node) {
final Predicate<T> filter = node == null ? null : node.createPredicate(this);
Stream<T> stream = entityList.stream();
if (filter != null) stream = stream.filter(filter);
switch (func) {
case AVG:
if (attr.type() == int.class || attr.type() == Integer.class || attr.type() == AtomicInteger.class) {
OptionalDouble rs = stream.mapToInt(x -> ((Number) attr.get(x)).intValue()).average();
if (attrType == int.class || attrType == Integer.class || attrType == AtomicInteger.class) {
OptionalDouble rs = stream.mapToInt(x -> ((Number) attrFunc.apply(x)).intValue()).average();
return rs.isPresent() ? (int) rs.getAsDouble() : defResult;
} else if (attr.type() == long.class || attr.type() == Long.class || attr.type() == AtomicLong.class) {
OptionalDouble rs = stream.mapToLong(x -> ((Number) attr.get(x)).longValue()).average();
} else if (attrType == long.class || attrType == Long.class || attrType == AtomicLong.class) {
OptionalDouble rs = stream.mapToLong(x -> ((Number) attrFunc.apply(x)).longValue()).average();
return rs.isPresent() ? (long) rs.getAsDouble() : defResult;
} else if (attr.type() == short.class || attr.type() == Short.class) {
OptionalDouble rs = stream.mapToInt(x -> ((Short) attr.get(x)).intValue()).average();
} else if (attrType == short.class || attrType == Short.class) {
OptionalDouble rs = stream.mapToInt(x -> ((Short) attrFunc.apply(x)).intValue()).average();
return rs.isPresent() ? (short) rs.getAsDouble() : defResult;
} else if (attr.type() == float.class || attr.type() == Float.class) {
OptionalDouble rs = stream.mapToDouble(x -> ((Float) attr.get(x)).doubleValue()).average();
} else if (attrType == float.class || attrType == Float.class) {
OptionalDouble rs = stream.mapToDouble(x -> ((Float) attrFunc.apply(x)).doubleValue()).average();
return rs.isPresent() ? (float) rs.getAsDouble() : defResult;
} else if (attr.type() == double.class || attr.type() == Double.class) {
OptionalDouble rs = stream.mapToDouble(x -> (Double) attr.get(x)).average();
} else if (attrType == double.class || attrType == Double.class) {
OptionalDouble rs = stream.mapToDouble(x -> (Double) attrFunc.apply(x)).average();
return rs.isPresent() ? rs.getAsDouble() : defResult;
}
throw new RuntimeException("getNumberResult error(type:" + type + ", attr.declaringClass: " + attr.declaringClass() + ", attr.field: " + attr.field() + ", attr.type: " + attr.type());
throw new RuntimeException("getNumberResult error(type:" + type + ", attr.type: " + attrType);
case COUNT:
return stream.count();
case DISTINCTCOUNT:
return stream.map(x -> attr.get(x)).distinct().count();
return stream.map(x -> attrFunc.apply(x)).distinct().count();
case MAX:
if (attr.type() == int.class || attr.type() == Integer.class || attr.type() == AtomicInteger.class) {
OptionalInt rs = stream.mapToInt(x -> ((Number) attr.get(x)).intValue()).max();
if (attrType == int.class || attrType == Integer.class || attrType == AtomicInteger.class) {
OptionalInt rs = stream.mapToInt(x -> ((Number) attrFunc.apply(x)).intValue()).max();
return rs.isPresent() ? rs.getAsInt() : defResult;
} else if (attr.type() == long.class || attr.type() == Long.class || attr.type() == AtomicLong.class) {
OptionalLong rs = stream.mapToLong(x -> ((Number) attr.get(x)).longValue()).max();
} else if (attrType == long.class || attrType == Long.class || attrType == AtomicLong.class) {
OptionalLong rs = stream.mapToLong(x -> ((Number) attrFunc.apply(x)).longValue()).max();
return rs.isPresent() ? rs.getAsLong() : defResult;
} else if (attr.type() == short.class || attr.type() == Short.class) {
OptionalInt rs = stream.mapToInt(x -> ((Short) attr.get(x)).intValue()).max();
} else if (attrType == short.class || attrType == Short.class) {
OptionalInt rs = stream.mapToInt(x -> ((Short) attrFunc.apply(x)).intValue()).max();
return rs.isPresent() ? (short) rs.getAsInt() : defResult;
} else if (attr.type() == float.class || attr.type() == Float.class) {
OptionalDouble rs = stream.mapToDouble(x -> ((Float) attr.get(x)).doubleValue()).max();
} else if (attrType == float.class || attrType == Float.class) {
OptionalDouble rs = stream.mapToDouble(x -> ((Float) attrFunc.apply(x)).doubleValue()).max();
return rs.isPresent() ? (float) rs.getAsDouble() : defResult;
} else if (attr.type() == double.class || attr.type() == Double.class) {
OptionalDouble rs = stream.mapToDouble(x -> (Double) attr.get(x)).max();
} else if (attrType == double.class || attrType == Double.class) {
OptionalDouble rs = stream.mapToDouble(x -> (Double) attrFunc.apply(x)).max();
return rs.isPresent() ? rs.getAsDouble() : defResult;
}
throw new RuntimeException("getNumberResult error(type:" + type + ", attr.declaringClass: " + attr.declaringClass() + ", attr.field: " + attr.field() + ", attr.type: " + attr.type());
throw new RuntimeException("getNumberResult error(type:" + type + ", attr.type: " + attrType);
case MIN:
if (attr.type() == int.class || attr.type() == Integer.class || attr.type() == AtomicInteger.class) {
OptionalInt rs = stream.mapToInt(x -> ((Number) attr.get(x)).intValue()).min();
if (attrType == int.class || attrType == Integer.class || attrType == AtomicInteger.class) {
OptionalInt rs = stream.mapToInt(x -> ((Number) attrFunc.apply(x)).intValue()).min();
return rs.isPresent() ? rs.getAsInt() : defResult;
} else if (attr.type() == long.class || attr.type() == Long.class || attr.type() == AtomicLong.class) {
OptionalLong rs = stream.mapToLong(x -> ((Number) attr.get(x)).longValue()).min();
} else if (attrType == long.class || attrType == Long.class || attrType == AtomicLong.class) {
OptionalLong rs = stream.mapToLong(x -> ((Number) attrFunc.apply(x)).longValue()).min();
return rs.isPresent() ? rs.getAsLong() : defResult;
} else if (attr.type() == short.class || attr.type() == Short.class) {
OptionalInt rs = stream.mapToInt(x -> ((Short) attr.get(x)).intValue()).min();
} else if (attrType == short.class || attrType == Short.class) {
OptionalInt rs = stream.mapToInt(x -> ((Short) attrFunc.apply(x)).intValue()).min();
return rs.isPresent() ? (short) rs.getAsInt() : defResult;
} else if (attr.type() == float.class || attr.type() == Float.class) {
OptionalDouble rs = stream.mapToDouble(x -> ((Float) attr.get(x)).doubleValue()).min();
} else if (attrType == float.class || attrType == Float.class) {
OptionalDouble rs = stream.mapToDouble(x -> ((Float) attrFunc.apply(x)).doubleValue()).min();
return rs.isPresent() ? (float) rs.getAsDouble() : defResult;
} else if (attr.type() == double.class || attr.type() == Double.class) {
OptionalDouble rs = stream.mapToDouble(x -> (Double) attr.get(x)).min();
} else if (attrType == double.class || attrType == Double.class) {
OptionalDouble rs = stream.mapToDouble(x -> (Double) attrFunc.apply(x)).min();
return rs.isPresent() ? rs.getAsDouble() : defResult;
}
throw new RuntimeException("getNumberResult error(type:" + type + ", attr.declaringClass: " + attr.declaringClass() + ", attr.field: " + attr.field() + ", attr.type: " + attr.type());
throw new RuntimeException("getNumberResult error(type:" + type + ", attr.type: " + attrType);
case SUM:
if (attr.type() == int.class || attr.type() == Integer.class || attr.type() == AtomicInteger.class) {
return stream.mapToInt(x -> ((Number) attr.get(x)).intValue()).sum();
} else if (attr.type() == long.class || attr.type() == Long.class || attr.type() == AtomicLong.class) {
return stream.mapToLong(x -> ((Number) attr.get(x)).longValue()).sum();
} else if (attr.type() == short.class || attr.type() == Short.class) {
return (short) stream.mapToInt(x -> ((Short) attr.get(x)).intValue()).sum();
} else if (attr.type() == float.class || attr.type() == Float.class) {
return (float) stream.mapToDouble(x -> ((Float) attr.get(x)).doubleValue()).sum();
} else if (attr.type() == double.class || attr.type() == Double.class) {
return stream.mapToDouble(x -> (Double) attr.get(x)).sum();
if (attrType == int.class || attrType == Integer.class || attrType == AtomicInteger.class) {
return stream.mapToInt(x -> ((Number) attrFunc.apply(x)).intValue()).sum();
} else if (attrType == long.class || attrType == Long.class || attrType == AtomicLong.class) {
return stream.mapToLong(x -> ((Number) attrFunc.apply(x)).longValue()).sum();
} else if (attrType == short.class || attrType == Short.class) {
return (short) stream.mapToInt(x -> ((Short) attrFunc.apply(x)).intValue()).sum();
} else if (attrType == float.class || attrType == Float.class) {
return (float) stream.mapToDouble(x -> ((Float) attrFunc.apply(x)).doubleValue()).sum();
} else if (attrType == double.class || attrType == Double.class) {
return stream.mapToDouble(x -> (Double) attrFunc.apply(x)).sum();
}
throw new RuntimeException("getNumberResult error(type:" + type + ", attr.declaringClass: " + attr.declaringClass() + ", attr.field: " + attr.field() + ", attr.type: " + attr.type());
throw new RuntimeException("getNumberResult error(type:" + type + ", attr.type: " + attrType);
}
return defResult;
}
public Sheet<T> querySheet(final SelectColumn selects, final Flipper flipper, final FilterNode node) {
return querySheet(true, selects, flipper, node);
public <V> Number getNumberResult(final FilterFunc func, final Number defResult, final String column, final FilterNode node) {
    // COUNT passes column == null: aggregate without resolving an attribute.
    if (column == null) {
        return getNumberResult(this.list, func, defResult, null, null, node);
    }
    final Attribute<T, Serializable> colAttr = info.getAttribute(column);
    if (colAttr == null) {
        // Unknown column name behaves like the no-column (COUNT) case.
        return getNumberResult(this.list, func, defResult, null, null, node);
    }
    // Adapt the attribute getter to a Number-producing function for the aggregator.
    return getNumberResult(this.list, func, defResult, colAttr.type(), x -> (Number) colAttr.get(x), node);
}
public Sheet<T> querySheet(final boolean needtotal, final SelectColumn selects, final Flipper flipper, FilterNode node) {
public Sheet<T> querySheet(final SelectColumn selects, final Flipper flipper, final FilterNode node) {
return querySheet(true, false, selects, flipper, node);
}
/**
 * Filters the stream so that at most one element survives per distinct
 * combination of the given key attributes.
 *
 * @param <T>      element type
 * @param stream   source stream
 * @param keyattrs attributes forming the distinct key; null means no filtering
 *
 * @return the filtered stream (or the original stream when keyattrs is null)
 */
protected <T> Stream<T> distinctStream(Stream<T> stream, final List<Attribute<T, Serializable>> keyattrs) {
    if (keyattrs == null) return stream;
    // NOTE(review): HashSet is not thread-safe — assumes the pipeline stays sequential; confirm callers never parallelize.
    final Set<String> keys = new HashSet<>();
    Predicate<T> filter = t -> {
        StringBuilder sb = new StringBuilder();
        for (Attribute attr : keyattrs) {
            // The '\u0001' delimiter prevents key collisions such as ("ab","c") vs ("a","bc"),
            // which the previous undelimited concatenation wrongly treated as duplicates.
            sb.append(attr.get(t)).append('\u0001');
        }
        // Set.add returns false when the key was already seen — drop the duplicate element.
        return keys.add(sb.toString());
    };
    return stream.filter(filter);
}
public Sheet<T> querySheet(final boolean needtotal, final boolean distinct, final SelectColumn selects, final Flipper flipper, FilterNode node) {
final Predicate<T> filter = node == null ? null : node.createPredicate(this);
final Comparator<T> comparator = createComparator(flipper);
long total = 0;
List<Attribute<T, Serializable>> keyattrs = null;
if (distinct) {
final List<Attribute<T, Serializable>> attrs = new ArrayList<>();
info.forEachAttribute((k, v) -> {
if (selects == null || selects.test(k)) attrs.add(v);
});
keyattrs = attrs;
}
if (needtotal) {
Stream<T> stream = this.list.stream();
if (filter != null) stream = stream.filter(filter);
if (distinct) stream = distinctStream(stream, keyattrs);
total = stream.count();
}
if (needtotal && total == 0) return new Sheet<>();
if (needtotal && total == 0) return new Sheet<>(0, new ArrayList());
Stream<T> stream = this.list.stream();
if (filter != null) stream = stream.filter(filter);
if (distinct) stream = distinctStream(stream, keyattrs);
if (comparator != null) stream = stream.sorted(comparator);
if (flipper != null && flipper.getOffset() > 0) stream = stream.skip(flipper.getOffset());
if (flipper != null && flipper.getLimit() > 0) stream = stream.limit(flipper.getLimit());
@@ -596,12 +691,22 @@ public final class EntityCache<T> {
}
}
/**
 * Decrements the given numeric column of the cached entity identified by the primary key.
 *
 * @param <V>      column type
 * @param pk       primary key; null yields null
 * @param attr     column attribute to decrement
 * @param incvalue amount to subtract
 *
 * @return the updated cached entity, or null when the key is null or not cached
 */
public <V> T updateColumnDecrement(final Serializable pk, Attribute<T, V> attr, final long incvalue) {
    if (pk == null) return null;
    final T entity = this.map.get(pk);
    if (entity == null) return null;
    // Lock the entity itself so concurrent column updates on the same row serialize.
    synchronized (entity) {
        return updateColumn(attr, entity, ColumnExpress.DEC, incvalue);
    }
}
private <V> T updateColumn(Attribute<T, V> attr, final T entity, final ColumnExpress express, Serializable val) {
final Class ft = attr.type();
Number numb = null;
Serializable newval = null;
switch (express) {
case INC:
case DEC:
case MUL:
case DIV:
case MOD:
@@ -666,7 +771,26 @@ public final class EntityCache<T> {
if (numb == null) {
numb = (Number) val;
} else {
numb = numb.longValue() + ((Number) val).longValue();
if (numb instanceof Float || ((Number) val) instanceof Float) {
numb = numb.floatValue() + ((Number) val).floatValue();
} else if (numb instanceof Double || ((Number) val) instanceof Double) {
numb = numb.doubleValue() + ((Number) val).doubleValue();
} else {
numb = numb.longValue() + ((Number) val).longValue();
}
}
break;
case DEC:
if (numb == null) {
numb = (Number) val;
} else {
if (numb instanceof Float || ((Number) val) instanceof Float) {
numb = numb.floatValue() - ((Number) val).floatValue();
} else if (numb instanceof Double || ((Number) val) instanceof Double) {
numb = numb.doubleValue() - ((Number) val).doubleValue();
} else {
numb = numb.longValue() - ((Number) val).longValue();
}
}
break;
case MUL:

View File

@@ -9,7 +9,7 @@ import java.io.Serializable;
import java.lang.reflect.*;
import java.sql.*;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.*;
import java.util.concurrent.atomic.*;
import java.util.function.*;
import java.util.logging.*;
@@ -95,7 +95,10 @@ public final class EntityInfo<T> {
private final String tablecopySQL;
//用于存在database.table_20160202类似这种分布式表
private final Set<String> tables = new HashSet<>();
private final Set<String> tables = new CopyOnWriteArraySet<>();
//不能为null的字段名
private final Set<String> notNullColumns = new CopyOnWriteArraySet<>();
//分表 策略
private final DistributeTableStrategy<T> tableStrategy;
@@ -304,7 +307,7 @@ public final class EntityInfo<T> {
}
Attribute attr;
try {
attr = Attribute.create(cltmp, field, cryptHandler);
attr = Attribute.create(type, cltmp, field, cryptHandler);
} catch (RuntimeException e) {
continue;
}
@@ -322,6 +325,9 @@ public final class EntityInfo<T> {
updateattrs.add(attr);
updateAttributeMap.put(fieldname, attr);
}
if (col != null && !col.nullable()) {
notNullColumns.add(fieldname);
}
}
queryattrs.add(attr);
fields.add(fieldname);
@@ -522,6 +528,10 @@ public final class EntityInfo<T> {
tables.add(tablekey);
}
// Removes a distributed-table key (e.g. "database.table_20160202") from the known-tables set;
// returns true if the key was present. Counterpart of the addDisTable/tables.add path above.
public boolean removeDisTable(String tablekey) {
return tables.remove(tablekey);
}
// Accessor for the configured SQLSTATE codes that signal "table does not exist"
// (presumably used by callers to trigger automatic table creation — confirm against DataSource usage).
public String getTableNotExistSqlStates2() {
return tablenotexistSqlstates;
}
@@ -862,6 +872,9 @@ public final class EntityInfo<T> {
* @return Object
*/
public Object getSQLValue(String fieldname, Serializable fieldvalue) {
if (fieldvalue == null && fieldname != null && isNotNullable(fieldname)) {
if (isNotNullJson(getAttribute(fieldname))) return "";
}
if (this.cryptmap == null) return fieldvalue;
CryptHandler handler = this.cryptmap.get(fieldname);
if (handler == null) return fieldvalue;
@@ -973,10 +986,14 @@ public final class EntityInfo<T> {
protected CharSequence formatSQLValue(String sqlColumn, Attribute<T, Serializable> attr, final ColumnValue cv, BiFunction<EntityInfo, Object, CharSequence> formatter) {
if (cv == null) return null;
Object val = cv.getValue();
if (val instanceof ColumnNodeValue && cv.getExpress() == ColumnExpress.MOV) return formatSQLValue(attr, (ColumnNodeValue) val, formatter);
//ColumnNodeValue时 cv.getExpress() == ColumnExpress.MOV 只用于updateColumn
if (val instanceof ColumnNodeValue) return formatSQLValue(attr, null, (ColumnNodeValue) val, formatter);
if (val instanceof ColumnFuncNode) return formatSQLValue(attr, null, (ColumnFuncNode) val, formatter);
switch (cv.getExpress()) {
case INC:
return new StringBuilder().append(sqlColumn).append(" + ").append(val);
case DEC:
return new StringBuilder().append(sqlColumn).append(" - ").append(val);
case MUL:
return new StringBuilder().append(sqlColumn).append(" * ").append(val);
case DIV:
@@ -990,29 +1007,45 @@ public final class EntityInfo<T> {
case MOV:
CryptHandler handler = attr.attach();
if (handler != null) val = handler.encrypt(val);
return formatter == null ? formatToString(val) : formatter.apply(this, val);
CharSequence rs = formatter == null ? formatToString(val) : formatter.apply(this, val);
if (rs == null && isNotNullJson(attr)) rs = "";
return rs;
}
CryptHandler handler = attr.attach();
if (handler != null) val = handler.encrypt(val);
return formatter == null ? formatToString(val) : formatter.apply(this, val);
}
protected CharSequence formatSQLValue(Attribute<T, Serializable> attr, final ColumnNodeValue node, BiFunction<EntityInfo, Object, CharSequence> formatter) {
protected CharSequence formatSQLValue(Attribute<T, Serializable> attr, String tabalis, final ColumnFuncNode node, BiFunction<EntityInfo, Object, CharSequence> formatter) {
if (node.getValue() instanceof ColumnNodeValue) {
return node.getFunc().getColumn(formatSQLValue(attr, tabalis, (ColumnNodeValue) node.getValue(), formatter).toString());
} else {
return node.getFunc().getColumn(this.getSQLColumn(tabalis, String.valueOf(node.getValue())));
}
}
protected CharSequence formatSQLValue(Attribute<T, Serializable> attr, String tabalis, final ColumnNodeValue node, BiFunction<EntityInfo, Object, CharSequence> formatter) {
Serializable left = node.getLeft();
if (left instanceof CharSequence) {
left = this.getSQLColumn(null, left.toString());
left = this.getSQLColumn(tabalis, left.toString());
} else if (left instanceof ColumnNodeValue) {
left = "(" + formatSQLValue(attr, (ColumnNodeValue) left, formatter) + ")";
left = "(" + formatSQLValue(attr, tabalis, (ColumnNodeValue) left, formatter) + ")";
} else if (left instanceof ColumnFuncNode) {
left = "(" + formatSQLValue(attr, tabalis, (ColumnFuncNode) left, formatter) + ")";
}
Serializable right = node.getRight();
if (right instanceof CharSequence) {
right = this.getSQLColumn(null, right.toString());
} else if (left instanceof ColumnNodeValue) {
right = "(" + formatSQLValue(attr, (ColumnNodeValue) right, formatter) + ")";
right = "(" + formatSQLValue(attr, tabalis, (ColumnNodeValue) right, formatter) + ")";
} else if (left instanceof ColumnFuncNode) {
right = "(" + formatSQLValue(attr, tabalis, (ColumnFuncNode) right, formatter) + ")";
}
switch (node.getExpress()) {
case INC:
return new StringBuilder().append(left).append(" + ").append(right);
case DEC:
return new StringBuilder().append(left).append(" - ").append(right);
case MUL:
return new StringBuilder().append(left).append(" * ").append(right);
case DIV:
@@ -1048,6 +1081,24 @@ public final class EntityInfo<T> {
return logger.isLoggable(l) && l.intValue() >= this.logLevel;
}
// True when the named field was registered as NOT NULL (see notNullColumns population
// from @Column(nullable=false) during attribute loading).
public boolean isNotNullable(String fieldname) {
return notNullColumns.contains(fieldname);
}
// True when the attribute's field was registered as NOT NULL; a null attribute is never NOT NULL.
public boolean isNotNullable(Attribute<T, Serializable> attr) {
    return attr != null && notNullColumns.contains(attr.field());
}
// True when the attribute maps to a NOT NULL column whose Java type is neither numeric,
// textual nor date/time-like — such columns are presumed JSON-serialized, so a null value
// is rendered as "" instead of SQL NULL (see getSQLValue/formatSQLValue).
// NOTE(review): primitive numeric types (int.class etc.) are NOT matched by
// Number.class.isAssignableFrom(...) and would fall through these checks — presumably
// harmless since primitives can never hold null; confirm intended.
public boolean isNotNullJson(Attribute<T, Serializable> attr) {
if (attr == null) return false;
return notNullColumns.contains(attr.field())
&& !Number.class.isAssignableFrom(attr.type())
&& !CharSequence.class.isAssignableFrom(attr.type())
&& java.util.Date.class != attr.type()
&& !attr.type().getName().startsWith("java.sql.")
&& !attr.type().getName().startsWith("java.time.");
}
/**
* 判断日志级别
*

View File

@@ -34,12 +34,12 @@ public @interface FilterJoinColumn {
/**
*
* 多个关联字段, 默认使用join表(b)的主键, join表与被join表(a)的字段必须一样 <br>
* 例如: SELECT a.* FROM user a INNER JOIN record b ON a.userid = b.userid AND a.usertype = b.usertype <br>
* 那么注解为: &#64;FilterJoinColumn(table = Record.class, columns = {"userid", "usertype"}) <br>
* 例如: SELECT a.* FROM user a INNER JOIN orderinfo b ON a.userid = b.userid AND a.usertype = b.usertype <br>
* 那么注解为: &#64;FilterJoinColumn(table = OrderInfo.class, columns = {"userid", "usertype"}) <br>
* <p>
* columns中的字段名如果不一致可以将两个字段名用=连接成一个字段名 <br>
* 例如: SELECT a.* FROM user a INNER JOIN record b ON a.userid = b.buyerid AND a.usertype = b.usertype <br>
* 那么注解为: &#64;FilterJoinColumn(table = Record.class, columns = {"userid=buyerid", "usertype"}) <br>
* 例如: SELECT a.* FROM user a INNER JOIN orderinfo b ON a.userid = b.buyerid AND a.usertype = b.usertype <br>
* 那么注解为: &#64;FilterJoinColumn(table = OrderInfo.class, columns = {"userid=buyerid", "usertype"}) <br>
*
* @return 关联字段
*/

View File

@@ -884,7 +884,7 @@ public class FilterNode { //FilterNode 不能实现Serializable接口 否则
@Override
public boolean test(T t) {
return ((Number) attr.get(t)).longValue() > ((Number) fkattr.get(t)).longValue();
return ((Comparable) attr.get(t)).compareTo((Comparable) fkattr.get(t)) > 0;
}
@Override
@@ -895,7 +895,7 @@ public class FilterNode { //FilterNode 不能实现Serializable接口 否则
@Override
public boolean test(T t) {
return ((Number) attr.get(t)).longValue() > ((Number) val).longValue();
return ((Comparable) attr.get(t)).compareTo(((Comparable) val)) > 0;
}
@Override
@@ -908,7 +908,7 @@ public class FilterNode { //FilterNode 不能实现Serializable接口 否则
@Override
public boolean test(T t) {
return ((Number) attr.get(t)).longValue() < ((Number) fkattr.get(t)).longValue();
return ((Comparable) attr.get(t)).compareTo((Comparable) fkattr.get(t)) < 0;
}
@Override
@@ -919,7 +919,7 @@ public class FilterNode { //FilterNode 不能实现Serializable接口 否则
@Override
public boolean test(T t) {
return ((Number) attr.get(t)).longValue() < ((Number) val).longValue();
return ((Comparable) attr.get(t)).compareTo(((Comparable) val)) < 0;
}
@Override
@@ -932,7 +932,7 @@ public class FilterNode { //FilterNode 不能实现Serializable接口 否则
@Override
public boolean test(T t) {
return ((Number) attr.get(t)).longValue() >= ((Number) fkattr.get(t)).longValue();
return ((Comparable) attr.get(t)).compareTo((Comparable) fkattr.get(t)) >= 0;
}
@Override
@@ -943,7 +943,7 @@ public class FilterNode { //FilterNode 不能实现Serializable接口 否则
@Override
public boolean test(T t) {
return ((Number) attr.get(t)).longValue() >= ((Number) val).longValue();
return ((Comparable) attr.get(t)).compareTo(((Comparable) val)) >= 0;
}
@Override
@@ -956,7 +956,7 @@ public class FilterNode { //FilterNode 不能实现Serializable接口 否则
@Override
public boolean test(T t) {
return ((Number) attr.get(t)).longValue() <= ((Number) fkattr.get(t)).longValue();
return ((Comparable) attr.get(t)).compareTo((Comparable) fkattr.get(t)) <= 0;
}
@Override
@@ -967,7 +967,7 @@ public class FilterNode { //FilterNode 不能实现Serializable接口 否则
@Override
public boolean test(T t) {
return ((Number) attr.get(t)).longValue() <= ((Number) val).longValue();
return ((Comparable) attr.get(t)).compareTo(((Comparable) val)) <= 0;
}
@Override

View File

@@ -74,9 +74,9 @@ public abstract class PoolSource<DBChannel> {
this.username = prop.getProperty(JDBC_USER, "");
this.password = prop.getProperty(JDBC_PWD, "");
this.encoding = prop.getProperty(JDBC_ENCODING, "");
this.connectTimeoutSeconds = Integer.decode(prop.getProperty(JDBC_CONNECTTIMEOUT_SECONDS, "3"));
this.readTimeoutSeconds = Integer.decode(prop.getProperty(JDBC_READTIMEOUT_SECONDS, "3"));
this.writeTimeoutSeconds = Integer.decode(prop.getProperty(JDBC_WRITETIMEOUT_SECONDS, "3"));
this.connectTimeoutSeconds = Integer.decode(prop.getProperty(JDBC_CONNECTTIMEOUT_SECONDS, "6"));
this.readTimeoutSeconds = Integer.decode(prop.getProperty(JDBC_READTIMEOUT_SECONDS, "6"));
this.writeTimeoutSeconds = Integer.decode(prop.getProperty(JDBC_WRITETIMEOUT_SECONDS, "6"));
this.maxconns = Math.max(8, Integer.decode(prop.getProperty(JDBC_CONNECTIONS_LIMIT, "" + Runtime.getRuntime().availableProcessors() * 100)));
this.semaphore = semaphore == null ? new Semaphore(this.maxconns) : semaphore;
String dbtype0 = "";

View File

@@ -37,6 +37,8 @@ import static org.redkale.asm.Opcodes.*;
*
* private java.lang.reflect.Type _gtype = String.class;
*
* private java.lang.Object _attach;
*
* &#64;Override
* public String field() {
* return "name";
@@ -63,6 +65,11 @@ import static org.redkale.asm.Opcodes.*;
* }
*
* &#64;Override
* public Object attach() {
* return _attach;
* }
*
* &#64;Override
* public Class declaringClass() {
* return Record.class;
* }
@@ -253,6 +260,21 @@ public interface Attribute<T, F> {
return create(clazz, field.getName(), (Class) null, field, (java.lang.reflect.Method) null, (java.lang.reflect.Method) null, null);
}
/**
* 根据一个Class和Field生成 Attribute 对象。
*
* @param <T> 依附类的类型
* @param <F> 字段类型
* @param subclass 指定依附的子类
* @param clazz 指定依附的类
* @param field 字段,如果该字段不存在则抛异常
*
* @return Attribute对象
*/
public static <T, F> Attribute<T, F> create(Class<T> subclass, Class<T> clazz, final java.lang.reflect.Field field) {
return create(subclass, clazz, field.getName(), (Class) null, field, (java.lang.reflect.Method) null, (java.lang.reflect.Method) null, null);
}
/**
* 根据一个Class和Field生成 Attribute 对象。
*
@@ -268,6 +290,22 @@ public interface Attribute<T, F> {
return create(clazz, field.getName(), (Class) null, field, (java.lang.reflect.Method) null, (java.lang.reflect.Method) null, attach);
}
/**
* 根据一个Class和Field生成 Attribute 对象。
*
* @param <T> 依附类的类型
* @param <F> 字段类型
* @param subclass 指定依附的子类
* @param clazz 指定依附的类
* @param field 字段,如果该字段不存在则抛异常
* @param attach 附加对象
*
* @return Attribute对象
*/
public static <T, F> Attribute<T, F> create(Class<T> subclass, Class<T> clazz, final java.lang.reflect.Field field, Object attach) {
return create(subclass, clazz, field.getName(), (Class) null, field, (java.lang.reflect.Method) null, (java.lang.reflect.Method) null, attach);
}
/**
* 根据一个Class、field别名和Field生成 Attribute 对象。
*
@@ -578,6 +616,28 @@ public interface Attribute<T, F> {
*/
@SuppressWarnings("unchecked")
public static <T, F> Attribute<T, F> create(final Class<T> clazz, String fieldalias, final Class<F> fieldtype, final java.lang.reflect.Field field, java.lang.reflect.Method getter, java.lang.reflect.Method setter, Object attach) {
return create(null, clazz, fieldalias, fieldtype, field, getter, setter, attach);
}
/**
* 根据Class、字段别名、字段类型、Field、getter和setter方法生成 Attribute 对象。 fieldalias/fieldtype、Field、getter、setter不能同时为null.
*
* @param <T> 依附类的类型
* @param <F> 字段类型
* @param subclass 指定依附的子类
* @param clazz 指定依附的类
* @param fieldalias 字段别名
* @param fieldtype 字段类型
* @param field 字段
* @param getter getter方法
* @param setter setter方法
* @param attach 附加对象
*
* @return Attribute对象
*/
@SuppressWarnings("unchecked")
public static <T, F> Attribute<T, F> create(Class<T> subclass, final Class<T> clazz, String fieldalias, final Class<F> fieldtype, final java.lang.reflect.Field field, java.lang.reflect.Method getter, java.lang.reflect.Method setter, Object attach) {
if (subclass == null) subclass = clazz;
if (fieldalias != null && fieldalias.isEmpty()) fieldalias = null;
int mod = field == null ? java.lang.reflect.Modifier.STATIC : field.getModifiers();
if (field != null && !java.lang.reflect.Modifier.isStatic(mod) && !java.lang.reflect.Modifier.isPublic(mod)) {
@@ -627,6 +687,7 @@ public interface Attribute<T, F> {
final String fieldname = fieldalias;
Class column = fieldtype;
java.lang.reflect.Type generictype = fieldtype;
if (tfield != null) { // public tfield
column = tfield.getType();
generictype = tfield.getGenericType();
@@ -638,20 +699,28 @@ public interface Attribute<T, F> {
generictype = tsetter.getGenericParameterTypes()[0];
} else if (fieldtype == null) {
throw new RuntimeException("[" + clazz + "]have no public field or setter or getter");
} else if (column == null) {
throw new RuntimeException("[" + clazz + "]have no field type");
}
boolean checkCast = false;
if (generictype instanceof java.lang.reflect.TypeVariable) {
checkCast = true;
generictype = TypeToken.getGenericType(generictype, subclass);
if (generictype instanceof Class) column = (Class) generictype;
}
final Class pcolumn = column;
if (column.isPrimitive()) column = java.lang.reflect.Array.get(java.lang.reflect.Array.newInstance(column, 1), 0).getClass();
final String supDynName = Attribute.class.getName().replace('.', '/');
final String interName = clazz.getName().replace('.', '/');
final String interName = subclass.getName().replace('.', '/');
final String columnName = column.getName().replace('.', '/');
final String interDesc = Type.getDescriptor(clazz);
final String interDesc = Type.getDescriptor(subclass);
final String columnDesc = Type.getDescriptor(column);
ClassLoader loader = Thread.currentThread().getContextClassLoader();
String newDynName = supDynName + "_Dyn_" + clazz.getSimpleName() + "_"
String newDynName = supDynName + "_Dyn_" + subclass.getSimpleName() + "_"
+ fieldname.substring(fieldname.indexOf('.') + 1) + "_" + pcolumn.getSimpleName().replace("[]", "Array");
if (String.class.getClassLoader() != clazz.getClassLoader()) {
loader = clazz.getClassLoader();
if (String.class.getClassLoader() != subclass.getClassLoader()) {
loader = subclass.getClassLoader();
newDynName = interName + "_Dyn" + Attribute.class.getSimpleName() + "_"
+ fieldname.substring(fieldname.indexOf('.') + 1) + "_" + pcolumn.getSimpleName().replace("[]", "Array");
}
@@ -735,7 +804,7 @@ public interface Attribute<T, F> {
}
{ //declaringClass 方法
mv = cw.visitMethod(ACC_PUBLIC, "declaringClass", "()Ljava/lang/Class;", null, null);
mv.visitLdcInsn(Type.getType(clazz));
mv.visitLdcInsn(Type.getType(subclass));
mv.visitInsn(ARETURN);
mv.visitMaxs(1, 1);
mv.visitEnd();
@@ -752,6 +821,8 @@ public interface Attribute<T, F> {
if (pcolumn != column) {
mv.visitMethodInsn(INVOKESTATIC, columnName, "valueOf", "(" + Type.getDescriptor(pcolumn) + ")" + columnDesc, false);
m = 2;
} else {
if (checkCast) mv.visitTypeInsn(CHECKCAST, columnName);
}
}
} else {
@@ -760,6 +831,8 @@ public interface Attribute<T, F> {
if (pcolumn != column) {
mv.visitMethodInsn(INVOKESTATIC, columnName, "valueOf", "(" + Type.getDescriptor(pcolumn) + ")" + columnDesc, false);
m = 2;
} else {
if (checkCast) mv.visitTypeInsn(CHECKCAST, columnName);
}
}
mv.visitInsn(ARETURN);

View File

@@ -5,9 +5,9 @@
*/
package org.redkale.util;
import java.nio.*;
import java.nio.ByteBuffer;
import java.nio.charset.*;
import java.util.*;
import java.util.Arrays;
/**
* 简单的byte[]操作类。
@@ -338,10 +338,21 @@ public final class ByteArray {
* @return 字符串
*/
public String toString(final int offset, int len, final Charset charset) {
if (charset == null) return new String(Utility.decodeUTF8(content, offset, len));
if (charset == null) return new String(content, offset, len, StandardCharsets.UTF_8);
return new String(content, offset, len, charset);
}
/**
* 将指定的起始位置和长度按指定字符集并转义后转成字符串
*
* @param charset 字符集
*
* @return 字符串
*/
// Convenience overload: decode-and-convert the WHOLE buffer (offset 0, length = current count)
// with the given charset, delegating to the (offset, len, charset) variant below.
public String toDecodeString(final Charset charset) {
return toDecodeString(0, count, charset);
}
/**
* 将指定的起始位置和长度按指定字符集并转义后转成字符串
*
@@ -382,7 +393,7 @@ public final class ByteArray {
start = 0;
len = index;
}
if (charset == null) return new String(Utility.decodeUTF8(bs, start, len));
if (charset == null) return new String(bs, start, len, StandardCharsets.UTF_8);
return new String(bs, start, len, charset);
}

View File

@@ -17,7 +17,7 @@ public final class Redkale {
}
public static String getDotedVersion() {
return "2.0.0-rc3";
return "2.0.0-rc4";
}
public static int getMajorVersion() {

View File

@@ -171,6 +171,10 @@ public class SelectColumn implements Predicate<String> {
return new SelectColumn(Utility.append(cols, columns), true);
}
// True only for an inclusive (non-exclude) selection naming exactly one column.
public boolean isOnlyOneColumn() {
return !excludable && columns != null && columns.length == 1;
}
@Override
public boolean test(final String column) {
if (this.columns != null) {

View File

@@ -151,7 +151,13 @@ public abstract class TypeToken<T> {
}
if (atas.length == asts.length) {
for (int i = 0; i < asts.length; i++) {
if (asts[i] == type) return atas[i];
if (asts[i] == type) {
if (atas[i] instanceof Class
&& ((TypeVariable) type).getBounds().length == 1
&& ((TypeVariable) type).getBounds()[0] instanceof Class
&& ((Class) ((TypeVariable) type).getBounds()[0]).isAssignableFrom((Class) atas[i]))
return atas[i];
}
}
}
Type moreType = ((ParameterizedType) superType).getRawType();

View File

@@ -284,6 +284,18 @@ public final class Utility {
return list;
}
/**
* 将多个元素组合成一个Array
*
* @param <T> 泛型
* @param items 元素
*
* @return Array
*/
// Returns the varargs-supplied array itself — no defensive copy, so callers share one instance.
// NOTE(review): exposing the varargs array as T[] can cause heap pollution if the caller
// assigns it to a more specific array type; acceptable for this internal convenience helper.
public static <T> T[] ofArray(T... items) {
return items;
}
/**
* 获取不带"-"的UUID值
*
@@ -1281,8 +1293,8 @@ public final class Utility {
Enumeration<InetAddress> eis = nif.getInetAddresses();
while (eis.hasMoreElements()) {
InetAddress ia = eis.nextElement();
if (ia.isLoopbackAddress()) back = ia;
if (ia.isSiteLocalAddress()) return ia;
if (ia.isLoopbackAddress() && ia instanceof Inet4Address) back = ia;
if (ia.isSiteLocalAddress() && ia instanceof Inet4Address) return ia;
}
}
} catch (Exception e) {

View File

@@ -11,12 +11,12 @@ import java.util.function.BiFunction;
import javax.persistence.Id;
import org.redkale.convert.json.JsonConvert;
import org.redkale.source.*;
import org.redkale.util.Attribute;
/**
*
* @author zhangjx
*/
@VirtualEntity(loader = CacheTestBean.DefaultBeanLoader.class)
public class CacheTestBean {
@Id
@@ -27,19 +27,9 @@ public class CacheTestBean {
private long price;
public static void main(String[] args) throws Exception {
final List<CacheTestBean> list = new ArrayList<>();
list.add(new CacheTestBean(1, "a", 12));
list.add(new CacheTestBean(1, "a", 18));
list.add(new CacheTestBean(2, "b", 20));
list.add(new CacheTestBean(2, "bb", 60));
Attribute idattr = Attribute.create(CacheTestBean.class, "pkgid");
Attribute nameattr = Attribute.create(CacheTestBean.class, "name");
Attribute priceattr = Attribute.create(CacheTestBean.class, "price");
BiFunction<DataSource, Class, List> fullloader = (s, z) -> list;
Method method = EntityInfo.class.getDeclaredMethod("load", Class.class, boolean.class, Properties.class,
DataSource.class, BiFunction.class);
Method method = EntityInfo.class.getDeclaredMethod("load", Class.class, boolean.class, Properties.class, DataSource.class, BiFunction.class);
method.setAccessible(true);
final EntityInfo<CacheTestBean> info = (EntityInfo<CacheTestBean>) method.invoke(null, CacheTestBean.class, true, new Properties(), null, fullloader);
final EntityInfo<CacheTestBean> info = (EntityInfo<CacheTestBean>) method.invoke(null, CacheTestBean.class, true, new Properties(), null, new CacheTestBean.DefaultBeanLoader());
EntityCache<CacheTestBean> cache = new EntityCache(info, null);
cache.fullLoad();
@@ -93,4 +83,17 @@ public class CacheTestBean {
return JsonConvert.root().convertTo(this);
}
// Test fixture loader: replaces the inline fullloader lambda so the @VirtualEntity
// annotation can reference it by class. Supplies a fixed in-memory dataset to seed
// the EntityCache (two pkgid groups, duplicate (1,"a") names for distinct/group tests).
public static class DefaultBeanLoader implements BiFunction<DataSource, Class, List> {
@Override
public List apply(DataSource t, Class u) {
// DataSource and entity class arguments are ignored — data is hard-coded.
final List<CacheTestBean> list = new ArrayList<>();
list.add(new CacheTestBean(1, "a", 12));
list.add(new CacheTestBean(1, "a", 18));
list.add(new CacheTestBean(2, "b", 20));
list.add(new CacheTestBean(2, "bb", 60));
return list;
}
}
}

View File

@@ -78,7 +78,7 @@ public class TestSourceCache {
@Override
public void run() {
for (int k = 0; k < 10; k++) {
info.getCache().querySheet(true, null, flipper, node);
info.getCache().querySheet(true, false, null, flipper, node);
}
cdl.countDown();
}

View File

@@ -65,7 +65,7 @@ public class VideoWebSocketServlet extends WebSocketServlet {
}
@Override
public void onConnected() {
public CompletableFuture onConnected() {
if (repeat) {
super.close();
} else {
@@ -82,6 +82,7 @@ public class VideoWebSocketServlet extends WebSocketServlet {
String msg = ("{'type':'discover_user','user':{'userid':'" + this.getSessionid() + "','username':'" + users.get(this.getSessionid()) + "'}}").replace('\'', '"');
super.broadcastMessage(msg);
}
return null;
}
@Override
@@ -91,10 +92,10 @@ public class VideoWebSocketServlet extends WebSocketServlet {
}
@Override
public void onClose(int code, String reason) {
public CompletableFuture onClose(int code, String reason) {
sessions.remove(this.getSessionid());
String msg = ("{'type':'remove_user','user':{'userid':'" + this.getSessionid() + "','username':'" + users.get(this.getSessionid()) + "'}}").replace('\'', '"');
super.broadcastMessage(msg);
return super.broadcastMessage(msg);
}
@Override