204 Commits

Author SHA1 Message Date
Redkale
51c50415e1 Redkale 2.0.0 正式版 结束 2020-04-08 13:24:51 +08:00
Redkale
2f08fa6476 2020-04-03 15:10:57 +08:00
Redkale
3091972fc5 2020-04-02 17:28:58 +08:00
Redkale
8e14df9a95 2020-04-01 13:33:43 +08:00
Redkale
683cec6f4d 2020-04-01 13:33:04 +08:00
Redkale
ec6e5aa3b1 2020-02-24 07:49:34 +08:00
Redkale
f4c00a9b6f 2020-02-12 11:43:13 +08:00
Redkale
c071ec5d6c 2020-02-09 12:37:40 +08:00
Redkale
29148c4b42 TypeToken.getGenericType加强 2020-02-09 12:37:00 +08:00
Redkale
7340caa4a9 2020-02-09 11:35:07 +08:00
Redkale
7d23dfa73d 2020-02-09 11:34:04 +08:00
Redkale
19932820a9 2020-02-06 16:28:48 +08:00
Redkale
6b59c54087 Redkale 2.0.0.rc4 结束 2020-02-02 10:48:54 +08:00
Redkale
58f5ee999a 2020-01-30 12:04:56 +08:00
Redkale
32e8c033ea 2020-01-30 12:02:42 +08:00
Redkale
84a96f750f 2020-01-30 11:53:17 +08:00
Redkale
6560e71e48 2020-01-30 11:44:46 +08:00
Redkale
ee8a18a528 2020-01-30 11:35:05 +08:00
Redkale
c36fc36964 2020-01-29 12:31:11 +08:00
Redkale
a34f85bfc9 2020-01-29 11:35:49 +08:00
Redkale
6f00efa077 2020-01-29 11:28:24 +08:00
Redkale
469dff8478 2020-01-27 18:39:11 +08:00
Redkale
c50eb79b1d 2020-01-27 15:57:13 +08:00
Redkale
985bf6ed25 2020-01-27 15:56:41 +08:00
Redkale
bd51696e08 2020-01-27 14:03:53 +08:00
Redkale
2764d59a4f 2020-01-27 13:01:43 +08:00
Redkale
c1509bb712 2020-01-27 12:32:22 +08:00
Redkale
b14f32deb3 2020-01-27 12:31:34 +08:00
Redkale
62145a2aad 2020-01-27 12:23:33 +08:00
Redkale
8fb88a57b2 2020-01-26 20:20:17 +08:00
Redkale
44f12ae003 2020-01-26 15:51:05 +08:00
Redkale
bf97ef3a08 2020-01-26 14:03:49 +08:00
Redkale
aeefc3b8a2 2020-01-17 14:43:33 +08:00
Redkale
5763718816 WebSocketRunner加入写队列 2020-01-17 14:39:19 +08:00
Redkale
3b142b7504 PoolSource的默认超时时间从3秒改成6秒 2020-01-17 14:04:15 +08:00
Redkale
0f52d32424 TcpAioAsyncConnection去掉队列写 2020-01-17 13:57:41 +08:00
Redkale
f5f3c48f38 2020-01-16 17:24:46 +08:00
Redkale
9f9f5aa000 HttpResponse增加retResultHandler属性 2020-01-16 16:59:37 +08:00
Redkale
be4ca0287b HttpResponse增加retResultHandler属性 2020-01-16 11:11:34 +08:00
Redkale
a84ed72f28 2020-01-14 16:16:28 +08:00
Redkale
8eb5f56f42 2020-01-14 10:59:45 +08:00
Redkale
df1aa2b379 2020-01-11 22:02:42 +08:00
Redkale
da2befcb97 2020-01-11 21:22:34 +08:00
Redkale
92d0f7e796 2020-01-11 21:17:11 +08:00
Redkale
6aa3949d05 DataSource增加可group by的queryColumnMap系列方法,缓存EntityCache部分未实现 2020-01-11 13:32:31 +08:00
Redkale
fa833d9224 2020-01-10 20:30:08 +08:00
Redkale
c9261f8475 增加ofArray方法 2020-01-10 19:25:17 +08:00
Redkale
a1df62af08 2020-01-10 19:20:32 +08:00
Redkale
aa12413f4e 2020-01-10 19:15:37 +08:00
Redkale
2ccd9ba10f ColumnExpress增加减法DEC 2020-01-10 19:10:10 +08:00
Redkale
81ae68c571 ColumnExpress增加减法DEC 2020-01-10 19:08:50 +08:00
Redkale
51b45f4713 DataSource.insert增加Collection、Stream方法 2020-01-08 13:36:59 +08:00
Redkale
66e1f58879 2020-01-08 10:42:08 +08:00
Redkale
06bb5180cf 【不兼容】WebSocket中onConnected、onClose方法的返回值由void改成CompletableFuture 2020-01-08 10:36:54 +08:00
Redkale
d83d7f879c 2020-01-07 16:32:03 +08:00
Redkale
b8e92c949e 2020-01-07 16:31:32 +08:00
Redkale
91548a0ca9 2020-01-07 16:31:02 +08:00
Redkale
dbca25cd54 2020-01-07 16:29:38 +08:00
Redkale
539ea15ae5 2020-01-07 16:28:11 +08:00
Redkale
ad1d9f33d4 WebSocket增加getUserSet方法 2020-01-07 13:07:04 +08:00
Redkale
df98c1a58e 优化querySet和queryColumnSet系列方法 2020-01-07 11:48:30 +08:00
Redkale
56d1969c96 2020-01-07 11:46:34 +08:00
Redkale
4b341436af 2020-01-07 11:01:08 +08:00
Redkale
4d3d73b4c1 WebSocketRange增加几个小方法 2020-01-07 10:21:01 +08:00
Redkale
55ab279e7f 2020-01-02 09:51:37 +08:00
Redkale
82ab994608 2019-12-19 10:55:11 +08:00
Redkale
1c4035e677 2019-12-18 23:50:29 +08:00
Redkale
afdc9e7207 2019-12-18 23:00:23 +08:00
Redkale
9b83abb06a Attribute增加subclass参数用于识别泛型的子类 2019-12-18 22:06:54 +08:00
Redkale
26d1a10bd0 2019-12-18 19:51:35 +08:00
Redkale
7e55dcc46d FilterNode兼容String的 >= > < <= 2019-12-11 19:01:45 +08:00
Redkale
45802d2403 修复2019.6.20改动时remoteAddrHeader带来的bug 2019-12-07 21:02:13 +08:00
Redkale
3660a2a4e5 2019-12-07 19:43:43 +08:00
Redkale
bed81bd93d 修复WriteMoreCompletionHandler的bug 2019-11-30 11:14:44 +08:00
Redkale
131855cdc5 2019-11-30 09:04:01 +08:00
Redkale
8b69e7d02b Redkale 2.0.0.rc3 结束 2019-11-25 16:33:48 +08:00
Redkale
02a10bf014 2019-11-23 10:55:29 +08:00
Redkale
004b83172e 2019-11-23 09:28:18 +08:00
Redkale
fee4555cef 2019-11-23 09:04:08 +08:00
Redkale
758bd7de72 2019-11-23 09:01:53 +08:00
Redkale
b2dd366640 2019-11-19 20:47:16 +08:00
Redkale
934c82eadd 优化JsonWrite.writeSmallString方法 2019-11-18 13:42:55 +08:00
Redkale
c7ed6574cc 优化JsonWrite.writeSmallString方法 2019-11-18 13:34:51 +08:00
Redkale
2ea2667fa7 Utility增加byteArray方法 2019-11-18 13:25:51 +08:00
Redkale
34ae2d38c5 2019-11-15 15:54:34 +08:00
Redkale
a1c95544cb 2019-11-15 15:19:16 +08:00
Redkale
c6dc38c35c PoolTcpSource增加ping接口 2019-11-14 12:04:22 +08:00
Redkale
39203ab598 2019-11-13 10:11:35 +08:00
Redkale
51a95a84aa 去掉AsyncConnection内的ByteBufferPool,HttpResponse合并header和body的Buffer 2019-11-13 10:07:48 +08:00
Redkale
8a8d45e642 DataSource增加判断字符串字段值长度的FilterExpress 2019-11-11 11:10:32 +08:00
Redkale
52eb7dbc0c 2019-11-07 09:55:16 +08:00
Redkale
0e14b60f12 2019-11-05 21:21:57 +08:00
Redkale
d373ab7204 2019-11-05 09:31:38 +08:00
Redkale
4f9a563ba7 2019-11-05 09:26:15 +08:00
Redkale
852da19b1e Redkale 2.0.0.rc2 结束 2019-11-05 09:21:49 +08:00
Redkale
ddfc040a53 2019-11-05 09:16:34 +08:00
Redkale
df3ccb763a 2019-11-04 11:53:11 +08:00
Redkale
f42561ca93 convert支持sql包的几个date类型 2019-11-03 11:47:54 +08:00
Redkale
580e28519a 2019-11-02 15:08:38 +08:00
Redkale
9ecc1d8f19 2019-11-02 14:20:07 +08:00
Redkale
40629ed7b9 2019-10-28 13:14:38 +08:00
Redkale
5790135add 2019-10-28 11:59:23 +08:00
Redkale
fd862ed6c6 Convert兼容java.util.Map.Entry 2019-10-28 11:56:45 +08:00
Redkale
33763af96c 兼容TypeToken.typeToClass 方法 2019-10-26 16:03:36 +08:00
Redkale
7c05df3cfb 2019-10-26 15:32:16 +08:00
Redkale
f471e2d4c5 配置支持远程地址 2019-10-26 09:45:33 +08:00
Redkale
d4fd093521 2019-10-25 11:42:50 +08:00
Redkale
40003c7789 2019-10-24 11:45:51 +08:00
Redkale
b94f99f338 2019-10-22 09:11:37 +08:00
Redkale
bd21644571 Redkale 2.0.0.rc1 结束 2019-10-18 08:42:28 +08:00
Redkale
5f3599d9b8 2019-10-17 18:51:08 +08:00
Redkale
1e4a30bd70 2019-10-16 15:17:04 +08:00
Redkale
e7dc5de9f2 2019-10-15 22:18:44 +08:00
Redkale
ccb9cb28f5 修复JsonByteBufferWriter的utf8问题 2019-10-15 21:55:31 +08:00
Redkale
4d9b72c922 2019-10-15 21:43:45 +08:00
Redkale
a51ae13a39 2019-10-15 18:02:34 +08:00
Redkale
dfca186688 2019-10-15 18:01:34 +08:00
Redkale
fadd229a89 2019-10-14 15:29:28 +08:00
Redkale
7acc69adc4 2019-10-14 12:32:53 +08:00
Redkale
d88e4120a1 2019-10-13 17:29:12 +08:00
Redkale
ef98edd91a Convert.newConvert 增加第2个Function参数 2019-10-13 12:55:00 +08:00
Redkale
f4548bbe34 2019-10-11 11:13:41 +08:00
Redkale
11a5faca1d Utility增加reverseSort方法 2019-10-11 08:59:40 +08:00
Redkale
c37b0e8cb5 Redkale 2.0.0.beta5 结束 2019-10-10 09:21:48 +08:00
Redkale
a20570a6eb 2019-10-10 09:17:42 +08:00
Redkale
5e3edb7e1d 2019-10-09 13:43:11 +08:00
Redkale
ad8f1d2da6 增加util.ResourceInjectLoader功能 2019-10-09 13:42:29 +08:00
Redkale
24b23c894f 修复FileSimpledCoder的instance写错的bug 2019-10-09 13:41:39 +08:00
Redkale
c551d5fb81 2019-09-26 16:32:36 +08:00
Redkale
fba43894c1 2019-09-26 16:27:42 +08:00
Redkale
22cc7e086c RetResult增加Convert属性 2019-09-26 16:22:56 +08:00
Redkale
1791008729 2019-09-25 15:41:19 +08:00
Redkale
90e15dd253 2019-09-25 15:40:47 +08:00
Redkale
7db73c076c 2019-09-25 15:33:18 +08:00
Redkale
95ad6e99d9 2019-09-23 18:57:45 +08:00
Redkale
0b2a5d0f61 2019-09-23 08:59:03 +08:00
Redkale
b7acce0814 Redkale 2.0.0.beta4 结束 2019-09-20 08:58:51 +08:00
Redkale
8744e76cad 2019-09-19 21:52:52 +08:00
Redkale
446b3c13dc 2019-09-19 21:38:53 +08:00
Redkale
3951e28148 2019-09-19 21:38:22 +08:00
Redkale
b74d679608 修复ByteBuffer Utility.encodeUTF8的bug 2019-09-19 21:35:05 +08:00
Redkale
edbc878b73 修复Utility.encodeUTF8Length的bug 2019-09-19 21:22:47 +08:00
Redkale
f706209ec1 RestResult合并进HttpResult 2019-09-19 21:08:36 +08:00
Redkale
bf000b188f 增加RestResult功能 2019-09-19 20:30:30 +08:00
Redkale
def1736a9b 2019-09-19 13:22:56 +08:00
Redkale
0242f4c0c3 2019-09-19 13:22:20 +08:00
Redkale
5cd399b2df 2019-09-19 13:12:33 +08:00
Redkale
9ddb662016 Convert增加BiFunction<Attribute, Object, Object> fieldFunc参数 2019-09-19 12:05:03 +08:00
Redkale
2947275d54 2019-09-18 11:27:54 +08:00
Redkale
e43f814872 Utility增加containsMatch、removeMatch方法 2019-09-18 11:22:28 +08:00
Redkale
6e16f52e28 2019-09-18 11:02:13 +08:00
Redkale
fca13557df 【不兼容】删掉javax.persistence.GeneratedValue功能 2019-09-16 16:35:41 +08:00
Redkale
824a6df55a 2019-09-16 16:29:12 +08:00
Redkale
b98b526c50 2019-09-09 16:35:08 +08:00
Redkale
8f6aa4f4a5 2019-09-09 16:29:48 +08:00
Redkale
00a07a79b2 2019-09-09 16:25:10 +08:00
Redkale
264dfbef2e HttpServlet增加postStart方法 2019-09-04 10:00:29 +08:00
Redkale
2e27814809 2019-09-04 09:22:42 +08:00
Redkale
f767f40e56 StringConvertWrapper 改成 StringWrapper 2019-08-30 15:18:03 +08:00
Redkale
543ecc071a 2019-08-30 10:50:24 +08:00
Redkale
2a14f39495 2019-08-30 09:30:41 +08:00
Redkale
95c8ae2334 2019-08-30 08:05:28 +08:00
Redkale
338ea13828 2019-08-29 17:44:13 +08:00
Redkale
4a8b9e5fec 2019-08-29 16:43:45 +08:00
Redkale
e281cac3d3 2019-08-28 16:15:28 +08:00
Redkale
a495829a3c RestHeader支持InetSocketAddress类型 2019-08-28 14:31:33 +08:00
Redkale
b0deed2a89 WebSocket增加getSncpAddress方法,获取分布式下的sncp地址 2019-08-28 12:56:50 +08:00
Redkale
70a75abf74 Redkale 2.0.0.beta3 结束 2019-08-15 13:11:17 +08:00
Redkale
1aa97f8e79 2019-08-06 10:08:24 +08:00
Redkale
b4000235ac 2019-08-05 17:51:26 +08:00
Redkale
4b93f29a1c 2019-08-05 17:14:14 +08:00
Redkale
4892a50670 2019-08-05 16:26:52 +08:00
Redkale
2fe0ac0ef9 修复ColumnValue.mov时字段类型是long,而参数值是int会报错的bug 2019-07-31 17:19:35 +08:00
Redkale
333ae72148 2019-07-30 11:05:11 +08:00
Redkale
5fd5b7f303 2019-07-30 10:58:37 +08:00
Redkale
528cf45f3f ColumnExpress增加除法、取模表达式 2019-07-30 10:31:48 +08:00
Redkale
c7308e7320 修复ConvertFacotry.register方法中column参数没有对应field时会导致method的ignore失效的bug 2019-07-29 13:20:23 +08:00
Redkale
e2a49eaab7 2019-07-29 11:40:33 +08:00
Redkale
44bd6f235c 2019-07-25 16:58:20 +08:00
Redkale
601d15b513 2019-07-17 19:00:18 +08:00
Redkale
9e93485a97 新增HttpRequest.getQueryBytes方法 2019-07-17 18:42:45 +08:00
Redkale
ad87b2115d 修改多个@RetLabel不能正确根据locale获取对应值的bug 2019-07-17 15:14:48 +08:00
Redkale
27a587d31f 修复mysql下update操作值带转义字符导致失败的bug 2019-07-12 16:52:46 +08:00
Redkale
fc8fa27602 RetResult增加map静态方法 2019-07-11 14:53:51 +08:00
Redkale
f9aebc8ee3 修复同一个Entity类被多个source源分表分库操作时判断表是否已建有误的bug 2019-07-09 14:18:07 +08:00
Redkale
1167da8f4c Redkale 2.0.0.beta2 结束 2019-07-08 09:15:26 +08:00
Redkale
7a5fbcdccd 2019-07-03 11:35:37 +08:00
Redkale
345e929712 [不兼容修改]CacheSource的getCollectionMap序列方法增加一个set参数 2019-06-27 18:21:19 +08:00
Redkale
358862fe59 修复Entity类带boolean字段调DataSource.insert出现异常的bug 2019-06-26 16:51:48 +08:00
Redkale
3dde9bb293 2019-06-21 17:00:40 +08:00
Redkale
99ae4ccadd 从Context中移除BufferPool和ResponsePool 2019-06-20 15:26:20 +08:00
Redkale
98e9ffe0ef 2019-06-20 10:02:24 +08:00
Redkale
6927bfe8ac 2019-06-19 16:52:00 +08:00
Redkale
340a3a8fa3 2019-06-19 16:46:12 +08:00
Redkale
4724763991 2019-06-19 16:45:45 +08:00
Redkale
03353ad08c 2019-06-19 16:21:05 +08:00
Redkale
95c3354fcd WebSocketParam增加getAnnotations系列方法 2019-06-19 15:47:46 +08:00
Redkale
1bda2f92b9 HttpRequest增加getAnnotation系列方法 2019-06-18 22:59:07 +08:00
Redkale
bd3c706934 修复DataSource中json字段不为Serializable时会异常的bug 2019-06-13 22:34:53 +08:00
Redkale
ef3663aa36 修复DataSource中json字段不为Serializable时会异常的bug 2019-06-13 22:23:30 +08:00
Redkale
427ff717d4 UDP协议下bufferCapacity默认值为1350字节 2019-05-30 12:07:09 +08:00
Redkale
b409300412 UDP协议下bufferCapacity默认值为1480字节 2019-05-30 12:04:13 +08:00
Redkale
ca1f974dbe 2019-05-28 17:43:48 +08:00
Redkale
6a8c86096b DataSource的clearTable、dropTable在表不存在的情况下由抛异常改为结果值返回-1 2019-05-28 15:51:59 +08:00
127 changed files with 5212 additions and 1495 deletions

View File

@@ -1 +0,0 @@
<EFBFBD>Լ<EFBFBD><EFBFBD><EFBFBD>ҵ<EFBFBD><EFBFBD>jarĬ<EFBFBD>Ϸ<EFBFBD><EFBFBD>ڴ˴<EFBFBD>

View File

@@ -16,10 +16,6 @@
<directory>${project.basedir}/conf</directory>
<outputDirectory>conf</outputDirectory>
</fileSet>
<fileSet>
<directory>${project.basedir}/libs</directory>
<outputDirectory>libs</outputDirectory>
</fileSet>
<fileSet>
<directory>${project.basedir}/logs</directory>
<outputDirectory>logs</outputDirectory>

View File

@@ -77,6 +77,7 @@
<version>2.6</version>
<configuration>
<archive>
<addMavenDescriptor>false</addMavenDescriptor>
<manifest>
<mainClass>org.redkale.boot.Application</mainClass>
</manifest>

View File

@@ -116,10 +116,10 @@
excludelibs: 排除lib.path与excludes中的正则表达式匹配的路径, 多个正则表达式用分号;隔开
charset: 文本编码, 默认: UTF-8
backlog: 默认10K
threads 线程数, 默认: CPU核数*32
threads 线程数, 默认: CPU核数*2最小8个
maxconns最大连接数, 小于1表示无限制 默认: 0
maxbody: request.body最大值 默认: 64K
bufferCapacity: ByteBuffer的初始化大小 默认: 32K; (HTTP 2.0、WebSocket必须要16k以上)
bufferCapacity: ByteBuffer的初始化大小 TCP默认: 32K; (HTTP 2.0、WebSocket必须要16k以上); UDP默认: 1350B
bufferPoolSize ByteBuffer池的大小默认: 线程数*4
responsePoolSize Response池的大小默认: 线程数*2
aliveTimeoutSeconds: KeepAlive读操作超时秒数 默认30 0表示永久不超时; -1表示禁止KeepAlive

View File

@@ -1,63 +0,0 @@
/** *****************************************************************************
* Copyright (c) 2008 - 2013 Oracle Corporation. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
* which accompanies this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*
* Contributors:
* Linda DeMichiel - Java Persistence 2.1
* Linda DeMichiel - Java Persistence 2.0
*
***************************************************************************** */
package javax.persistence;
import java.lang.annotation.Target;
import java.lang.annotation.Retention;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
* Provides for the specification of generation strategies for the
* values of primary keys.
*
* <p>
* The <code>GeneratedValue</code> annotation
* may be applied to a primary key property or field of an entity or
* mapped superclass in conjunction with the {@link Id} annotation.
* The use of the <code>GeneratedValue</code> annotation is only
* required to be supported for simple primary keys. Use of the
* <code>GeneratedValue</code> annotation is not supported for derived
* primary keys.
*
* <pre>
*
* Example 1:
*
* &#064;Id
* &#064;GeneratedValue(strategy=SEQUENCE, generator="CUST_SEQ")
* &#064;Column(name="CUST_ID")
* public Long getId() { return id; }
*
* Example 2:
*
* &#064;Id
* &#064;GeneratedValue(strategy=TABLE, generator="CUST_GEN")
* &#064;Column(name="CUST_ID")
* Long id;
* </pre>
*
* @see Id
*
* @since Java Persistence 1.0
*/
@Target({METHOD, FIELD})
@Retention(RUNTIME)
public @interface GeneratedValue {
}

View File

@@ -45,7 +45,7 @@ import static java.lang.annotation.RetentionPolicy.RUNTIME;
* </pre>
*
* @see Column
* @see GeneratedValue
* see GeneratedValue
*
* @since Java Persistence 1.0
*/

View File

@@ -30,5 +30,7 @@ module org.redkale {
exports org.redkale.util;
exports org.redkale.watch;
uses org.redkale.source.SourceLoader;
uses org.redkale.util.ResourceInjectLoader;
}
*/

View File

@@ -196,7 +196,7 @@ public final class ApiDocsService {
final FileOutputStream out = new FileOutputStream(new File(app.getHome(), "apidoc.json"));
out.write(json.getBytes("UTF-8"));
out.close();
File doctemplate = new File(app.getConfPath(), "apidoc-template.html");
File doctemplate = new File(app.getConfPath().toString(), "apidoc-template.html");
InputStream in = null;
if (doctemplate.isFile() && doctemplate.canRead()) {
in = new FileInputStream(doctemplate);

View File

@@ -57,12 +57,12 @@ public final class Application {
public static final String RESNAME_APP_TIME = "APP_TIME";
/**
* 当前进程的根目录, 类型String、File、Path
* 当前进程的根目录, 类型String、File、Path、URI
*/
public static final String RESNAME_APP_HOME = "APP_HOME";
/**
* 当前进程的配置目录如果不是绝对路径则视为HOME目录下的相对路径 类型String、File、Path
* 当前进程的配置目录如果不是绝对路径则视为HOME目录下的相对路径 类型String、File、Path、URI
*/
public static final String RESNAME_APP_CONF = "APP_CONF";
@@ -143,7 +143,7 @@ public final class Application {
private final File home;
//配置文件目录
private final File confPath;
private final URI confPath;
//日志
private final Logger logger;
@@ -176,16 +176,19 @@ public final class Application {
this.resourceFactory.register(RESNAME_APP_TIME, long.class, this.startTime);
this.resourceFactory.register(RESNAME_APP_HOME, Path.class, root.toPath());
this.resourceFactory.register(RESNAME_APP_HOME, File.class, root);
this.resourceFactory.register(RESNAME_APP_HOME, URI.class, root.toURI());
try {
this.resourceFactory.register(RESNAME_APP_HOME, root.getCanonicalPath());
this.home = root.getCanonicalFile();
String confsubpath = System.getProperty(RESNAME_APP_CONF, "conf");
if (confsubpath.charAt(0) == '/' || confsubpath.indexOf(':') > 0) {
this.confPath = new File(confsubpath).getCanonicalFile();
if (confsubpath.contains("://")) {
this.confPath = new URI(confsubpath);
} else if (confsubpath.charAt(0) == '/' || confsubpath.indexOf(':') > 0) {
this.confPath = new File(confsubpath).getCanonicalFile().toURI();
} else {
this.confPath = new File(this.home, confsubpath).getCanonicalFile();
this.confPath = new File(this.home, confsubpath).getCanonicalFile().toURI();
}
} catch (IOException e) {
} catch (Exception e) {
throw new RuntimeException(e);
}
String localaddr = config.getValue("address", "").trim();
@@ -209,11 +212,12 @@ public final class Application {
System.setProperty(RESNAME_APP_NODE, node);
}
//以下是初始化日志配置
final File logconf = new File(confPath, "logging.properties");
if (logconf.isFile() && logconf.canRead()) {
final URI logConfURI = "file".equals(confPath.getScheme()) ? new File(new File(confPath), "logging.properties").toURI()
: URI.create(confPath.toString() + (confPath.toString().endsWith("/") ? "" : "/") + "logging.properties");
if (!"file".equals(confPath.getScheme()) || (new File(logConfURI).isFile() && new File(logConfURI).canRead())) {
try {
final String rootpath = root.getCanonicalPath().replace('\\', '/');
FileInputStream fin = new FileInputStream(logconf);
InputStream fin = logConfURI.toURL().openStream();
Properties properties = new Properties();
properties.load(fin);
fin.close();
@@ -301,7 +305,7 @@ public final class Application {
transportExec = Executors.newFixedThreadPool(threads, (Runnable r) -> {
Thread t = new Thread(r);
t.setDaemon(true);
t.setName("Transport-Thread-" + counter.incrementAndGet());
t.setName("Redkale-Transport-Thread-" + counter.incrementAndGet());
return t;
});
transportGroup = AsynchronousChannelGroup.withCachedThreadPool(transportExec, 1);
@@ -316,7 +320,7 @@ public final class Application {
transportExec = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() * 8, (Runnable r) -> {
Thread t = new Thread(r);
t.setDaemon(true);
t.setName("Transport-Thread-" + counter.incrementAndGet());
t.setName("Redkale-Transport-Thread-" + counter.incrementAndGet());
return t;
});
try {
@@ -375,7 +379,7 @@ public final class Application {
return home;
}
public File getConfPath() {
public URI getConfPath() {
return confPath;
}
@@ -398,10 +402,14 @@ public final class Application {
System.setProperty("convert.bson.writer.buffer.defsize", "4096");
System.setProperty("convert.json.writer.buffer.defsize", "4096");
File persist = new File(this.confPath, "persistence.xml");
final String confpath = this.confPath.toString();
final String homepath = this.home.getCanonicalPath();
final String confpath = this.confPath.getCanonicalPath();
if (persist.isFile()) System.setProperty(DataSources.DATASOURCE_CONFPATH, persist.getCanonicalPath());
if ("file".equals(this.confPath.getScheme())) {
File persist = new File(new File(confPath), "persistence.xml");
if (persist.isFile()) System.setProperty(DataSources.DATASOURCE_CONFPATH, persist.getCanonicalPath());
} else {
System.setProperty(DataSources.DATASOURCE_CONFPATH, confpath + (confpath.endsWith("/") ? "" : "/") + "persistence.xml");
}
String pidstr = "";
try { //JDK 9+
Class phclass = Class.forName("java.lang.ProcessHandle");
@@ -425,13 +433,17 @@ public final class Application {
if (dfloads != null) {
for (String dfload : dfloads.split(";")) {
if (dfload.trim().isEmpty()) continue;
final File df = (dfload.indexOf('/') < 0) ? new File(confPath, "/" + dfload) : new File(dfload);
if (df.isFile()) {
final URI df = (dfload.indexOf('/') < 0) ? URI.create(confpath + (confpath.endsWith("/") ? "" : "/") + dfload) : new File(dfload).toURI();
if (!"file".equals(df.getScheme()) || new File(df).isFile()) {
Properties ps = new Properties();
InputStream in = new FileInputStream(df);
ps.load(in);
in.close();
ps.forEach((x, y) -> resourceFactory.register("property." + x, y.toString().replace("${APP_HOME}", homepath)));
try {
InputStream in = df.toURL().openStream();
ps.load(in);
in.close();
ps.forEach((x, y) -> resourceFactory.register("property." + x, y.toString().replace("${APP_HOME}", homepath)));
} catch (Exception e) {
logger.log(Level.WARNING, "load properties(" + dfload + ") error", e);
}
}
}
}
@@ -558,9 +570,10 @@ public final class Application {
}
public void restoreConfig() throws IOException {
if (!"file".equals(this.confPath.getScheme())) return;
synchronized (this) {
File confFile = new File(this.confPath, "application.xml");
confFile.renameTo(new File(this.confPath, "application_" + String.format("%1$tY%1$tm%1$td%1$tH%1$tM%1$tS", System.currentTimeMillis()) + ".xml"));
File confFile = new File(this.confPath.toString(), "application.xml");
confFile.renameTo(new File(this.confPath.toString(), "application_" + String.format("%1$tY%1$tm%1$td%1$tH%1$tM%1$tS", System.currentTimeMillis()) + ".xml"));
final PrintStream ps = new PrintStream(new FileOutputStream(confFile));
ps.append(config.toXML("application"));
ps.close();
@@ -571,7 +584,7 @@ public final class Application {
final Application application = this;
new Thread() {
{
setName("Application-Control-Thread");
setName("Redkale-Application-SelfServer-Thread");
}
@Override
@@ -742,7 +755,7 @@ public final class Application {
Thread thread = new Thread() {
{
String host = serconf.getValue("host", "0.0.0.0").replace("0.0.0.0", "*");
setName(serconf.getValue("protocol", "Server").toUpperCase() + "-" + host + ":" + serconf.getIntValue("port") + "-Thread");
setName("Redkale-" + serconf.getValue("protocol", "Server").toUpperCase() + "-" + host + ":" + serconf.getIntValue("port") + "-Thread");
this.setDaemon(true);
}
@@ -843,18 +856,20 @@ public final class Application {
final String home = new File(System.getProperty(RESNAME_APP_HOME, "")).getCanonicalPath().replace('\\', '/');
System.setProperty(RESNAME_APP_HOME, home);
String confsubpath = System.getProperty(RESNAME_APP_CONF, "conf");
File appfile;
if (confsubpath.charAt(0) == '/' || confsubpath.indexOf(':') > 0) {
appfile = new File(confsubpath).getCanonicalFile();
URI appconf;
if (confsubpath.contains("://")) {
appconf = URI.create(confsubpath + (confsubpath.endsWith("/") ? "" : "/") + "application.xml");
} else if (confsubpath.charAt(0) == '/' || confsubpath.indexOf(':') > 0) {
appconf = new File(confsubpath, "application.xml").toURI();
} else {
appfile = new File(new File(home), confsubpath);
appconf = new File(new File(home, confsubpath), "application.xml").toURI();
}
File appconf = new File(appfile, "application.xml");
return new Application(singleton, load(new FileInputStream(appconf)));
return new Application(singleton, load(appconf.toURL().openStream()));
}
public static void main(String[] args) throws Exception {
Utility.midnight(); //先初始化一下Utility
Thread.currentThread().setName("Redkale-Application-Main-Thread");
//运行主程序
final Application application = Application.create(false);
if (System.getProperty("CMD") != null) {

View File

@@ -207,7 +207,9 @@ public final class ClassFilter<T> {
} catch (Throwable cfe) {
if (finest && !clazzname.startsWith("sun.") && !clazzname.startsWith("javax.")
&& !clazzname.startsWith("com.sun.") && !clazzname.startsWith("jdk.") && !clazzname.startsWith("META-INF")
&& (!(cfe instanceof NoClassDefFoundError) || (cfe instanceof UnsupportedClassVersionError) || ((NoClassDefFoundError) cfe).getMessage().startsWith("java.lang.NoClassDefFoundError: java"))) {
&& !clazzname.startsWith("com.mysql.") && !clazzname.startsWith("com.microsoft.")
&& !clazzname.startsWith("org.redkale") && (clazzname.contains("Service") || clazzname.contains("Servlet"))) {
//&& (!(cfe instanceof NoClassDefFoundError) || (cfe instanceof UnsupportedClassVersionError) || ((NoClassDefFoundError) cfe).getMessage().startsWith("java.lang.NoClassDefFoundError: java"))) {
logger.log(Level.FINEST, ClassFilter.class.getSimpleName() + " filter error for class: " + clazzname + (url == null ? "" : (" in " + url)), cfe);
}
}

View File

@@ -46,19 +46,19 @@ public class LogFileHandler extends Handler {
private static final String format = "%1$tY-%1$tm-%1$td %1$tH:%1$tM:%1$tS.%tL %4$s %2$s\r\n%5$s%6$s\r\n";
@Override
public String format(LogRecord record) {
public String format(LogRecord log) {
String source;
if (record.getSourceClassName() != null) {
source = record.getSourceClassName();
if (record.getSourceMethodName() != null) {
source += " " + record.getSourceMethodName();
if (log.getSourceClassName() != null) {
source = log.getSourceClassName();
if (log.getSourceMethodName() != null) {
source += " " + log.getSourceMethodName();
}
} else {
source = record.getLoggerName();
source = log.getLoggerName();
}
String message = formatMessage(record);
String message = formatMessage(log);
String throwable = "";
if (record.getThrown() != null) {
if (log.getThrown() != null) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw) {
@Override
@@ -67,22 +67,22 @@ public class LogFileHandler extends Handler {
}
};
pw.println();
record.getThrown().printStackTrace(pw);
log.getThrown().printStackTrace(pw);
pw.close();
throwable = sw.toString();
}
return String.format(format,
System.currentTimeMillis(),
source,
record.getLoggerName(),
record.getLevel().getName(),
log.getLoggerName(),
log.getLevel().getName(),
message,
throwable);
}
}
protected final LinkedBlockingQueue<LogRecord> records = new LinkedBlockingQueue();
protected final LinkedBlockingQueue<LogRecord> logqueue = new LinkedBlockingQueue();
private String pattern;
@@ -133,7 +133,7 @@ public class LogFileHandler extends Handler {
}
private void open() {
final String name = "Logging-" + getClass().getSimpleName() + "-Thread";
final String name = "Redkale-Logging-" + getClass().getSimpleName() + "-Thread";
new Thread() {
{
setName(name);
@@ -144,9 +144,9 @@ public class LogFileHandler extends Handler {
public void run() {
while (true) {
try {
LogRecord record = records.take();
LogRecord log = logqueue.take();
final boolean bigger = (limit > 0 && limit <= loglength.get());
final boolean changeday = tomorrow <= record.getMillis();
final boolean changeday = tomorrow <= log.getMillis();
if (bigger || changeday) {
updateTomorrow();
if (logstream != null) {
@@ -193,12 +193,12 @@ public class LogFileHandler extends Handler {
logunusualstream = new FileOutputStream(logunusualfile, append);
}
//----------------------写日志-------------------------
String message = getFormatter().format(record);
String message = getFormatter().format(log);
String encoding = getEncoding();
byte[] bytes = encoding == null ? message.getBytes() : message.getBytes(encoding);
logstream.write(bytes);
loglength.addAndGet(bytes.length);
if (unusual != null && (record.getLevel() == Level.WARNING || record.getLevel() == Level.SEVERE)) {
if (unusual != null && (log.getLevel() == Level.WARNING || log.getLevel() == Level.SEVERE)) {
logunusualstream.write(bytes);
logunusuallength.addAndGet(bytes.length);
}
@@ -310,21 +310,21 @@ public class LogFileHandler extends Handler {
}
@Override
public void publish(LogRecord record) {
final String sourceClassName = record.getSourceClassName();
public void publish(LogRecord log) {
final String sourceClassName = log.getSourceClassName();
if (sourceClassName == null || true) {
StackTraceElement[] ses = new Throwable().getStackTrace();
for (int i = 2; i < ses.length; i++) {
if (ses[i].getClassName().startsWith("java.util.logging")) continue;
record.setSourceClassName('[' + Thread.currentThread().getName() + "] " + ses[i].getClassName());
record.setSourceMethodName(ses[i].getMethodName());
log.setSourceClassName('[' + Thread.currentThread().getName() + "] " + ses[i].getClassName());
log.setSourceMethodName(ses[i].getMethodName());
break;
}
} else {
record.setSourceClassName('[' + Thread.currentThread().getName() + "] " + sourceClassName);
log.setSourceClassName('[' + Thread.currentThread().getName() + "] " + sourceClassName);
}
if (denyreg != null && denyreg.matcher(record.getMessage()).find()) return;
records.offer(record);
if (denyreg != null && denyreg.matcher(log.getMessage()).find()) return;
logqueue.offer(log);
}
@Override

View File

@@ -303,7 +303,7 @@ public abstract class NodeServer {
//NodeServer.this.watchFactory.inject(src);
if (source instanceof Service && needinit) ((Service) source).init(sourceConf);
} catch (Exception e) {
logger.log(Level.SEVERE, "DataSource inject error", e);
logger.log(Level.SEVERE, "[" + Thread.currentThread().getName() + "] DataSource inject error", e);
}
}, DataSource.class);
@@ -437,7 +437,7 @@ public abstract class NodeServer {
final ResourceFactory.ResourceLoader resourceLoader = (ResourceFactory rf, final Object src, final String resourceName, Field field, final Object attachment) -> {
try {
if (SncpClient.parseMethod(serviceImplClass).isEmpty() && serviceImplClass.getAnnotation(Priority.class) == null) { //class没有可用的方法且没有标记启动优先级的 通常为BaseService
logger.log(Level.FINE, serviceImplClass + " cannot load because not found less one public non-final method");
if (!serviceImplClass.getName().startsWith("org.redkale.")) logger.log(Level.FINE, serviceImplClass + " cannot load because not found less one public non-final method");
return;
}

View File

@@ -35,7 +35,7 @@ public class FilterWatchService extends AbstractWatchService {
@Resource
protected Application application;
@RestMapping(name = "addfilter", auth = false, comment = "动态增加Filter")
@RestMapping(name = "addFilter", auth = false, comment = "动态增加Filter")
public RetResult addFilter(@RestUploadFile(maxLength = 10 * 1024 * 1024, fileNameReg = "\\.jar$") byte[] jar,
@RestParam(name = "server", comment = "Server节点名") final String serverName,
@RestParam(name = "type", comment = "Filter类名") final String filterType) throws IOException {

View File

@@ -50,7 +50,7 @@ public class ServerWatchService extends AbstractWatchService {
return new RetResult(rs);
}
@RestMapping(name = "changeaddress", comment = "更改Server的监听地址和端口")
@RestMapping(name = "changeAddress", comment = "更改Server的监听地址和端口")
public RetResult changeAddress(@RestParam(name = "#port:") final int oldport,
@RestParam(name = "#newhost:") final String newhost, @RestParam(name = "#newport:") final int newport) {
if (oldport < 1) return new RetResult(RET_WATCH_PARAMS_ILLEGAL, "not found param `oldport`");

View File

@@ -30,8 +30,8 @@ public class ServiceWatchService extends AbstractWatchService {
protected Application application;
@RestConvert(type = void.class)
@RestMapping(name = "setfield", auth = false, comment = "设置Service中指定字段的内容")
public RetResult setfield(@RestParam(name = "name", comment = "Service的资源名") String name,
@RestMapping(name = "setField", auth = false, comment = "设置Service中指定字段的内容")
public RetResult setField(@RestParam(name = "name", comment = "Service的资源名") String name,
@RestParam(name = "type", comment = "Service的类名") String type,
@RestParam(name = "field", comment = "字段名") String field,
@RestParam(name = "value", comment = "字段值") String value) {
@@ -65,8 +65,8 @@ public class ServiceWatchService extends AbstractWatchService {
}
@RestConvert(type = void.class)
@RestMapping(name = "getfield", auth = false, comment = "查询Service中指定字段的内容")
public RetResult getfield(@RestParam(name = "name", comment = "Service的资源名") String name,
@RestMapping(name = "getField", auth = false, comment = "查询Service中指定字段的内容")
public RetResult getField(@RestParam(name = "name", comment = "Service的资源名") String name,
@RestParam(name = "type", comment = "Service的类名") String type,
@RestParam(name = "field", comment = "字段名") String field) {
if (name == null) name = "";
@@ -98,8 +98,8 @@ public class ServiceWatchService extends AbstractWatchService {
}
@RestConvert(type = void.class)
@RestMapping(name = "runmethod", auth = false, comment = "调用Service中指定方法")
public RetResult runmethod(@RestParam(name = "name", comment = "Service的资源名") String name,
@RestMapping(name = "runMethod", auth = false, comment = "调用Service中指定方法")
public RetResult runMethod(@RestParam(name = "name", comment = "Service的资源名") String name,
@RestParam(name = "type", comment = "Service的类名") String type,
@RestParam(name = "method", comment = "Service的方法名") String method,
@RestParam(name = "params", comment = "方法的参数值") List<String> params,
@@ -169,28 +169,28 @@ public class ServiceWatchService extends AbstractWatchService {
return dest;
}
@RestMapping(name = "load", auth = false, comment = "动态增加Service")
@RestMapping(name = "loadService", auth = false, comment = "动态增加Service")
public RetResult loadService(@RestParam(name = "type", comment = "Service的类名") String type,
@RestUploadFile(maxLength = 10 * 1024 * 1024, fileNameReg = "\\.jar$") byte[] jar) {
//待开发
return RetResult.success();
}
@RestMapping(name = "reload", auth = false, comment = "重新加载Service")
@RestMapping(name = "reloadService", auth = false, comment = "重新加载Service")
public RetResult reloadService(@RestParam(name = "name", comment = "Service的资源名") String name,
@RestParam(name = "type", comment = "Service的类名") String type) {
//待开发
return RetResult.success();
}
@RestMapping(name = "stop", auth = false, comment = "动态停止Service")
@RestMapping(name = "stopService", auth = false, comment = "动态停止Service")
public RetResult stopService(@RestParam(name = "name", comment = "Service的资源名") String name,
@RestParam(name = "type", comment = "Service的类名") String type) {
//待开发
return RetResult.success();
}
@RestMapping(name = "find", auth = false, comment = "查找Service")
@RestMapping(name = "findService", auth = false, comment = "查找Service")
public RetResult find(@RestParam(name = "name", comment = "Service的资源名") String name,
@RestParam(name = "type", comment = "Service的类名") String type) {
//待开发

View File

@@ -25,13 +25,13 @@ public class ServletWatchService extends AbstractWatchService {
@Resource
protected TransportFactory transportFactory;
//
// @RestMapping(name = "load", auth = false, comment = "动态增加Servlet")
// @RestMapping(name = "loadServlet", auth = false, comment = "动态增加Servlet")
// public RetResult loadServlet(String type, @RestUploadFile(maxLength = 10 * 1024 * 1024, fileNameReg = "\\.jar$") byte[] jar) {
// //待开发
// return RetResult.success();
// }
//
// @RestMapping(name = "stop", auth = false, comment = "动态停止Servlet")
// @RestMapping(name = "stopServlet", auth = false, comment = "动态停止Servlet")
// public RetResult stopServlet(String type) {
// //待开发
// return RetResult.success();

View File

@@ -7,7 +7,8 @@ package org.redkale.convert;
import java.lang.reflect.Type;
import java.nio.ByteBuffer;
import java.util.function.Supplier;
import java.util.function.*;
import org.redkale.util.Attribute;
/**
* 序列化/反序列化操作类
@@ -31,6 +32,20 @@ public abstract class Convert<R extends Reader, W extends Writer> {
return this.factory;
}
protected <S extends W> S configWrite(S writer) {
return writer;
}
protected <S extends W> S fieldFunc(S writer, BiFunction<Attribute, Object, Object> objFieldFunc, Function<Object, ConvertField[]> objExtFunc) {
writer.objFieldFunc = objFieldFunc;
writer.objExtFunc = objExtFunc;
return writer;
}
public abstract Convert<R, W> newConvert(final BiFunction<Attribute, Object, Object> objFieldFunc);
public abstract Convert<R, W> newConvert(final BiFunction<Attribute, Object, Object> objFieldFunc, Function<Object, ConvertField[]> objExtFunc);
public abstract boolean isBinary();
public abstract <T> T convertFrom(final Type type, final byte[] bytes);

View File

@@ -91,8 +91,9 @@ public abstract class ConvertFactory<R extends Reader, W extends Writer> {
this.register(Number.class, NumberSimpledCoder.instance);
this.register(String.class, StringSimpledCoder.instance);
this.register(StringConvertWrapper.class, StringConvertWrapperSimpledCoder.instance);
this.register(StringWrapper.class, StringWrapperSimpledCoder.instance);
this.register(CharSequence.class, CharSequenceSimpledCoder.instance);
this.register(StringBuilder.class, CharSequenceSimpledCoder.StringBuilderSimpledCoder.instance);
this.register(java.util.Date.class, DateSimpledCoder.instance);
this.register(java.time.Duration.class, DurationSimpledCoder.instance);
this.register(AtomicInteger.class, AtomicIntegerSimpledCoder.instance);
@@ -132,6 +133,54 @@ public abstract class ConvertFactory<R extends Reader, W extends Writer> {
}
});
try {
Class sqldateClass = Class.forName("java.sql.Date");
this.register(sqldateClass, new SimpledCoder<R, W, java.sql.Date>() {
@Override
public void convertTo(W out, java.sql.Date value) {
out.writeSmallString(value == null ? null : value.toString());
}
@Override
public java.sql.Date convertFrom(R in) {
String t = in.readSmallString();
return t == null ? null : java.sql.Date.valueOf(t);
}
});
Class sqltimeClass = Class.forName("java.sql.Time");
this.register(sqltimeClass, new SimpledCoder<R, W, java.sql.Time>() {
@Override
public void convertTo(W out, java.sql.Time value) {
out.writeSmallString(value == null ? null : value.toString());
}
@Override
public java.sql.Time convertFrom(R in) {
String t = in.readSmallString();
return t == null ? null : java.sql.Time.valueOf(t);
}
});
Class timestampClass = Class.forName("java.sql.Timestamp");
this.register(timestampClass, new SimpledCoder<R, W, java.sql.Timestamp>() {
@Override
public void convertTo(W out, java.sql.Timestamp value) {
out.writeSmallString(value == null ? null : value.toString());
}
@Override
public java.sql.Timestamp convertFrom(R in) {
String t = in.readSmallString();
return t == null ? null : java.sql.Timestamp.valueOf(t);
}
});
} catch (Throwable t) {
}
}
}
@@ -433,25 +482,32 @@ public abstract class ConvertFactory<R extends Reader, W extends Writer> {
public final boolean register(final Class type, String column, ConvertColumnEntry entry) {
if (type == null || column == null || entry == null) return false;
Field field = null;
try {
final Field field = type.getDeclaredField(column);
String get = "get";
if (field.getType() == boolean.class || field.getType() == Boolean.class) get = "is";
char[] cols = column.toCharArray();
cols[0] = Character.toUpperCase(cols[0]);
String col2 = new String(cols);
try {
register(type.getMethod(get + col2), entry);
} catch (Exception ex) {
}
try {
register(type.getMethod("set" + col2, field.getType()), entry);
} catch (Exception ex) {
}
return register(field, entry);
field = type.getDeclaredField(column);
} catch (Exception e) {
return false;
}
String get = "get";
if (field != null && (field.getType() == boolean.class || field.getType() == Boolean.class)) get = "is";
char[] cols = column.toCharArray();
cols[0] = Character.toUpperCase(cols[0]);
final String bigColumn = new String(cols);
try {
register(type.getMethod(get + bigColumn), entry);
} catch (NoSuchMethodException mex) {
if (get.length() >= 3) { //get
try {
register(type.getMethod("is" + bigColumn), entry);
} catch (Exception ex) {
}
}
} catch (Exception ex) {
}
try {
register(type.getMethod("set" + bigColumn, field.getType()), entry);
} catch (Exception ex) {
}
return field == null ? true : register(field, entry);
}
public final <E> boolean register(final AccessibleObject field, final ConvertColumnEntry entry) {
@@ -703,7 +759,8 @@ public abstract class ConvertFactory<R extends Reader, W extends Writer> {
encoder = new OptionalCoder(this, type);
} else if (clazz == Object.class) {
return (Encodeable<W, E>) this.anyEncoder;
} else if (!clazz.getName().startsWith("java.") || java.net.HttpCookie.class == clazz || java.util.AbstractMap.SimpleEntry.class == clazz) {
} else if (!clazz.getName().startsWith("java.") || java.net.HttpCookie.class == clazz
|| java.util.Map.Entry.class == clazz || java.util.AbstractMap.SimpleEntry.class == clazz) {
Encodeable simpleCoder = null;
for (final Method method : clazz.getDeclaredMethods()) {
if (!Modifier.isStatic(method.getModifiers())) continue;

View File

@@ -0,0 +1,102 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkale.convert;
import java.io.Serializable;
import java.lang.reflect.Type;
import org.redkale.convert.json.JsonConvert;
/**
* newConvert参数中的Function返回结果的数据类
*
* <p>
* 详情见: https://redkale.org
*
* @author zhangjx
*/
public class ConvertField implements Serializable {

    // Field name to emit during serialization.
    protected String name;

    // Declared type of the field; may be null, in which case the value's runtime class is used.
    protected Type type;

    // Output position/order of the field; 0 means "unspecified".
    protected int position;

    // The field's value; may be null.
    protected Object value;

    public ConvertField() {
    }

    public ConvertField(String name, Object value) {
        this(name, null, 0, value);
    }

    public ConvertField(String name, int position, Object value) {
        this(name, null, position, value);
    }

    public ConvertField(String name, Type type, Object value) {
        this(name, type, 0, value);
    }

    /**
     * Full constructor; the other constructors delegate here.
     *
     * @param name     field name
     * @param type     field type, may be null
     * @param position field output position
     * @param value    field value, may be null
     */
    public ConvertField(String name, Type type, int position, Object value) {
        this.name = name;
        this.type = type;
        this.position = position;
        this.value = value;
    }

    /**
     * Builds an array of ConvertField from alternating (name, value) pairs.
     * A null or empty argument list yields an empty array (previously a null
     * array raised a NullPointerException). A trailing unpaired item is
     * silently ignored, matching the original integer-division behavior.
     *
     * @param items alternating name/value pairs
     *
     * @return ConvertField array, never null
     */
    public static ConvertField[] ofArray(Object... items) {
        if (items == null || items.length == 0) return new ConvertField[0];
        int len = items.length / 2; // odd trailing item has no value and is dropped
        ConvertField[] rs = new ConvertField[len];
        for (int i = 0; i < len; i++) {
            rs[i] = new ConvertField(items[i * 2].toString(), items[i * 2 + 1]);
        }
        return rs;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Type getType() {
        return type;
    }

    public void setType(Type type) {
        this.type = type;
    }

    public int getPosition() {
        return position;
    }

    public void setPosition(int position) {
        this.position = position;
    }

    public Object getValue() {
        return value;
    }

    public void setValue(Object value) {
        this.value = value;
    }

    @Override
    public String toString() {
        // JSON rendering via the project's root JSON convert instance.
        return JsonConvert.root().convertTo(this);
    }
}

View File

@@ -164,9 +164,22 @@ public class ObjectEncoder<W extends Writer, T> implements Encodeable<W, T> {
return;
}
if (out.writeObjectB(value) < 0) {
int maxPosition = 0;
for (EnMember member : members) {
maxPosition = member.getPosition();
out.writeObjectField(member, value);
}
if (out.objExtFunc != null) {
ConvertField[] extFields = out.objExtFunc.apply(value);
if (extFields != null) {
Encodeable<W, ?> anyEncoder = factory.getAnyEncoder();
for (ConvertField en : extFields) {
if (en == null) continue;
maxPosition++;
out.writeObjectField(en.getName(), en.getType(), Math.max(en.getPosition(), maxPosition), anyEncoder, en.getValue());
}
}
}
}
out.writeObjectE(value);
}

View File

@@ -1,63 +0,0 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkale.convert;
import org.redkale.convert.json.JsonConvert;
/**
* 序列化去掉引号的String对象。
* <blockquote><pre>
* 场景: JavaBean bean = ... ;
* Map map = new HashMap();
* map.put("bean", a);
* records.add(map);
* records需要在后期序列化写入库。 但是在这期间bean的内部字段值可能就变化了会导致入库时并不是records.add的快照信息。
* 所以需要使用StringConvertWrapper
* Map map = new HashMap();
* map.put("bean", new StringConvertWrapper(bean.toString()));
* records.add(map);
* 这样既可以保持快照又不会在bean的值上面多一层引号。
* </pre></blockquote>
* <p>
* 详情见: https://redkale.org
*
* @author zhangjx
*/
public class StringConvertWrapper {

    // The pre-rendered string; emitted without an extra layer of quoting.
    protected String value;

    public StringConvertWrapper() {
    }

    public StringConvertWrapper(String value) {
        this.value = value;
    }

    /**
     * Creates a wrapper using the root JSON convert to render non-String values.
     *
     * @param value the object to snapshot, may be null
     *
     * @return a new wrapper, never null
     */
    public static StringConvertWrapper create(Object value) {
        return create(JsonConvert.root(), value);
    }

    /**
     * Creates a wrapper, rendering the value with the given convert unless it
     * is already null or a String.
     *
     * @param convert the text convert used for non-String values
     * @param value   the object to snapshot, may be null
     *
     * @return a new wrapper, never null
     */
    public static StringConvertWrapper create(TextConvert convert, Object value) {
        String text;
        if (value == null) {
            text = null;
        } else if (value instanceof String) {
            text = (String) value;
        } else {
            text = convert.convertTo(value);
        }
        return new StringConvertWrapper(text);
    }

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    @Override
    public String toString() {
        return value;
    }
}

View File

@@ -6,6 +6,8 @@
package org.redkale.convert;
import java.lang.reflect.*;
import java.util.function.*;
import org.redkale.util.*;
/**
* 序列化的数据输出流
@@ -23,6 +25,12 @@ public abstract class Writer {
//convertTo时是否以指定Type的ObjectEncoder进行处理
protected Type specify;
//对某个字段值进行动态处理
protected BiFunction<Attribute, Object, Object> objFieldFunc;
//对某个对象进行动态扩展字段值处理
protected Function<Object, ConvertField[]> objExtFunc;
/**
* 设置specify
*
@@ -38,6 +46,11 @@ public abstract class Writer {
}
}
protected boolean recycle() {
this.objFieldFunc = null;
return true;
}
/**
* 返回specify
*
@@ -105,7 +118,12 @@ public abstract class Writer {
*/
@SuppressWarnings("unchecked")
public void writeObjectField(final EnMember member, Object obj) {
Object value = member.attribute.get(obj);
Object value;
if (objFieldFunc == null) {
value = member.attribute.get(obj);
} else {
value = objFieldFunc.apply(member.attribute, obj);
}
if (value == null) return;
if (tiny()) {
if (member.istring) {
@@ -114,11 +132,49 @@ public abstract class Writer {
if (!((Boolean) value)) return;
}
}
this.writeFieldName(member);
Attribute attr = member.getAttribute();
this.writeFieldName(attr.field(), attr.genericType(), member.getPosition());
member.encoder.convertTo(this, value);
this.comma = true;
}
/**
* 输出一个对象的某个扩展字段
*
*
* @param fieldName 字段名称
* @param fieldType 字段类型
* @param fieldPos 字段顺序
* @param anyEncoder Encoder
* @param value 写入的字段对象
*/
@SuppressWarnings("unchecked")
public void writeObjectField(final String fieldName, Type fieldType, int fieldPos, Encodeable anyEncoder, Object value) {
if (value == null) return;
if (fieldType == null) fieldType = value.getClass();
if (tiny() && fieldType instanceof Class) {
Class clazz = (Class) fieldType;
if (CharSequence.class.isAssignableFrom(clazz)) {
if (((CharSequence) value).length() == 0) return;
} else if (clazz == boolean.class || clazz == Boolean.class) {
if (!((Boolean) value)) return;
}
}
this.writeFieldName(fieldName, fieldType, fieldPos);
anyEncoder.convertTo(this, value);
this.comma = true;
}
/**
* 输出一个字段名
*
* @param member 字段
*/
public final void writeFieldName(final EnMember member) {
Attribute attr = member.getAttribute();
this.writeFieldName(attr.field(), attr.genericType(), member.getPosition());
}
/**
* 输出一个对象后的操作
*
@@ -176,9 +232,11 @@ public abstract class Writer {
/**
* 输出一个字段名
*
* @param member 字段的EnMember对象
* @param fieldName 字段名称
* @param fieldType 字段类型
* @param fieldPos 字段顺序
*/
public abstract void writeFieldName(EnMember member);
public abstract void writeFieldName(String fieldName, Type fieldType, int fieldPos);
/**
* 写入一个boolean值
@@ -262,5 +320,5 @@ public abstract class Writer {
*
* @param value StringConvertWrapper值
*/
public abstract void writeWrapper(StringConvertWrapper value);
public abstract void writeWrapper(StringWrapper value);
}

View File

@@ -131,6 +131,7 @@ public class BsonByteBufferWriter extends BsonWriter {
@Override
protected boolean recycle() {
super.recycle();
this.index = 0;
this.specify = null;
this.buffers = null;

View File

@@ -37,7 +37,7 @@ import org.redkale.util.*;
*
* @author zhangjx
*/
public final class BsonConvert extends BinaryConvert<BsonReader, BsonWriter> {
public class BsonConvert extends BinaryConvert<BsonReader, BsonWriter> {
private static final ObjectPool<BsonReader> readerPool = BsonReader.createPool(Integer.getInteger("convert.bson.pool.size", 16));
@@ -59,6 +59,21 @@ public final class BsonConvert extends BinaryConvert<BsonReader, BsonWriter> {
return BsonFactory.root().getConvert();
}
@Override
public BsonConvert newConvert(final BiFunction<Attribute, Object, Object> fieldFunc) {
return newConvert(fieldFunc, null);
}
@Override
public BsonConvert newConvert(final BiFunction<Attribute, Object, Object> fieldFunc, Function<Object, ConvertField[]> objExtFunc) {
return new BsonConvert(getFactory(), tiny) {
@Override
protected <S extends BsonWriter> S configWrite(S writer) {
return fieldFunc(writer, fieldFunc, objExtFunc);
}
};
}
//------------------------------ reader -----------------------------------------------------------
public BsonReader pollBsonReader(final ByteBuffer... buffers) {
return new BsonByteBufferReader((ConvertMask) null, buffers);
@@ -78,11 +93,11 @@ public final class BsonConvert extends BinaryConvert<BsonReader, BsonWriter> {
//------------------------------ writer -----------------------------------------------------------
public BsonByteBufferWriter pollBsonWriter(final Supplier<ByteBuffer> supplier) {
return new BsonByteBufferWriter(tiny, supplier);
return configWrite(new BsonByteBufferWriter(tiny, supplier));
}
public BsonWriter pollBsonWriter(final OutputStream out) {
return new BsonStreamWriter(tiny, out);
return configWrite(new BsonStreamWriter(tiny, out));
}
public BsonWriter pollBsonWriter() {
@@ -94,6 +109,7 @@ public final class BsonConvert extends BinaryConvert<BsonReader, BsonWriter> {
}
//------------------------------ convertFrom -----------------------------------------------------------
@Override
public <T> T convertFrom(final Type type, final byte[] bytes) {
if (bytes == null) return null;
return convertFrom(type, bytes, 0, bytes.length);
@@ -173,33 +189,33 @@ public final class BsonConvert extends BinaryConvert<BsonReader, BsonWriter> {
public void convertTo(final OutputStream out, final Object value) {
if (value == null) {
new BsonStreamWriter(tiny, out).writeNull();
pollBsonWriter(out).writeNull();
} else {
factory.loadEncoder(value.getClass()).convertTo(new BsonStreamWriter(tiny, out), value);
factory.loadEncoder(value.getClass()).convertTo(pollBsonWriter(out), value);
}
}
public void convertTo(final OutputStream out, final Type type, final Object value) {
if (type == null) return;
if (value == null) {
new BsonStreamWriter(tiny, out).writeNull();
pollBsonWriter(out).writeNull();
} else {
factory.loadEncoder(type).convertTo(new BsonStreamWriter(tiny, out), value);
factory.loadEncoder(type).convertTo(pollBsonWriter(out), value);
}
}
public void convertMapTo(final OutputStream out, final Object... values) {
if (values == null) {
new BsonStreamWriter(tiny, out).writeNull();
pollBsonWriter(out).writeNull();
} else {
((AnyEncoder) factory.getAnyEncoder()).convertMapTo(new BsonStreamWriter(tiny, out), values);
((AnyEncoder) factory.getAnyEncoder()).convertMapTo(pollBsonWriter(out), values);
}
}
@Override
public ByteBuffer[] convertTo(final Supplier<ByteBuffer> supplier, final Object value) {
if (supplier == null) return null;
BsonByteBufferWriter out = new BsonByteBufferWriter(tiny, supplier);
BsonByteBufferWriter out = pollBsonWriter(supplier);
if (value == null) {
out.writeNull();
} else {
@@ -211,7 +227,7 @@ public final class BsonConvert extends BinaryConvert<BsonReader, BsonWriter> {
@Override
public ByteBuffer[] convertTo(final Supplier<ByteBuffer> supplier, final Type type, final Object value) {
if (supplier == null || type == null) return null;
BsonByteBufferWriter out = new BsonByteBufferWriter(tiny, supplier);
BsonByteBufferWriter out = pollBsonWriter(supplier);
if (value == null) {
out.writeNull();
} else {
@@ -223,7 +239,7 @@ public final class BsonConvert extends BinaryConvert<BsonReader, BsonWriter> {
@Override
public ByteBuffer[] convertMapTo(final Supplier<ByteBuffer> supplier, final Object... values) {
if (supplier == null) return null;
BsonByteBufferWriter out = new BsonByteBufferWriter(tiny, supplier);
BsonByteBufferWriter out = pollBsonWriter(supplier);
if (values == null) {
out.writeNull();
} else {

View File

@@ -5,6 +5,7 @@
*/
package org.redkale.convert.bson;
import java.lang.reflect.Type;
import java.nio.ByteBuffer;
import org.redkale.convert.*;
import org.redkale.convert.ext.ByteSimpledCoder;
@@ -43,7 +44,7 @@ public class BsonWriter extends Writer {
}
protected BsonWriter(byte[] bs) {
this.content = bs;
this.content = bs == null ? new byte[0] : bs;
}
public BsonWriter() {
@@ -97,10 +98,12 @@ public class BsonWriter extends Writer {
count += len;
}
@Override
protected boolean recycle() {
super.recycle();
this.count = 0;
this.specify = null;
if (this.content.length > defaultSize) {
if (this.content != null && this.content.length > defaultSize) {
this.content = new byte[defaultSize];
}
return true;
@@ -198,11 +201,10 @@ public class BsonWriter extends Writer {
}
@Override
public final void writeFieldName(EnMember member) {
Attribute attribute = member.getAttribute();
public final void writeFieldName(String fieldName, Type fieldType, int fieldPos) {
writeByte(BsonReader.SIGN_HASNEXT);
writeSmallString(attribute.field());
writeByte(BsonFactory.typeEnum(attribute.type()));
writeSmallString(fieldName);
writeByte(BsonFactory.typeEnum(fieldType));
}
/**
@@ -242,7 +244,7 @@ public class BsonWriter extends Writer {
}
@Override
public final void writeWrapper(StringConvertWrapper value) {
public final void writeWrapper(StringWrapper value) {
this.writeString(value == null ? null : value.getValue());
}

View File

@@ -8,9 +8,11 @@ package org.redkale.convert.ext;
import org.redkale.convert.*;
/**
* CharSequence 的SimpledCoder实现
* CharSequence 的SimpledCoder实现
*
* <p>
* 详情见: https://redkale.org
*
* <p> 详情见: https://redkale.org
* @author zhangjx
* @param <R> Reader输入的子类型
* @param <W> Writer输出的子类型
@@ -28,4 +30,20 @@ public class CharSequenceSimpledCoder<R extends Reader, W extends Writer> extend
public CharSequence convertFrom(R in) {
return in.readString();
}
/**
 * SimpledCoder for StringBuilder: serialized as its current character
 * content; null round-trips as null.
 */
public static class StringBuilderSimpledCoder<R extends Reader, W extends Writer> extends SimpledCoder<R, W, StringBuilder> {

    public static final StringBuilderSimpledCoder instance = new StringBuilderSimpledCoder();

    @Override
    public void convertTo(W out, StringBuilder value) {
        if (value == null) {
            out.writeString(null);
        } else {
            out.writeString(value.toString());
        }
    }

    @Override
    public StringBuilder convertFrom(R in) {
        final String text = in.readString();
        if (text == null) return null;
        return new StringBuilder(text);
    }
}
}

View File

@@ -20,7 +20,7 @@ import org.redkale.convert.*;
*/
public class FileSimpledCoder<R extends Reader, W extends Writer> extends SimpledCoder<R, W, File> {
public static final PatternSimpledCoder instance = new PatternSimpledCoder();
public static final FileSimpledCoder instance = new FileSimpledCoder();
@Override
public void convertTo(W out, File value) {

View File

@@ -6,6 +6,7 @@
package org.redkale.convert.ext;
import org.redkale.convert.*;
import org.redkale.util.StringWrapper;
/**
* String 的SimpledCoder实现
@@ -17,18 +18,18 @@ import org.redkale.convert.*;
* @param <R> Reader输入的子类型
* @param <W> Writer输出的子类型
*/
public final class StringConvertWrapperSimpledCoder<R extends Reader, W extends Writer> extends SimpledCoder<R, W, StringConvertWrapper> {
public final class StringWrapperSimpledCoder<R extends Reader, W extends Writer> extends SimpledCoder<R, W, StringWrapper> {
public static final StringConvertWrapperSimpledCoder instance = new StringConvertWrapperSimpledCoder();
public static final StringWrapperSimpledCoder instance = new StringWrapperSimpledCoder();
@Override
public void convertTo(W out, StringConvertWrapper value) {
public void convertTo(W out, StringWrapper value) {
out.writeWrapper(value);
}
@Override
public StringConvertWrapper convertFrom(R in) {
return new StringConvertWrapper(in.readString());
public StringWrapper convertFrom(R in) {
return new StringWrapper(in.readString());
}
}

View File

@@ -48,6 +48,7 @@ public class JsonByteBufferWriter extends JsonWriter {
@Override
protected boolean recycle() {
super.recycle();
this.index = 0;
this.specify = null;
this.charset = null;
@@ -114,7 +115,7 @@ public class JsonByteBufferWriter extends JsonWriter {
int byteLength = quote ? 2 : 0;
ByteBuffer bb = null;
if (charset == null) {
byteLength += encodeUTF8Length(chs, start, len);
byteLength += Utility.encodeUTF8Length(chs, start, len);
} else {
bb = charset.encode(CharBuffer.wrap(chs, start, len));
byteLength += bb.remaining();
@@ -133,6 +134,13 @@ public class JsonByteBufferWriter extends JsonWriter {
} else if (c < 0x800) {
buffer.put((byte) (0xc0 | (c >> 6)));
buffer.put((byte) (0x80 | (c & 0x3f)));
} else if (Character.isSurrogate(c)) { //连取两个
int uc = Character.toCodePoint(c, chs[i + 1]);
buffer.put((byte) (0xf0 | ((uc >> 18))));
buffer.put((byte) (0x80 | ((uc >> 12) & 0x3f)));
buffer.put((byte) (0x80 | ((uc >> 6) & 0x3f)));
buffer.put((byte) (0x80 | (uc & 0x3f)));
i++;
} else {
buffer.put((byte) (0xe0 | ((c >> 12))));
buffer.put((byte) (0x80 | ((c >> 6) & 0x3f)));
@@ -154,7 +162,34 @@ public class JsonByteBufferWriter extends JsonWriter {
if (charset == null) { //UTF-8
final int limit = start + len;
for (int i = start; i < limit; i++) {
buffer = putUTF8Char(buffer, chs[i]);
char c = chs[i];
if (c < 0x80) {
if (!buffer.hasRemaining()) buffer = nextByteBuffer();
buffer.put((byte) c);
} else if (c < 0x800) {
if (!buffer.hasRemaining()) buffer = nextByteBuffer();
buffer.put((byte) (0xc0 | (c >> 6)));
if (!buffer.hasRemaining()) buffer = nextByteBuffer();
buffer.put((byte) (0x80 | (c & 0x3f)));
} else if (Character.isSurrogate(c)) { //连取两个
int uc = Character.toCodePoint(c, chs[i + 1]);
if (!buffer.hasRemaining()) buffer = nextByteBuffer();
buffer.put((byte) (0xf0 | ((uc >> 18))));
if (!buffer.hasRemaining()) buffer = nextByteBuffer();
buffer.put((byte) (0x80 | ((uc >> 12) & 0x3f)));
if (!buffer.hasRemaining()) buffer = nextByteBuffer();
buffer.put((byte) (0x80 | ((uc >> 6) & 0x3f)));
if (!buffer.hasRemaining()) buffer = nextByteBuffer();
buffer.put((byte) (0x80 | (uc & 0x3f)));
i++;
} else {
if (!buffer.hasRemaining()) buffer = nextByteBuffer();
buffer.put((byte) (0xe0 | ((c >> 12))));
if (!buffer.hasRemaining()) buffer = nextByteBuffer();
buffer.put((byte) (0x80 | ((c >> 6) & 0x3f)));
if (!buffer.hasRemaining()) buffer = nextByteBuffer();
buffer.put((byte) (0x80 | (c & 0x3f)));
}
}
} else {
while (bb.hasRemaining()) {
@@ -168,50 +203,18 @@ public class JsonByteBufferWriter extends JsonWriter {
}
}
private ByteBuffer putUTF8Char(ByteBuffer buffer, char c) {
if (c < 0x80) {
if (!buffer.hasRemaining()) buffer = nextByteBuffer();
buffer.put((byte) c);
} else if (c < 0x800) {
if (!buffer.hasRemaining()) buffer = nextByteBuffer();
buffer.put((byte) (0xc0 | (c >> 6)));
if (!buffer.hasRemaining()) buffer = nextByteBuffer();
buffer.put((byte) (0x80 | (c & 0x3f)));
} else {
if (!buffer.hasRemaining()) buffer = nextByteBuffer();
buffer.put((byte) (0xe0 | ((c >> 12))));
if (!buffer.hasRemaining()) buffer = nextByteBuffer();
buffer.put((byte) (0x80 | ((c >> 6) & 0x3f)));
if (!buffer.hasRemaining()) buffer = nextByteBuffer();
buffer.put((byte) (0x80 | (c & 0x3f)));
}
return buffer;
}
private ByteBuffer nextByteBuffer() {
this.buffers[this.index].flip();
return this.buffers[++this.index];
}
protected static int encodeUTF8Length(final char[] text, final int start, final int len) {
char c;
int size = 0;
final char[] chars = text;
final int limit = start + len;
for (int i = start; i < limit; i++) {
c = chars[i];
size += (c < 0x80 ? 1 : (c < 0x800 ? 2 : 3));
}
return size;
}
protected static int encodeEscapeUTF8Length(final char[] text, final int start, final int len) {
char c;
int size = 0;
final char[] chars = text;
final char[] chs = text;
final int limit = start + len;
for (int i = start; i < limit; i++) {
c = chars[i];
c = chs[i];
switch (c) {
case '\n': size += 2;
break;
@@ -224,7 +227,7 @@ public class JsonByteBufferWriter extends JsonWriter {
case '"': size += 2;
break;
default:
size += (c < 0x80 ? 1 : (c < 0x800 ? 2 : 3));
size += (c < 0x80 ? 1 : (c < 0x800 || Character.isSurrogate(c) ? 2 : 3));
break;
}
}
@@ -238,19 +241,39 @@ public class JsonByteBufferWriter extends JsonWriter {
* @param value String值
*/
@Override
public void writeTo(final boolean quote, final String value) {
char[] chs = Utility.charArray(value);
writeTo(-1, quote, chs, 0, chs.length);
public void writeLatin1To(final boolean quote, final String value) {
byte[] bs = Utility.byteArray(value);
int expandsize = expand(bs.length + (quote ? 2 : 0));
if (expandsize == 0) {// 只需要一个buffer
final ByteBuffer buffer = this.buffers[index];
if (quote) buffer.put((byte) '"');
buffer.put(bs);
if (quote) buffer.put((byte) '"');
} else {
ByteBuffer buffer = this.buffers[index];
if (quote) {
if (!buffer.hasRemaining()) buffer = nextByteBuffer();
buffer.put((byte) '"');
}
for (byte b : bs) {
if (!buffer.hasRemaining()) buffer = nextByteBuffer();
buffer.put(b);
}
if (quote) {
if (!buffer.hasRemaining()) buffer = nextByteBuffer();
buffer.put((byte) '"');
}
}
}
@Override
public void writeInt(int value) {
writeTo(false, String.valueOf(value));
writeLatin1To(false, String.valueOf(value));
}
@Override
public void writeLong(long value) {
writeTo(false, String.valueOf(value));
writeLatin1To(false, String.valueOf(value));
}
@Override
@@ -288,7 +311,8 @@ public class JsonByteBufferWriter extends JsonWriter {
if (expandsize == 0) { // 只需要一个buffer
final ByteBuffer buffer = this.buffers[index];
buffer.put((byte) '"');
for (char c : chs) {
for (int i = 0; i < chs.length; i++) {
char c = chs[i];
switch (c) {
case '\n': buffer.put((byte) '\\').put((byte) 'n');
break;
@@ -306,6 +330,13 @@ public class JsonByteBufferWriter extends JsonWriter {
} else if (c < 0x800) {
buffer.put((byte) (0xc0 | (c >> 6)));
buffer.put((byte) (0x80 | (c & 0x3f)));
} else if (Character.isSurrogate(c)) { //连取两个
int uc = Character.toCodePoint(c, chs[i + 1]);
buffer.put((byte) (0xf0 | ((uc >> 18))));
buffer.put((byte) (0x80 | ((uc >> 12) & 0x3f)));
buffer.put((byte) (0x80 | ((uc >> 6) & 0x3f)));
buffer.put((byte) (0x80 | (uc & 0x3f)));
i++;
} else {
buffer.put((byte) (0xe0 | ((c >> 12))));
buffer.put((byte) (0x80 | ((c >> 6) & 0x3f)));

View File

@@ -21,7 +21,7 @@ import org.redkale.util.*;
* @author zhangjx
*/
@SuppressWarnings("unchecked")
public final class JsonConvert extends TextConvert<JsonReader, JsonWriter> {
public class JsonConvert extends TextConvert<JsonReader, JsonWriter> {
public static final Type TYPE_MAP_STRING_STRING = new TypeToken<java.util.HashMap<String, String>>() {
}.getType();
@@ -46,6 +46,21 @@ public final class JsonConvert extends TextConvert<JsonReader, JsonWriter> {
return JsonFactory.root().getConvert();
}
@Override
public JsonConvert newConvert(final BiFunction<Attribute, Object, Object> fieldFunc) {
return newConvert(fieldFunc, null);
}
@Override
public JsonConvert newConvert(final BiFunction<Attribute, Object, Object> fieldFunc, Function<Object, ConvertField[]> objExtFunc) {
return new JsonConvert(getFactory(), tiny) {
@Override
protected <S extends JsonWriter> S configWrite(S writer) {
return fieldFunc(writer, fieldFunc, objExtFunc);
}
};
}
//------------------------------ reader -----------------------------------------------------------
public JsonReader pollJsonReader(final ByteBuffer... buffers) {
return new JsonByteBufferReader((ConvertMask) null, buffers);
@@ -65,19 +80,19 @@ public final class JsonConvert extends TextConvert<JsonReader, JsonWriter> {
//------------------------------ writer -----------------------------------------------------------
public JsonByteBufferWriter pollJsonWriter(final Supplier<ByteBuffer> supplier) {
return new JsonByteBufferWriter(tiny, supplier);
return configWrite(new JsonByteBufferWriter(tiny, supplier));
}
public JsonWriter pollJsonWriter(final OutputStream out) {
return new JsonStreamWriter(tiny, out);
return configWrite(new JsonStreamWriter(tiny, out));
}
public JsonWriter pollJsonWriter(final Charset charset, final OutputStream out) {
return new JsonStreamWriter(tiny, charset, out);
return configWrite(new JsonStreamWriter(tiny, charset, out));
}
public JsonWriter pollJsonWriter() {
return writerPool.get().tiny(tiny);
return configWrite(writerPool.get().tiny(tiny));
}
public void offerJsonWriter(final JsonWriter writer) {
@@ -85,6 +100,7 @@ public final class JsonConvert extends TextConvert<JsonReader, JsonWriter> {
}
//------------------------------ convertFrom -----------------------------------------------------------
@Override
public <T> T convertFrom(final Type type, final byte[] bytes) {
if (bytes == null) return null;
return convertFrom(type, new String(bytes, StandardCharsets.UTF_8));
@@ -190,7 +206,7 @@ public final class JsonConvert extends TextConvert<JsonReader, JsonWriter> {
public String convertTo(final Type type, final Object value) {
if (type == null) return null;
if (value == null) return "null";
final JsonWriter writer = writerPool.get().tiny(tiny);
final JsonWriter writer = pollJsonWriter();
writer.specify(type);
factory.loadEncoder(type).convertTo(writer, value);
String result = writer.toString();
@@ -201,7 +217,7 @@ public final class JsonConvert extends TextConvert<JsonReader, JsonWriter> {
@Override
public String convertMapTo(final Object... values) {
if (values == null) return "null";
final JsonWriter writer = writerPool.get().tiny(tiny);
final JsonWriter writer = pollJsonWriter();
((AnyEncoder) factory.getAnyEncoder()).convertMapTo(writer, values);
String result = writer.toString();
writerPool.accept(writer);
@@ -210,7 +226,7 @@ public final class JsonConvert extends TextConvert<JsonReader, JsonWriter> {
public void convertTo(final OutputStream out, final Object value) {
if (value == null) {
new JsonStreamWriter(tiny, out).writeNull();
pollJsonWriter(out).writeNull();
} else {
convertTo(out, value.getClass(), value);
}
@@ -219,9 +235,9 @@ public final class JsonConvert extends TextConvert<JsonReader, JsonWriter> {
public void convertTo(final OutputStream out, final Type type, final Object value) {
if (type == null) return;
if (value == null) {
new JsonStreamWriter(tiny, out).writeNull();
pollJsonWriter(out).writeNull();
} else {
final JsonWriter writer = writerPool.get().tiny(tiny);
final JsonWriter writer = pollJsonWriter();
writer.specify(type);
factory.loadEncoder(type).convertTo(writer, value);
byte[] bs = writer.toBytes();
@@ -236,9 +252,9 @@ public final class JsonConvert extends TextConvert<JsonReader, JsonWriter> {
public void convertMapTo(final OutputStream out, final Object... values) {
if (values == null) {
new JsonStreamWriter(tiny, out).writeNull();
pollJsonWriter(out).writeNull();
} else {
final JsonWriter writer = writerPool.get().tiny(tiny);
final JsonWriter writer = pollJsonWriter();
((AnyEncoder) factory.getAnyEncoder()).convertMapTo(writer, values);
byte[] bs = writer.toBytes();
writerPool.accept(writer);
@@ -253,7 +269,7 @@ public final class JsonConvert extends TextConvert<JsonReader, JsonWriter> {
@Override
public ByteBuffer[] convertTo(final Supplier<ByteBuffer> supplier, final Object value) {
if (supplier == null) return null;
JsonByteBufferWriter out = new JsonByteBufferWriter(tiny, null, supplier);
JsonByteBufferWriter out = pollJsonWriter(supplier);
if (value == null) {
out.writeNull();
} else {
@@ -265,7 +281,7 @@ public final class JsonConvert extends TextConvert<JsonReader, JsonWriter> {
@Override
public ByteBuffer[] convertTo(final Supplier<ByteBuffer> supplier, final Type type, final Object value) {
if (supplier == null || type == null) return null;
JsonByteBufferWriter out = new JsonByteBufferWriter(tiny, null, supplier);
JsonByteBufferWriter out = pollJsonWriter(supplier);
if (value == null) {
out.writeNull();
} else {
@@ -278,7 +294,7 @@ public final class JsonConvert extends TextConvert<JsonReader, JsonWriter> {
@Override
public ByteBuffer[] convertMapTo(final Supplier<ByteBuffer> supplier, final Object... values) {
if (supplier == null) return null;
JsonByteBufferWriter out = new JsonByteBufferWriter(tiny, null, supplier);
JsonByteBufferWriter out = pollJsonWriter(supplier);
if (values == null) {
out.writeNull();
} else {
@@ -320,14 +336,14 @@ public final class JsonConvert extends TextConvert<JsonReader, JsonWriter> {
public JsonWriter convertToWriter(final Type type, final Object value) {
if (type == null) return null;
final JsonWriter writer = writerPool.get().tiny(tiny);
final JsonWriter writer = pollJsonWriter();
writer.specify(type);
factory.loadEncoder(type).convertTo(writer, value);
return writer;
}
public JsonWriter convertMapToWriter(final Object... values) {
final JsonWriter writer = writerPool.get().tiny(tiny);
final JsonWriter writer = pollJsonWriter();
((AnyEncoder) factory.getAnyEncoder()).convertMapTo(writer, values);
return writer;
}

View File

@@ -26,10 +26,7 @@ public final class JsonFactory extends ConvertFactory<JsonReader, JsonWriter> {
private static final JsonFactory instance = new JsonFactory(null, Boolean.getBoolean("convert.json.tiny"));
static {
instance.register(InetAddress.class, InetAddressSimpledCoder.InetAddressJsonSimpledCoder.instance);
instance.register(InetSocketAddress.class, InetAddressSimpledCoder.InetSocketAddressJsonSimpledCoder.instance);
instance.register(DLong.class, DLongSimpledCoder.DLongJsonSimpledCoder.instance);
instance.register(BigInteger.class, BigIntegerSimpledCoder.BigIntegerJsonSimpledCoder.instance);
instance.register(Serializable.class, instance.loadEncoder(Object.class));
instance.register(AnyValue.class, instance.loadDecoder(AnyValue.DefaultAnyValue.class));
@@ -38,6 +35,12 @@ public final class JsonFactory extends ConvertFactory<JsonReader, JsonWriter> {
private JsonFactory(JsonFactory parent, boolean tiny) {
super(parent, tiny);
if (parent == null) {
this.register(InetAddress.class, InetAddressSimpledCoder.InetAddressJsonSimpledCoder.instance);
this.register(InetSocketAddress.class, InetAddressSimpledCoder.InetSocketAddressJsonSimpledCoder.instance);
this.register(DLong.class, DLongSimpledCoder.DLongJsonSimpledCoder.instance);
this.register(BigInteger.class, BigIntegerSimpledCoder.BigIntegerJsonSimpledCoder.instance);
}
}
@Override

View File

@@ -87,19 +87,19 @@ class JsonStreamWriter extends JsonByteBufferWriter {
* @param value String值
*/
@Override
public void writeTo(final boolean quote, final String value) {
public void writeLatin1To(final boolean quote, final String value) {
char[] chs = Utility.charArray(value);
writeTo(quote, chs, 0, chs.length);
}
@Override
public void writeInt(int value) {
writeTo(false, String.valueOf(value));
writeLatin1To(false, String.valueOf(value));
}
@Override
public void writeLong(long value) {
writeTo(false, String.valueOf(value));
writeLatin1To(false, String.valueOf(value));
}
@Override

View File

@@ -5,6 +5,7 @@
*/
package org.redkale.convert.json;
import java.lang.reflect.Type;
import java.nio.ByteBuffer;
import org.redkale.convert.*;
import org.redkale.util.*;
@@ -88,7 +89,7 @@ public class JsonWriter extends Writer {
* @param quote 是否加双引号
* @param value 非null且不含需要转义的字符的String值
*/
public void writeTo(final boolean quote, final String value) {
public void writeLatin1To(final boolean quote, final String value) {
int len = value.length();
expand(len + (quote ? 2 : 0));
if (quote) content[count++] = '"';
@@ -97,10 +98,12 @@ public class JsonWriter extends Writer {
if (quote) content[count++] = '"';
}
@Override
protected boolean recycle() {
super.recycle();
this.count = 0;
this.specify = null;
if (this.content.length > defaultSize) {
if (this.content != null && this.content.length > defaultSize) {
this.content = new char[defaultSize];
}
return true;
@@ -157,15 +160,15 @@ public class JsonWriter extends Writer {
}
@Override
public final void writeFieldName(EnMember member) {
public final void writeFieldName(String fieldName, Type fieldType, int fieldPos) {
if (this.comma) writeTo(',');
writeTo(true, member.getAttribute().field());
writeLatin1To(true, fieldName);
writeTo(':');
}
@Override
public final void writeSmallString(String value) {
writeTo(true, value);
writeLatin1To(true, value);
}
@Override
@@ -311,17 +314,17 @@ public class JsonWriter extends Writer {
@Override
public final void writeFloat(float value) {
writeTo(false, String.valueOf(value));
writeLatin1To(false, String.valueOf(value));
}
@Override
public final void writeDouble(double value) {
writeTo(false, String.valueOf(value));
writeLatin1To(false, String.valueOf(value));
}
@Override
public final void writeWrapper(StringConvertWrapper value) {
writeTo(false, String.valueOf(value));
public final void writeWrapper(StringWrapper value) {
writeLatin1To(false, String.valueOf(value));
}
@Override

View File

@@ -14,7 +14,7 @@ import java.util.concurrent.*;
import java.util.concurrent.atomic.*;
import java.util.function.*;
import javax.net.ssl.SSLContext;
import org.redkale.util.ObjectPool;
import org.redkale.util.*;
/**
*
@@ -52,10 +52,6 @@ public abstract class AsyncConnection implements ReadableByteChannel, WritableBy
//关联的事件数, 小于1表示没有事件
protected final AtomicInteger eventing = new AtomicInteger();
protected AsyncConnection(Context context) {
this(context.getBufferSupplier(), context.getBufferConsumer(), context.getSSLContext());
}
protected AsyncConnection(ObjectPool<ByteBuffer> bufferPool, SSLContext sslContext) {
this(bufferPool, bufferPool, sslContext);
}
@@ -68,6 +64,14 @@ public abstract class AsyncConnection implements ReadableByteChannel, WritableBy
this.sslContext = sslContext;
}
public Supplier<ByteBuffer> getBufferSupplier() {
return this.bufferSupplier;
}
public Consumer<ByteBuffer> getBufferConsumer() {
return this.bufferConsumer;
}
public final long getLastReadTime() {
return readtime;
}
@@ -114,8 +118,6 @@ public abstract class AsyncConnection implements ReadableByteChannel, WritableBy
public abstract void read(CompletionHandler<Integer, ByteBuffer> handler);
public abstract void read(long timeout, TimeUnit unit, CompletionHandler<Integer, ByteBuffer> handler);
@Override
public abstract int write(ByteBuffer src) throws IOException;
@@ -127,9 +129,9 @@ public abstract class AsyncConnection implements ReadableByteChannel, WritableBy
public abstract <A> void write(ByteBuffer[] srcs, int offset, int length, A attachment, CompletionHandler<Integer, ? super A> handler);
public void setReadBuffer(Buffer buffer) {
public void setReadBuffer(ByteBuffer buffer) {
if (this.readBuffer != null) throw new RuntimeException("repeat AsyncConnection.setReadBuffer");
this.readBuffer = (ByteBuffer) buffer;
this.readBuffer = buffer;
}
public ByteBuffer pollReadBuffer() {
@@ -138,22 +140,40 @@ public abstract class AsyncConnection implements ReadableByteChannel, WritableBy
this.readBuffer = null;
return rs;
}
// Thread thread = Thread.currentThread();
// if (thread instanceof IOThread) {
// return ((IOThread) thread).getBufferPool().get();
// }
return bufferSupplier.get();
}
public void offerBuffer(Buffer buffer) {
public void offerBuffer(ByteBuffer buffer) {
if (buffer == null) return;
bufferConsumer.accept((ByteBuffer) buffer);
// Thread thread = Thread.currentThread();
// if (thread instanceof IOThread) {
// ((IOThread) thread).getBufferPool().accept((ByteBuffer) buffer);
// return;
// }
bufferConsumer.accept(buffer);
}
public void offerBuffer(Buffer... buffers) {
public void offerBuffer(ByteBuffer... buffers) {
if (buffers == null) return;
for (Buffer buffer : buffers) {
bufferConsumer.accept((ByteBuffer) buffer);
Consumer<ByteBuffer> consumer = this.bufferConsumer;
// Thread thread = Thread.currentThread();
// if (thread instanceof IOThread) {
// consumer = ((IOThread) thread).getBufferPool();
// }
for (ByteBuffer buffer : buffers) {
consumer.accept(buffer);
}
}
public ByteBuffer pollWriteBuffer() {
// Thread thread = Thread.currentThread();
// if (thread instanceof IOThread) {
// return ((IOThread) thread).getBufferPool().get();
// }
return bufferSupplier.get();
}
@@ -186,7 +206,12 @@ public abstract class AsyncConnection implements ReadableByteChannel, WritableBy
}
}
if (this.readBuffer != null) {
bufferConsumer.accept(this.readBuffer);
Consumer<ByteBuffer> consumer = this.bufferConsumer;
// Thread thread = Thread.currentThread();
// if (thread instanceof IOThread) {
// consumer = ((IOThread) thread).getBufferPool();
// }
consumer.accept(this.readBuffer);
}
if (attributes == null) return;
try {
@@ -245,22 +270,6 @@ public abstract class AsyncConnection implements ReadableByteChannel, WritableBy
return createTCP(bufferPool, group, null, address, readTimeoutSeconds, writeTimeoutSeconds);
}
/**
* 创建TCP协议客户端连接
*
* @param context Context
* @param address 连接点子
* @param group 连接AsynchronousChannelGroup
* @param readTimeoutSeconds 读取超时秒数
* @param writeTimeoutSeconds 写入超时秒数
*
* @return 连接CompletableFuture
*/
public static CompletableFuture<AsyncConnection> createTCP(final Context context, final AsynchronousChannelGroup group,
final SocketAddress address, final int readTimeoutSeconds, final int writeTimeoutSeconds) {
return createTCP(context.getBufferSupplier(), context.getBufferConsumer(), group, context.getSSLContext(), address, readTimeoutSeconds, writeTimeoutSeconds);
}
/**
* 创建TCP协议客户端连接
*
@@ -371,35 +380,6 @@ public abstract class AsyncConnection implements ReadableByteChannel, WritableBy
return new UdpBioAsyncConnection(bufferPool, bufferPool, ch, sslContext, addr, client0, readTimeoutSeconds0, writeTimeoutSeconds0, livingCounter, closedCounter);
}
public static AsyncConnection create(final Context context, final AsynchronousSocketChannel ch) {
return create(context, ch, (SocketAddress) null, 0, 0);
}
public static AsyncConnection create(final Context context, final AsynchronousSocketChannel ch,
final SocketAddress addr0, final AtomicLong livingCounter, final AtomicLong closedCounter) {
return new TcpAioAsyncConnection(context.getBufferSupplier(), context.getBufferConsumer(), ch, context.sslContext, addr0, context.readTimeoutSeconds, context.writeTimeoutSeconds, livingCounter, closedCounter);
}
public static AsyncConnection create(final Context context, final AsynchronousSocketChannel ch,
final SocketAddress addr0, final int readTimeoutSeconds, final int writeTimeoutSeconds) {
return new TcpAioAsyncConnection(context.getBufferSupplier(), context.getBufferConsumer(), ch, null, addr0, readTimeoutSeconds, writeTimeoutSeconds, null, null);
}
public static AsyncConnection create(final Context context, final AsynchronousSocketChannel ch, SSLContext sslContext,
final SocketAddress addr0, final int readTimeoutSeconds, final int writeTimeoutSeconds) {
return new TcpAioAsyncConnection(context.getBufferSupplier(), context.getBufferConsumer(), ch, sslContext, addr0, readTimeoutSeconds, writeTimeoutSeconds, null, null);
}
public static AsyncConnection create(final Context context, final AsynchronousSocketChannel ch,
final SocketAddress addr0, final int readTimeoutSeconds, final int writeTimeoutSeconds, final AtomicLong livingCounter, final AtomicLong closedCounter) {
return new TcpAioAsyncConnection(context.getBufferSupplier(), context.getBufferConsumer(), ch, null, addr0, readTimeoutSeconds, writeTimeoutSeconds, livingCounter, closedCounter);
}
public static AsyncConnection create(final Context context, final AsynchronousSocketChannel ch, SSLContext sslContext,
final SocketAddress addr0, final int readTimeoutSeconds, final int writeTimeoutSeconds, final AtomicLong livingCounter, final AtomicLong closedCounter) {
return new TcpAioAsyncConnection(context.getBufferSupplier(), context.getBufferConsumer(), ch, sslContext, addr0, readTimeoutSeconds, writeTimeoutSeconds, livingCounter, closedCounter);
}
public static AsyncConnection create(final ObjectPool<ByteBuffer> bufferPool, final AsynchronousSocketChannel ch) {
return create(bufferPool, ch, null, 0, 0);
}

View File

@@ -6,11 +6,8 @@
package org.redkale.net;
import java.net.*;
import java.nio.*;
import java.nio.charset.*;
import java.util.Collection;
import java.util.concurrent.*;
import java.util.function.*;
import java.util.logging.*;
import javax.net.ssl.SSLContext;
import org.redkale.convert.bson.*;
@@ -39,12 +36,6 @@ public class Context {
//ByteBuffer的容量默认8K
protected final int bufferCapacity;
//ByteBuffer对象池
protected final ObjectPool<ByteBuffer> bufferPool;
//Response对象池
protected final ObjectPool<Response> responsePool;
//服务的根Servlet
protected final PrepareServlet prepare;
@@ -83,22 +74,18 @@ public class Context {
public Context(ContextConfig config) {
this(config.serverStartTime, config.logger, config.executor, config.sslContext,
config.bufferCapacity, config.bufferPool, config.responsePool, config.maxconns, config.maxbody,
config.charset, config.address, config.resourceFactory, config.prepare,
config.aliveTimeoutSeconds, config.readTimeoutSeconds, config.writeTimeoutSeconds);
config.bufferCapacity, config.maxconns, config.maxbody, config.charset, config.address, config.resourceFactory,
config.prepare, config.aliveTimeoutSeconds, config.readTimeoutSeconds, config.writeTimeoutSeconds);
}
public Context(long serverStartTime, Logger logger, ThreadPoolExecutor executor, SSLContext sslContext,
int bufferCapacity, ObjectPool<ByteBuffer> bufferPool, ObjectPool<Response> responsePool, final int maxconns,
final int maxbody, Charset charset, InetSocketAddress address, ResourceFactory resourceFactory,
final PrepareServlet prepare, final int aliveTimeoutSeconds, final int readTimeoutSeconds, final int writeTimeoutSeconds) {
int bufferCapacity, final int maxconns, final int maxbody, Charset charset, InetSocketAddress address,
ResourceFactory resourceFactory, PrepareServlet prepare, int aliveTimeoutSeconds, int readTimeoutSeconds, int writeTimeoutSeconds) {
this.serverStartTime = serverStartTime;
this.logger = logger;
this.executor = executor;
this.sslContext = sslContext;
this.bufferCapacity = bufferCapacity;
this.bufferPool = bufferPool;
this.responsePool = responsePool;
this.maxconns = maxconns;
this.maxbody = maxbody;
this.charset = StandardCharsets.UTF_8.equals(charset) ? null : charset;
@@ -160,36 +147,6 @@ public class Context {
return bufferCapacity;
}
public Supplier<ByteBuffer> getBufferSupplier() {
return bufferPool;
}
public Consumer<ByteBuffer> getBufferConsumer() {
return bufferPool;
}
public ByteBuffer pollBuffer() {
return bufferPool.get();
}
public void offerBuffer(ByteBuffer buffer) {
bufferPool.accept(buffer);
}
public void offerBuffer(ByteBuffer... buffers) {
if (buffers == null) return;
for (ByteBuffer buffer : buffers) {
bufferPool.accept(buffer);
}
}
public void offerBuffer(Collection<ByteBuffer> buffers) {
if (buffers == null) return;
for (ByteBuffer buffer : buffers) {
bufferPool.accept(buffer);
}
}
public Logger getLogger() {
return logger;
}
@@ -228,12 +185,6 @@ public class Context {
//ByteBuffer的容量默认8K
public int bufferCapacity;
//ByteBuffer对象池
public ObjectPool<ByteBuffer> bufferPool;
//Response对象池
public ObjectPool<Response> responsePool;
//服务的根Servlet
public PrepareServlet prepare;

View File

@@ -0,0 +1,61 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkale.net;
import java.nio.ByteBuffer;
import java.util.concurrent.ExecutorService;
import org.redkale.util.*;
/**
* 协议处理的IO线程类
*
* <p>
* 详情见: https://redkale.org
*
* @author zhangjx
*/
public class IOThread extends Thread {
protected Thread localThread;
protected final ExecutorService executor;
protected ObjectPool<ByteBuffer> bufferPool;
public IOThread(ExecutorService executor, ObjectPool<ByteBuffer> bufferPool, Runnable runner) {
super(runner);
this.executor = executor;
this.bufferPool = bufferPool;
this.setDaemon(true);
}
public void runAsync(Runnable runner) {
executor.execute(runner);
}
public ExecutorService getExecutor() {
return executor;
}
public ObjectPool<ByteBuffer> getBufferPool() {
return bufferPool;
}
@Override
public void run() {
this.localThread = Thread.currentThread();
super.run();
}
public boolean inSameThread() {
return this.localThread == Thread.currentThread();
}
public boolean inSameThread(Thread thread) {
return this.localThread == thread;
}
}

View File

@@ -8,7 +8,6 @@ package org.redkale.net;
import java.io.IOException;
import java.nio.*;
import java.nio.channels.*;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.*;
import org.redkale.util.*;
@@ -28,12 +27,15 @@ public class PrepareRunner implements Runnable {
private final Context context;
private final ObjectPool<Response> responsePool;
private ByteBuffer data;
private Response response;
public PrepareRunner(Context context, AsyncConnection channel, ByteBuffer data, Response response) {
public PrepareRunner(Context context, ObjectPool<Response> responsePool, AsyncConnection channel, ByteBuffer data, Response response) {
this.context = context;
this.responsePool = responsePool;
this.channel = channel;
this.data = data;
this.response = response;
@@ -41,8 +43,6 @@ public class PrepareRunner implements Runnable {
@Override
public void run() {
final boolean keepalive = response != null;
final ObjectPool<? extends Response> responsePool = context.responsePool;
if (data != null) { //BIO模式的UDP连接创建AsyncConnection时已经获取到ByteBuffer数据了
if (response == null) response = responsePool.get();
try {
@@ -56,12 +56,12 @@ public class PrepareRunner implements Runnable {
}
if (response == null) response = responsePool.get();
try {
channel.read(keepalive ? context.getAliveTimeoutSeconds() : context.getReadTimeoutSeconds(), TimeUnit.SECONDS,
new CompletionHandler<Integer, ByteBuffer>() {
channel.read(new CompletionHandler<Integer, ByteBuffer>() {
@Override
public void completed(Integer count, ByteBuffer buffer) {
if (count < 1) {
response.request.offerReadBuffer(buffer);
buffer.clear();
channel.setReadBuffer(buffer);
channel.dispose();// response.init(channel); 在调用之前异常
response.removeChannel();
response.finish(true);
@@ -85,7 +85,8 @@ public class PrepareRunner implements Runnable {
@Override
public void failed(Throwable exc, ByteBuffer buffer) {
response.request.offerReadBuffer(buffer);
buffer.clear();
channel.setReadBuffer(buffer);
channel.dispose();// response.init(channel); 在调用之前异常
response.removeChannel();
response.finish(true);
@@ -98,7 +99,7 @@ public class PrepareRunner implements Runnable {
channel.dispose();// response.init(channel); 在调用之前异常
response.removeChannel();
response.finish(true);
if (te != null && context.logger.isLoggable(Level.FINEST)) {
if (context.logger.isLoggable(Level.FINEST)) {
context.logger.log(Level.FINEST, "Servlet read channel erroneous, force to close channel ", te);
}
}
@@ -117,7 +118,8 @@ public class PrepareRunner implements Runnable {
if (buffer.hasRemaining()) {
request.setMoredata(buffer);
} else {
response.request.offerReadBuffer(buffer);
buffer.clear();
channel.setReadBuffer(buffer);
}
preparer.prepare(request, response);
} else {
@@ -138,7 +140,8 @@ public class PrepareRunner implements Runnable {
if (attachment.hasRemaining()) {
request.setMoredata(attachment);
} else {
response.request.offerReadBuffer(attachment);
attachment.clear();
channel.setReadBuffer(attachment);
}
try {
preparer.prepare(request, response);
@@ -152,7 +155,8 @@ public class PrepareRunner implements Runnable {
@Override
public void failed(Throwable exc, ByteBuffer attachment) {
preparer.illRequestCounter.incrementAndGet();
response.request.offerReadBuffer(attachment);
attachment.clear();
channel.setReadBuffer(attachment);
response.finish(true);
if (exc != null) request.context.logger.log(Level.FINER, "Servlet read channel erroneous, force to close channel ", exc);
}
@@ -165,7 +169,7 @@ public class PrepareRunner implements Runnable {
}
protected Response pollResponse() {
return context.responsePool.get();
return responsePool.get();
}
protected Request pollRequest(Response response) {
@@ -176,19 +180,4 @@ public class PrepareRunner implements Runnable {
return response.removeChannel();
}
protected ByteBuffer pollReadBuffer(Request request) {
return request.pollReadBuffer();
}
protected ByteBuffer pollReadBuffer(Response response) {
return response.request.pollReadBuffer();
}
protected void offerReadBuffer(Request request, ByteBuffer buffer) {
request.offerReadBuffer(buffer);
}
protected void offerReadBuffer(Response response, ByteBuffer buffer) {
response.request.offerReadBuffer(buffer);
}
}

View File

@@ -43,7 +43,7 @@ public abstract class ProtocolServer {
public abstract <T> void setOption(SocketOption<T> name, T value) throws IOException;
public abstract void accept() throws IOException;
public abstract void accept(Server server) throws IOException;
public abstract void close() throws IOException;

View File

@@ -9,6 +9,7 @@ import java.nio.ByteBuffer;
import java.util.*;
import org.redkale.convert.bson.BsonConvert;
import org.redkale.convert.json.JsonConvert;
import org.redkale.util.ObjectPool;
/**
* 协议请求对象
@@ -23,6 +24,8 @@ public abstract class Request<C extends Context> {
protected final C context;
protected final ObjectPool<ByteBuffer> bufferPool;
protected final BsonConvert bsonConvert;
protected final JsonConvert jsonConvert;
@@ -37,8 +40,6 @@ public abstract class Request<C extends Context> {
protected AsyncConnection channel;
protected ByteBuffer readBuffer;
/**
* properties 与 attributes 的区别在于调用recycle时 attributes会被清空而properties会保留;
* properties 通常存放需要永久绑定在request里的一些对象
@@ -47,9 +48,9 @@ public abstract class Request<C extends Context> {
protected final Map<String, Object> attributes = new HashMap<>();
protected Request(C context) {
protected Request(C context, ObjectPool<ByteBuffer> bufferPool) {
this.context = context;
this.readBuffer = context.pollBuffer();
this.bufferPool = bufferPool;
this.bsonConvert = context.getBsonConvert();
this.jsonConvert = context.getJsonConvert();
}
@@ -64,23 +65,6 @@ public abstract class Request<C extends Context> {
return rs;
}
protected ByteBuffer pollReadBuffer() {
ByteBuffer buffer = this.readBuffer;
this.readBuffer = null;
if (buffer == null) buffer = context.pollBuffer();
return buffer;
}
protected void offerReadBuffer(ByteBuffer buffer) {
if (buffer == null) return;
if (this.readBuffer == null) {
buffer.clear();
this.readBuffer = buffer;
} else {
context.offerBuffer(buffer);
}
}
/**
* 返回值Integer.MIN_VALUE: 帧数据; -1数据不合法 0解析完毕 &gt;0: 需再读取的字节数。
*

View File

@@ -10,6 +10,7 @@ import java.nio.ByteBuffer;
import java.nio.channels.CompletionHandler;
import java.util.function.*;
import java.util.logging.Level;
import org.redkale.util.ObjectPool;
/**
* 协议响应对象
@@ -26,14 +27,12 @@ public abstract class Response<C extends Context, R extends Request<C>> {
protected final C context;
protected final ObjectPool<Response> responsePool;
protected final R request;
protected AsyncConnection channel;
protected ByteBuffer writeHeadBuffer;
protected ByteBuffer writeBodyBuffer;
private volatile boolean inited = true;
protected Object output; //输出的结果对象
@@ -44,8 +43,6 @@ public abstract class Response<C extends Context, R extends Request<C>> {
protected Servlet<C, R, ? extends Response<C, R>> servlet;
private Supplier<ByteBuffer> bodyBufferSupplier;
private final CompletionHandler finishHandler = new CompletionHandler<Integer, ByteBuffer>() {
@Override
@@ -53,31 +50,21 @@ public abstract class Response<C extends Context, R extends Request<C>> {
if (attachment.hasRemaining()) {
channel.write(attachment, attachment, this);
} else {
offerResponseBuffer(attachment);
channel.offerBuffer(attachment);
ByteBuffer data = request.removeMoredata();
final boolean more = data != null && request.keepAlive;
request.more = more;
finish();
if (more) new PrepareRunner(context, responsePool, request.channel, null, Response.this).run();
}
}
@Override
public void failed(Throwable exc, ByteBuffer attachment) {
offerResponseBuffer(attachment);
channel.offerBuffer(attachment);
finish(true);
}
private void offerResponseBuffer(ByteBuffer attachment) {
if (writeHeadBuffer == null) {
if (context.bufferPool.getRecyclerPredicate().test(attachment)) {
writeHeadBuffer = attachment;
}
} else if (writeBodyBuffer == null) {
if (context.bufferPool.getRecyclerPredicate().test(attachment)) {
writeBodyBuffer = attachment;
}
} else {
context.offerBuffer(attachment);
}
}
};
private final CompletionHandler finishHandler2 = new CompletionHandler<Integer, ByteBuffer[]>() {
@@ -94,70 +81,37 @@ public abstract class Response<C extends Context, R extends Request<C>> {
if (index >= 0) {
channel.write(attachments, index, attachments.length - index, attachments, this);
} else {
offerResponseBuffer(attachments);
for (ByteBuffer attachment : attachments) {
channel.offerBuffer(attachment);
}
ByteBuffer data = request.removeMoredata();
final boolean more = data != null && request.keepAlive;
request.more = more;
finish();
if (more) new PrepareRunner(context, responsePool, request.channel, null, Response.this).run();
}
}
@Override
public void failed(Throwable exc, final ByteBuffer[] attachments) {
offerResponseBuffer(attachments);
for (ByteBuffer attachment : attachments) {
channel.offerBuffer(attachment);
}
finish(true);
}
private void offerResponseBuffer(ByteBuffer[] attachments) {
int start = 0;
if (writeHeadBuffer == null && attachments.length > start) {
if (context.bufferPool.getRecyclerPredicate().test(attachments[start])) {
writeHeadBuffer = attachments[start];
start++;
}
}
if (writeBodyBuffer == null && attachments.length > start) {
if (context.bufferPool.getRecyclerPredicate().test(attachments[start])) {
writeBodyBuffer = attachments[start];
start++;
}
}
for (int i = start; i < attachments.length; i++) {
context.offerBuffer(attachments[i]);
}
}
};
protected Response(C context, final R request) {
protected Response(C context, final R request, ObjectPool<Response> responsePool) {
this.context = context;
this.request = request;
this.writeHeadBuffer = context.pollBuffer();
this.writeBodyBuffer = context.pollBuffer();
this.bodyBufferSupplier = () -> {
ByteBuffer buffer = writeBodyBuffer;
if (buffer == null) return context.pollBuffer();
writeBodyBuffer = null;
return buffer;
};
}
protected ByteBuffer pollWriteReadBuffer() {
ByteBuffer buffer = this.writeHeadBuffer;
this.writeHeadBuffer = null;
if (buffer == null) buffer = context.pollBuffer();
return buffer;
}
protected ByteBuffer pollWriteBodyBuffer() {
ByteBuffer buffer = this.writeBodyBuffer;
this.writeBodyBuffer = null;
if (buffer == null) buffer = context.pollBuffer();
return buffer;
}
protected Supplier<ByteBuffer> getBodyBufferSupplier() {
return bodyBufferSupplier;
this.responsePool = responsePool;
}
protected void offerBuffer(ByteBuffer... buffers) {
context.offerBuffer(buffers);
for (ByteBuffer buffer : buffers) {
channel.offerBuffer(buffer);
}
}
protected AsyncConnection removeChannel() {
@@ -257,19 +211,19 @@ public abstract class Response<C extends Context, R extends Request<C>> {
AsyncConnection conn = removeChannel();
this.recycle();
this.prepare();
new PrepareRunner(context, conn, null, this).run();
new PrepareRunner(context, this.responsePool, conn, null, this).run();
} else {
channel.dispose();
}
} else {
this.context.responsePool.accept(this);
this.responsePool.accept(this);
}
}
public void finish(final byte[] bs) {
if (!this.inited) return; //避免重复关闭
if (this.context.bufferCapacity == bs.length) {
ByteBuffer buffer = this.context.pollBuffer();
ByteBuffer buffer = channel.bufferSupplier.get();
buffer.put(bs);
buffer.flip();
this.finish(buffer);
@@ -280,33 +234,33 @@ public abstract class Response<C extends Context, R extends Request<C>> {
public void finish(ByteBuffer buffer) {
if (!this.inited) return; //避免重复关闭
ByteBuffer data = this.request.removeMoredata();
final AsyncConnection conn = this.channel;
final boolean more = data != null && this.request.keepAlive;
this.request.more = more;
// ByteBuffer data = this.request.removeMoredata();
// final boolean more = data != null && this.request.keepAlive;
// this.request.more = more;
conn.write(buffer, buffer, finishHandler);
if (more) new PrepareRunner(this.context, conn, data, null).run();
// if (more) new PrepareRunner(this.context, this.responsePool, conn, data, null).run();
}
public void finish(boolean kill, ByteBuffer buffer) {
if (!this.inited) return; //避免重复关闭
if (kill) refuseAlive();
ByteBuffer data = this.request.removeMoredata();
final AsyncConnection conn = this.channel;
final boolean more = data != null && this.request.keepAlive;
this.request.more = more;
// ByteBuffer data = this.request.removeMoredata();
// final boolean more = data != null && this.request.keepAlive;
// this.request.more = more;
conn.write(buffer, buffer, finishHandler);
if (more) new PrepareRunner(this.context, conn, data, null).run();
// if (more) new PrepareRunner(this.context, this.responsePool, conn, data, null).run();
}
public void finish(ByteBuffer... buffers) {
if (!this.inited) return; //避免重复关闭
final AsyncConnection conn = this.channel;
ByteBuffer data = this.request.removeMoredata();
final boolean more = data != null && this.request.keepAlive;
this.request.more = more;
// ByteBuffer data = this.request.removeMoredata();
// final boolean more = data != null && this.request.keepAlive;
// this.request.more = more;
conn.write(buffers, buffers, finishHandler2);
if (more) new PrepareRunner(this.context, conn, data, null).run();
// if (more) new PrepareRunner(this.context, this.responsePool, conn, data, null).run();
}
public void finish(boolean kill, ByteBuffer... buffers) {
@@ -317,7 +271,7 @@ public abstract class Response<C extends Context, R extends Request<C>> {
final boolean more = data != null && this.request.keepAlive;
this.request.more = more;
conn.write(buffers, buffers, finishHandler2);
if (more) new PrepareRunner(this.context, conn, data, null).run();
if (more) new PrepareRunner(this.context, this.responsePool, conn, data, null).run();
}
protected <A> void send(final ByteBuffer buffer, final A attachment, final CompletionHandler<Integer, A> handler) {
@@ -328,14 +282,14 @@ public abstract class Response<C extends Context, R extends Request<C>> {
if (buffer.hasRemaining()) {
channel.write(buffer, attachment, this);
} else {
context.offerBuffer(buffer);
channel.offerBuffer(buffer);
if (handler != null) handler.completed(result, attachment);
}
}
@Override
public void failed(Throwable exc, A attachment) {
context.offerBuffer(buffer);
channel.offerBuffer(buffer);
if (handler != null) handler.failed(exc, attachment);
}
@@ -353,7 +307,7 @@ public abstract class Response<C extends Context, R extends Request<C>> {
index = i;
break;
}
context.offerBuffer(buffers[i]);
channel.offerBuffer(buffers[i]);
}
if (index == 0) {
channel.write(buffers, attachment, this);
@@ -367,7 +321,7 @@ public abstract class Response<C extends Context, R extends Request<C>> {
@Override
public void failed(Throwable exc, A attachment) {
for (ByteBuffer buffer : buffers) {
context.offerBuffer(buffer);
channel.offerBuffer(buffer);
}
if (handler != null) handler.failed(exc, attachment);
}

View File

@@ -7,14 +7,14 @@ package org.redkale.net;
import java.io.*;
import java.net.*;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.text.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.*;
import java.util.logging.*;
import javax.net.ssl.SSLContext;
import org.redkale.net.Filter;
import org.redkale.util.*;
/**
@@ -125,11 +125,11 @@ public abstract class Server<K extends Serializable, C extends Context, R extend
this.aliveTimeoutSeconds = config.getIntValue("aliveTimeoutSeconds", 30);
this.readTimeoutSeconds = config.getIntValue("readTimeoutSeconds", 0);
this.writeTimeoutSeconds = config.getIntValue("writeTimeoutSeconds", 0);
this.backlog = parseLenth(config.getValue("backlog"), 8 * 1024);
this.backlog = parseLenth(config.getValue("backlog"), 1024);
this.maxbody = parseLenth(config.getValue("maxbody"), 64 * 1024);
int bufCapacity = parseLenth(config.getValue("bufferCapacity"), 32 * 1024);
this.bufferCapacity = bufCapacity < 8 * 1024 ? 8 * 1024 : bufCapacity;
this.threads = config.getIntValue("threads", Runtime.getRuntime().availableProcessors() * 32);
int bufCapacity = parseLenth(config.getValue("bufferCapacity"), "UDP".equalsIgnoreCase(protocol) ? 1350 : 32 * 1024);
this.bufferCapacity = "UDP".equalsIgnoreCase(protocol) ? bufCapacity : (bufCapacity < 8 * 1024 ? 8 * 1024 : bufCapacity);
this.threads = config.getIntValue("threads", Math.max(8, Runtime.getRuntime().availableProcessors() * 2));
this.bufferPoolSize = config.getIntValue("bufferPoolSize", this.threads * 4);
this.responsePoolSize = config.getIntValue("responsePoolSize", this.threads * 2);
this.name = config.getValue("name", "Server-" + protocol + "-" + this.address.getPort());
@@ -153,7 +153,7 @@ public abstract class Server<K extends Serializable, C extends Context, R extend
final String n = name;
this.executor = (ThreadPoolExecutor) Executors.newFixedThreadPool(threads, (Runnable r) -> {
Thread t = new WorkThread(executor, r);
t.setName(n + "-ServletThread-" + f.format(counter.incrementAndGet()));
t.setName("Redkale-" + n + "-ServletThread-" + f.format(counter.incrementAndGet()));
return t;
});
}
@@ -281,13 +281,17 @@ public abstract class Server<K extends Serializable, C extends Context, R extend
this.serverChannel = ProtocolServer.create(this.protocol, context, this.serverClassLoader, config == null ? null : config.getValue("netimpl"));
this.serverChannel.open(config);
serverChannel.bind(address, backlog);
serverChannel.accept();
serverChannel.accept(this);
final String threadName = "[" + Thread.currentThread().getName() + "] ";
postStart();
logger.info(threadName + this.getClass().getSimpleName() + ("TCP".equalsIgnoreCase(protocol) ? "" : ("." + protocol)) + " listen: " + address
+ ", threads: " + threads + ", maxbody: " + formatLenth(context.maxbody) + ", bufferCapacity: " + formatLenth(bufferCapacity) + ", bufferPoolSize: " + bufferPoolSize + ", responsePoolSize: " + responsePoolSize
+ ", started in " + (System.currentTimeMillis() - context.getServerStartTime()) + " ms");
}
protected void postStart() {
}
public void changeAddress(final InetSocketAddress addr) throws IOException {
long s = System.currentTimeMillis();
Objects.requireNonNull(addr);
@@ -299,7 +303,7 @@ public abstract class Server<K extends Serializable, C extends Context, R extend
newServerChannel = ProtocolServer.create(this.protocol, context, this.serverClassLoader, config == null ? null : config.getValue("netimpl"));
newServerChannel.open(config);
newServerChannel.bind(addr, backlog);
newServerChannel.accept();
newServerChannel.accept(this);
} catch (IOException e) {
context.address = oldAddress;
throw e;
@@ -358,6 +362,15 @@ public abstract class Server<K extends Serializable, C extends Context, R extend
protected abstract C createContext();
//必须在 createContext()之后调用
protected abstract ObjectPool<ByteBuffer> createBufferPool(AtomicLong createCounter, AtomicLong cycleCounter, int bufferPoolSize);
//必须在 createContext()之后调用
protected abstract ObjectPool<Response> createResponsePool(AtomicLong createCounter, AtomicLong cycleCounter, int responsePoolSize);
//必须在 createResponsePool()之后调用
protected abstract Creator<Response> createResponseCreator(ObjectPool<ByteBuffer> bufferPool, ObjectPool<Response> responsePool);
public void shutdown() throws IOException {
long s = System.currentTimeMillis();
logger.info(this.getClass().getSimpleName() + "-" + this.protocol + " shutdowning");

View File

@@ -23,11 +23,13 @@ public abstract class Servlet<C extends Context, R extends Request<C>, P extends
AnyValue _conf; //当前Servlet的配置
//Server执行start时运行此方法
public void init(C context, AnyValue config) {
}
public abstract void execute(R request, P response) throws IOException;
//Server执行shutdown后运行此方法
public void destroy(C context, AnyValue config) {
}

View File

@@ -24,7 +24,7 @@ import javax.net.ssl.SSLContext;
*/
public class TcpAioAsyncConnection extends AsyncConnection {
private final Semaphore semaphore = new Semaphore(1);
//private final Semaphore semaphore = new Semaphore(1);
private int readTimeoutSeconds;
@@ -103,42 +103,35 @@ public class TcpAioAsyncConnection extends AsyncConnection {
}
}
@Override
public void read(long timeout, TimeUnit unit, CompletionHandler<Integer, ByteBuffer> handler) {
this.readtime = System.currentTimeMillis();
ByteBuffer dst = pollReadBuffer();
channel.read(dst, timeout < 0 ? 0 : timeout, unit, dst, handler);
}
private <A> void nextWrite(Throwable exc, A attachment) {
BlockingQueue<WriteEntry> queue = this.writeQueue;
if (queue != null && exc != null && !isOpen()) {
WriteEntry entry;
while ((entry = queue.poll()) != null) {
try {
entry.writeHandler.failed(exc, entry.writeAttachment);
} catch (Throwable e) {
e.printStackTrace(System.err);
}
}
return;
}
WriteEntry entry = queue == null ? null : queue.poll();
if (entry != null) {
try {
if (entry.writeOneBuffer == null) {
write(false, entry.writeBuffers, entry.writeOffset, entry.writeLength, entry.writeAttachment, entry.writeHandler);
} else {
write(false, entry.writeOneBuffer, entry.writeAttachment, entry.writeHandler);
}
} catch (Exception e) {
entry.writeHandler.failed(e, entry.writeAttachment);
}
} else {
semaphore.release();
}
}
// private <A> void nextWrite(Throwable exc, A attachment) {
// BlockingQueue<WriteEntry> queue = this.writeQueue;
// if (queue != null && exc != null && !isOpen()) {
// WriteEntry entry;
// while ((entry = queue.poll()) != null) {
// try {
// entry.writeHandler.failed(exc, entry.writeAttachment);
// } catch (Throwable e) {
// e.printStackTrace(System.err);
// }
// }
// return;
// }
// WriteEntry entry = queue == null ? null : queue.poll();
//
// if (entry != null) {
// try {
// if (entry.writeOneBuffer == null) {
// write(false, entry.writeBuffers, entry.writeOffset, entry.writeLength, entry.writeAttachment, entry.writeHandler);
// } else {
// write(false, entry.writeOneBuffer, entry.writeAttachment, entry.writeHandler);
// }
// } catch (Exception e) {
// entry.writeHandler.failed(e, entry.writeAttachment);
// }
// } else {
// semaphore.release();
// }
// }
@Override
public <A> void write(ByteBuffer src, A attachment, CompletionHandler<Integer, ? super A> handler) {
@@ -146,17 +139,17 @@ public class TcpAioAsyncConnection extends AsyncConnection {
}
private <A> void write(boolean acquire, ByteBuffer src, A attachment, CompletionHandler<Integer, ? super A> handler) {
if (acquire && !semaphore.tryAcquire()) {
if (this.writeQueue == null) {
synchronized (semaphore) {
if (this.writeQueue == null) {
this.writeQueue = new LinkedBlockingDeque<>();
}
}
}
this.writeQueue.add(new WriteEntry(src, attachment, handler));
return;
}
// if (acquire && !semaphore.tryAcquire()) {
// if (this.writeQueue == null) {
// synchronized (semaphore) {
// if (this.writeQueue == null) {
// this.writeQueue = new LinkedBlockingDeque<>();
// }
// }
// }
// this.writeQueue.add(new WriteEntry(src, attachment, handler));
// return;
// }
WriteOneCompletionHandler newHandler = new WriteOneCompletionHandler(src, handler);
if (!channel.isOpen()) {
newHandler.failed(new ClosedChannelException(), attachment);
@@ -180,17 +173,17 @@ public class TcpAioAsyncConnection extends AsyncConnection {
}
private <A> void write(boolean acquire, ByteBuffer[] srcs, int offset, int length, A attachment, final CompletionHandler<Integer, ? super A> handler) {
if (acquire && !semaphore.tryAcquire()) {
if (this.writeQueue == null) {
synchronized (semaphore) {
if (this.writeQueue == null) {
this.writeQueue = new LinkedBlockingDeque<>();
}
}
}
this.writeQueue.add(new WriteEntry(srcs, offset, length, attachment, handler));
return;
}
// if (acquire && !semaphore.tryAcquire()) {
// if (this.writeQueue == null) {
// synchronized (semaphore) {
// if (this.writeQueue == null) {
// this.writeQueue = new LinkedBlockingDeque<>();
// }
// }
// }
// this.writeQueue.add(new WriteEntry(srcs, offset, length, attachment, handler));
// return;
// }
WriteMoreCompletionHandler newHandler = new WriteMoreCompletionHandler(srcs, offset, length, handler);
if (!channel.isOpen()) {
newHandler.failed(new ClosedChannelException(), attachment);
@@ -307,16 +300,16 @@ public class TcpAioAsyncConnection extends AsyncConnection {
if (result >= 0) {
writeCount += result;
try {
int index = -1;
int incre = -1;
for (int i = writeOffset; i < (writeOffset + writeLength); i++) {
if (writeBuffers[i].hasRemaining()) {
index = i;
incre = i - writeOffset;
break;
}
}
if (index >= 0) {
writeOffset += index;
writeLength -= index;
if (incre >= 0) {
writeOffset += incre;
writeLength -= incre;
channel.write(writeBuffers, writeOffset, writeLength, writeTimeoutSeconds > 0 ? writeTimeoutSeconds : 60, TimeUnit.SECONDS, attachment, this);
return;
}
@@ -324,27 +317,27 @@ public class TcpAioAsyncConnection extends AsyncConnection {
failed(e, attachment);
return;
}
try {
// try {
writeHandler.completed(writeCount, attachment);
} finally {
nextWrite(null, attachment);
}
// } finally {
// nextWrite(null, attachment);
// }
} else {
try {
// try {
writeHandler.completed(result.intValue(), attachment);
} finally {
nextWrite(null, attachment);
}
// } finally {
// nextWrite(null, attachment);
// }
}
}
@Override
public void failed(Throwable exc, A attachment) {
try {
// try {
writeHandler.failed(exc, attachment);
} finally {
nextWrite(exc, attachment);
}
// } finally {
// nextWrite(exc, attachment);
// }
}
}
@@ -371,21 +364,21 @@ public class TcpAioAsyncConnection extends AsyncConnection {
failed(e, attachment);
return;
}
try {
// try {
writeHandler.completed(result, attachment);
} finally {
nextWrite(null, attachment);
}
// } finally {
// nextWrite(null, attachment);
// }
}
@Override
public void failed(Throwable exc, A attachment) {
try {
// try {
writeHandler.failed(exc, attachment);
} finally {
nextWrite(exc, attachment);
}
// } finally {
// nextWrite(exc, attachment);
// }
}
}

View File

@@ -7,10 +7,12 @@ package org.redkale.net;
import java.io.IOException;
import java.net.*;
import java.nio.ByteBuffer;
import java.nio.channels.*;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import java.util.logging.Level;
import org.redkale.util.AnyValue;
import org.redkale.util.*;
/**
* 协议底层Server
@@ -70,7 +72,14 @@ public class TcpAioProtocolServer extends ProtocolServer {
}
@Override
public void accept() throws IOException {
public void accept(Server server) throws IOException {
AtomicLong createBufferCounter = new AtomicLong();
AtomicLong cycleBufferCounter = new AtomicLong();
ObjectPool<ByteBuffer> bufferPool = server.createBufferPool(createBufferCounter, cycleBufferCounter, server.bufferPoolSize);
AtomicLong createResponseCounter = new AtomicLong();
AtomicLong cycleResponseCounter = new AtomicLong();
ObjectPool<Response> responsePool = server.createResponsePool(createResponseCounter, cycleResponseCounter, server.responsePoolSize);
responsePool.setCreator(server.createResponseCreator(bufferPool, responsePool));
final AsynchronousServerSocketChannel serchannel = this.serverChannel;
serchannel.accept(null, new CompletionHandler<AsynchronousSocketChannel, Void>() {
@@ -93,9 +102,10 @@ public class TcpAioProtocolServer extends ProtocolServer {
channel.setOption(StandardSocketOptions.SO_RCVBUF, 16 * 1024);
channel.setOption(StandardSocketOptions.SO_SNDBUF, 16 * 1024);
AsyncConnection conn = new TcpAioAsyncConnection(context.getBufferSupplier(), context.getBufferConsumer(), channel,
AsyncConnection conn = new TcpAioAsyncConnection(bufferPool, bufferPool, channel,
context.getSSLContext(), null, context.readTimeoutSeconds, context.writeTimeoutSeconds, livingCounter, closedCounter);
context.runAsync(new PrepareRunner(context, conn, null, null));
//context.runAsync(new PrepareRunner(context, responsePool, conn, null, null));
new PrepareRunner(context, responsePool, conn, null, null).run();
} catch (Throwable e) {
context.logger.log(Level.INFO, channel + " accept error", e);
}

View File

@@ -119,7 +119,7 @@ public class TransportFactory {
if (this.checkinterval < 2) this.checkinterval = 2;
}
this.scheduler = new ScheduledThreadPoolExecutor(1, (Runnable r) -> {
final Thread t = new Thread(r, this.getClass().getSimpleName() + "-TransportFactoryTask-Thread");
final Thread t = new Thread(r, "Redkale-" + this.getClass().getSimpleName() + "-Schedule-Thread");
t.setDaemon(true);
return t;
});
@@ -162,7 +162,7 @@ public class TransportFactory {
ExecutorService transportExec = Executors.newFixedThreadPool(threads, (Runnable r) -> {
Thread t = new Thread(r);
t.setDaemon(true);
t.setName("Transport-Thread-" + counter.incrementAndGet());
t.setName("Redkale-Transport-Thread-" + counter.incrementAndGet());
return t;
});
AsynchronousChannelGroup transportGroup = null;

View File

@@ -10,7 +10,6 @@ import java.net.*;
import java.nio.ByteBuffer;
import java.nio.channels.*;
import java.util.Set;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.*;
import javax.net.ssl.SSLContext;
@@ -142,11 +141,6 @@ public class UdpBioAsyncConnection extends AsyncConnection {
}
}
@Override
public void read(long timeout, TimeUnit unit, CompletionHandler<Integer, ByteBuffer> handler) {
read(handler);
}
@Override
public int read(ByteBuffer dst) throws IOException {
int rs = channel.read(dst);

View File

@@ -11,7 +11,8 @@ import java.nio.ByteBuffer;
import java.nio.channels.DatagramChannel;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import org.redkale.util.AnyValue;
import java.util.concurrent.atomic.AtomicLong;
import org.redkale.util.*;
/**
* 协议底层Server
@@ -70,7 +71,14 @@ public class UdpBioProtocolServer extends ProtocolServer {
}
@Override
public void accept() throws IOException {
public void accept(Server server) throws IOException {
AtomicLong createBufferCounter = new AtomicLong();
AtomicLong cycleBufferCounter = new AtomicLong();
ObjectPool<ByteBuffer> bufferPool = server.createBufferPool(createBufferCounter, cycleBufferCounter, server.bufferPoolSize);
AtomicLong createResponseCounter = new AtomicLong();
AtomicLong cycleResponseCounter = new AtomicLong();
ObjectPool<Response> responsePool = server.createResponsePool(createResponseCounter, cycleResponseCounter, server.responsePoolSize);
responsePool.setCreator(server.createResponseCreator(bufferPool, responsePool));
final DatagramChannel serchannel = this.serverChannel;
final int readTimeoutSeconds = this.context.readTimeoutSeconds;
final int writeTimeoutSeconds = this.context.writeTimeoutSeconds;
@@ -81,15 +89,15 @@ public class UdpBioProtocolServer extends ProtocolServer {
public void run() {
cdl.countDown();
while (running) {
final ByteBuffer buffer = context.pollBuffer();
final ByteBuffer buffer = bufferPool.get();
try {
SocketAddress address = serchannel.receive(buffer);
buffer.flip();
AsyncConnection conn = new UdpBioAsyncConnection(context.getBufferSupplier(), context.getBufferConsumer(), serchannel,
AsyncConnection conn = new UdpBioAsyncConnection(bufferPool, bufferPool, serchannel,
context.getSSLContext(), address, false, readTimeoutSeconds, writeTimeoutSeconds, null, null);
context.runAsync(new PrepareRunner(context, conn, buffer, null));
context.runAsync(new PrepareRunner(context, responsePool, conn, buffer, null));
} catch (Exception e) {
context.offerBuffer(buffer);
bufferPool.accept(buffer);
}
}
}

View File

@@ -28,8 +28,11 @@ public class HttpContext extends Context {
protected final ConcurrentHashMap<Class, Creator> asyncHandlerCreators = new ConcurrentHashMap<>();
protected String remoteAddrHeader;
public HttpContext(HttpContextConfig config) {
super(config);
this.remoteAddrHeader = config.remoteAddrHeader;
random.setSeed(Math.abs(System.nanoTime()));
}
@@ -43,10 +46,6 @@ public class HttpContext extends Context {
return executor;
}
protected ObjectPool<Response> getResponsePool() {
return responsePool;
}
@SuppressWarnings("unchecked")
protected <H extends CompletionHandler> Creator<H> loadAsyncHandlerCreator(Class<H> handlerClass) {
Creator<H> creator = asyncHandlerCreators.get(handlerClass);
@@ -162,5 +161,6 @@ public class HttpContext extends Context {
public static class HttpContextConfig extends ContextConfig {
public String remoteAddrHeader;
}
}

View File

@@ -6,6 +6,7 @@
package org.redkale.net.http;
import org.redkale.net.Filter;
import org.redkale.util.AnyValue;
/**
* HTTP 过滤器 <br>
@@ -17,4 +18,7 @@ import org.redkale.net.Filter;
*/
public abstract class HttpFilter extends Filter<HttpContext, HttpRequest, HttpResponse> {
//Server执行start后运行此方法
public void postStart(HttpContext context, AnyValue config) {
}
}

View File

@@ -402,6 +402,21 @@ public class HttpPrepareServlet extends PrepareServlet<String, HttpContext, Http
return this.resourceHttpServlet;
}
public void postStart(HttpContext context, AnyValue config) {
List filters = getFilters();
synchronized (filters) {
if (!filters.isEmpty()) {
for (Object filter : filters) {
((HttpFilter) filter).postStart(context, config);
}
}
}
this.resourceHttpServlet.postStart(context, config);
getServlets().forEach(s -> {
s.postStart(context, getServletConf(s));
});
}
@Override
public void destroy(HttpContext context, AnyValue config) {
super.destroy(context, config); //必须要执行

View File

@@ -6,22 +6,25 @@
package org.redkale.net.http;
import java.io.*;
import java.lang.annotation.Annotation;
import java.lang.reflect.Array;
import java.net.*;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.charset.*;
import java.util.*;
import java.util.logging.Level;
import org.redkale.convert.json.JsonConvert;
import org.redkale.net.*;
import org.redkale.util.*;
import org.redkale.util.AnyValue.DefaultAnyValue;
import org.redkale.util.*;
/**
* Http请求包 与javax.servlet.http.HttpServletRequest 基本类似。 <br>
* 同时提供json的解析接口: public Object getJsonParameter(Type type, String name) <br>
* Redkale提倡带简单的参数的GET请求采用类似REST风格, 因此提供了 getRequstURIPath 系列接口。 <br>
* 例如简单的翻页查询 <br>
* /pipes/record/query/offset:0/limit:20 <br>
* /pipes/user/query/offset:0/limit:20 <br>
* 获取页号: int offset = request.getRequstURIPath("offset:", 0); <br>
* 获取行数: int limit = request.getRequstURIPath("limit:", 10); <br>
* <p>
@@ -40,6 +43,8 @@ public class HttpRequest extends Request<HttpContext> {
protected String requestURI;
private byte[] queryBytes;
private long contentLength = -1;
private String contentType;
@@ -69,15 +74,17 @@ public class HttpRequest extends Request<HttpContext> {
protected int actionid;
protected Annotation[] annotations;
protected Object currentUser;
private final String remoteAddrHeader;
Object attachment; //仅供HttpServlet传递Entry使用
public HttpRequest(HttpContext context, String remoteAddrHeader) {
super(context);
this.remoteAddrHeader = remoteAddrHeader;
public HttpRequest(HttpContext context, ObjectPool<ByteBuffer> bufferPool) {
super(context, bufferPool);
this.remoteAddrHeader = context.remoteAddrHeader;
}
protected boolean isWebSocket() {
@@ -102,34 +109,44 @@ public class HttpRequest extends Request<HttpContext> {
@Override
protected int readHeader(final ByteBuffer buffer) {
if (!readLine(buffer, array)) return -1;
ByteArray bytes = array;
if (!readLine(buffer, bytes)) return -1;
Charset charset = this.context.getCharset();
int index = 0;
int offset = array.find(index, ' ');
int offset = bytes.find(index, ' ');
if (offset <= 0) return -1;
this.method = array.toString(index, offset, charset).trim();
this.method = bytes.toString(index, offset, charset);
index = ++offset;
offset = array.find(index, ' ');
offset = bytes.find(index, ' ');
if (offset <= 0) return -1;
int off = array.find(index, '#');
int off = bytes.find(index, '#');
if (off > 0) offset = off;
int qst = array.find(index, offset, (byte) '?');
int qst = bytes.find(index, offset, (byte) '?');
if (qst > 0) {
this.requestURI = array.toDecodeString(index, qst - index, charset).trim();
addParameter(array, qst + 1, offset - qst - 1);
this.requestURI = bytes.toDecodeString(index, qst - index, charset);
this.queryBytes = bytes.getBytes(qst + 1, offset - qst - 1);
try {
addParameter(bytes, qst + 1, offset - qst - 1);
} catch (Exception e) {
this.context.getLogger().log(Level.WARNING, "HttpRequest.addParameter error: " + bytes.toString(), e);
}
} else {
this.requestURI = array.toDecodeString(index, offset - index, charset).trim();
this.requestURI = bytes.toDecodeString(index, offset - index, charset);
this.queryBytes = new byte[0];
}
index = ++offset;
this.protocol = array.toString(index, array.size() - index, charset).trim();
while (readLine(buffer, array)) {
if (array.size() < 2) break;
this.protocol = bytes.toString(index, bytes.size() - index, charset);
//header
while (readLine(buffer, bytes)) {
if (bytes.size() < 2) break;
index = 0;
offset = array.find(index, ':');
offset = bytes.find(index, ':');
if (offset <= 0) return -1;
String name = array.toString(index, offset, charset).trim();
String name = bytes.toString(index, offset, charset);
index = offset + 1;
String value = array.toString(index, array.size() - index, charset).trim();
//Upgrade: websocket 前面有空格所以需要trim()
String value = bytes.toString(index, bytes.size() - index, charset).trim();
switch (name) {
case "Content-Type":
case "content-type":
@@ -168,14 +185,14 @@ public class HttpRequest extends Request<HttpContext> {
if (this.contentType != null && this.contentType.contains("boundary=")) this.boundary = true;
if (this.boundary) this.keepAlive = false; //文件上传必须设置keepAlive为false因为文件过大时用户不一定会skip掉多余的数据
array.clear();
bytes.clear();
if (this.contentLength > 0 && (this.contentType == null || !this.boundary)) {
if (this.contentLength > context.getMaxbody()) return -1;
array.write(buffer, Math.min((int) this.contentLength, buffer.remaining()));
int lr = (int) this.contentLength - array.size();
bytes.write(buffer, Math.min((int) this.contentLength, buffer.remaining()));
int lr = (int) this.contentLength - bytes.size();
return lr > 0 ? lr : 0;
}
if (buffer.hasRemaining() && (this.boundary || !this.keepAlive)) array.write(buffer, buffer.remaining()); //文件上传、HTTP1.0或Connection:close
if (buffer.hasRemaining() && (this.boundary || !this.keepAlive)) bytes.write(buffer, buffer.remaining()); //文件上传、HTTP1.0或Connection:close
//暂不考虑是keep-alive且存在body却没有指定Content-Length的情况
return 0;
}
@@ -197,7 +214,9 @@ public class HttpRequest extends Request<HttpContext> {
private void parseBody() {
if (this.boundary || bodyparsed) return;
addParameter(array, 0, array.size());
if (this.contentType != null && this.contentType.toLowerCase().contains("x-www-form-urlencoded")) {
addParameter(array, 0, array.size());
}
bodyparsed = true;
}
@@ -298,6 +317,55 @@ public class HttpRequest extends Request<HttpContext> {
return this.actionid;
}
/**
* 获取当前操作Method上的注解集合
*
* @return Annotation[]
*/
public Annotation[] getAnnotations() {
if (this.annotations == null) return new Annotation[0];
Annotation[] newanns = new Annotation[this.annotations.length];
System.arraycopy(this.annotations, 0, newanns, 0, newanns.length);
return newanns;
}
/**
* 获取当前操作Method上的注解
*
* @param <T> 注解泛型
* @param annotationClass 注解类型
*
* @return Annotation
*/
public <T extends Annotation> T getAnnotation(Class<T> annotationClass) {
if (this.annotations == null) return null;
for (Annotation ann : this.annotations) {
if (ann.getClass() == annotationClass) return (T) ann;
}
return null;
}
/**
* 获取当前操作Method上的注解集合
*
* @param <T> 注解泛型
* @param annotationClass 注解类型
*
* @return Annotation[]
*/
public <T extends Annotation> T[] getAnnotationsByType(Class<T> annotationClass) {
if (this.annotations == null) return (T[]) Array.newInstance(annotationClass, 0);
T[] news = (T[]) Array.newInstance(annotationClass, this.annotations.length);
int index = 0;
for (Annotation ann : this.annotations) {
if (ann.getClass() == annotationClass) {
news[index++] = (T) ann;
}
}
if (index < 1) return (T[]) Array.newInstance(annotationClass, 0);
return Arrays.copyOf(news, index);
}
/**
* 获取客户端地址IP
*
@@ -435,6 +503,7 @@ public class HttpRequest extends Request<HttpContext> {
this.method = null;
this.protocol = null;
this.requestURI = null;
this.queryBytes = null;
this.contentType = null;
this.host = null;
this.connection = null;
@@ -443,6 +512,7 @@ public class HttpRequest extends Request<HttpContext> {
this.bodyparsed = false;
this.moduleid = 0;
this.actionid = 0;
this.annotations = null;
this.currentUser = null;
this.attachment = null;
@@ -613,6 +683,15 @@ public class HttpRequest extends Request<HttpContext> {
return requestURI;
}
/**
* 获取请求参数的byte[]
*
* @return byte[]
*/
public byte[] getQueryBytes() {
return queryBytes;
}
/**
* 截取getRequestURI最后的一个/后面的部分
*
@@ -625,7 +704,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL最后的一个/后面的部分的short值 <br>
* 例如请求URL /pipes/record/query/2 <br>
* 例如请求URL /pipes/user/query/2 <br>
* 获取type参数: short type = request.getRequstURILastPath((short)0); //type = 2
*
* @param defvalue 默认short值
@@ -644,7 +723,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL最后的一个/后面的部分的short值 <br>
* 例如请求URL /pipes/record/query/2 <br>
* 例如请求URL /pipes/user/query/2 <br>
* 获取type参数: short type = request.getRequstURILastPath(16, (short)0); //type = 2
*
* @param radix 进制数
@@ -664,7 +743,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL最后的一个/后面的部分的int值 <br>
* 例如请求URL /pipes/record/query/2 <br>
* 例如请求URL /pipes/user/query/2 <br>
* 获取type参数: int type = request.getRequstURILastPath(0); //type = 2
*
* @param defvalue 默认int值
@@ -682,7 +761,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL最后的一个/后面的部分的int值 <br>
* 例如请求URL /pipes/record/query/2 <br>
* 例如请求URL /pipes/user/query/2 <br>
* 获取type参数: int type = request.getRequstURILastPath(16, 0); //type = 2
*
* @param radix 进制数
@@ -701,7 +780,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL最后的一个/后面的部分的float值 <br>
* 例如请求URL /pipes/record/query/2 <br>
* 例如请求URL /pipes/user/query/2 <br>
* 获取type参数: float type = request.getRequstURILastPath(0.0f); //type = 2.0f
*
* @param defvalue 默认float值
@@ -719,7 +798,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL最后的一个/后面的部分的int值 <br>
* 例如请求URL /pipes/record/query/2 <br>
* 例如请求URL /pipes/user/query/2 <br>
* 获取type参数: long type = request.getRequstURILastPath(0L); //type = 2
*
* @param defvalue 默认long值
@@ -737,7 +816,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL最后的一个/后面的部分的int值 <br>
* 例如请求URL /pipes/record/query/2 <br>
* 例如请求URL /pipes/user/query/2 <br>
* 获取type参数: long type = request.getRequstURILastPath(16, 0L); //type = 2
*
* @param radix 进制数
@@ -756,7 +835,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL最后的一个/后面的部分的double值 <br>
* 例如请求URL /pipes/record/query/2 <br>
* 例如请求URL /pipes/user/query/2 <br>
* 获取type参数: double type = request.getRequstURILastPath(0.0); //type = 2.0
*
* @param defvalue 默认double值
@@ -787,7 +866,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL分段中含prefix段的值 <br>
* 例如请求URL /pipes/record/query/name:hello <br>
* 例如请求URL /pipes/user/query/name:hello <br>
* 获取name参数: String name = request.getRequstURIPath("name:", "none");
*
* @param prefix prefix段前缀
@@ -806,7 +885,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL分段中含prefix段的short值 <br>
* 例如请求URL /pipes/record/query/type:10 <br>
* 例如请求URL /pipes/user/query/type:10 <br>
* 获取type参数: short type = request.getRequstURIPath("type:", (short)0);
*
* @param prefix prefix段前缀
@@ -825,7 +904,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL分段中含prefix段的short值 <br>
* 例如请求URL /pipes/record/query/type:a <br>
* 例如请求URL /pipes/user/query/type:a <br>
* 获取type参数: short type = request.getRequstURIPath(16, "type:", (short)0); //type = 10
*
* @param radix 进制数
@@ -845,7 +924,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL分段中含prefix段的int值 <br>
* 例如请求URL /pipes/record/query/offset:0/limit:50 <br>
* 例如请求URL /pipes/user/query/offset:0/limit:50 <br>
* 获取offset参数: int offset = request.getRequstURIPath("offset:", 0); <br>
* 获取limit参数: int limit = request.getRequstURIPath("limit:", 20); <br>
*
@@ -865,7 +944,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL分段中含prefix段的int值 <br>
* 例如请求URL /pipes/record/query/offset:0/limit:50 <br>
* 例如请求URL /pipes/user/query/offset:0/limit:50 <br>
* 获取offset参数: int offset = request.getRequstURIPath("offset:", 0); <br>
* 获取limit参数: int limit = request.getRequstURIPath(16, "limit:", 20); // limit = 16 <br>
*
@@ -886,7 +965,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL分段中含prefix段的float值 <br>
* 例如请求URL /pipes/record/query/point:40.0 <br>
* 例如请求URL /pipes/user/query/point:40.0 <br>
* 获取time参数: float point = request.getRequstURIPath("point:", 0.0f);
*
* @param prefix prefix段前缀
@@ -905,7 +984,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL分段中含prefix段的long值 <br>
* 例如请求URL /pipes/record/query/time:1453104341363/id:40 <br>
* 例如请求URL /pipes/user/query/time:1453104341363/id:40 <br>
* 获取time参数: long time = request.getRequstURIPath("time:", 0L);
*
* @param prefix prefix段前缀
@@ -924,7 +1003,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL分段中含prefix段的long值 <br>
* 例如请求URL /pipes/record/query/time:1453104341363/id:40 <br>
* 例如请求URL /pipes/user/query/time:1453104341363/id:40 <br>
* 获取time参数: long time = request.getRequstURIPath(16, "time:", 0L);
*
* @param radix 进制数
@@ -944,7 +1023,7 @@ public class HttpRequest extends Request<HttpContext> {
/**
* 获取请求URL分段中含prefix段的double值 <br>
* 例如请求URL /pipes/record/query/point:40.0 <br>
* 例如请求URL /pipes/user/query/point:40.0 <br>
* 获取time参数: double point = request.getRequstURIPath("point:", 0.0);
*
* @param prefix prefix段前缀
@@ -1227,16 +1306,11 @@ public class HttpRequest extends Request<HttpContext> {
* @return String
*/
public String getParametersToString(String prefix) {
final StringBuilder sb = new StringBuilder();
getParameters().forEach((k, v) -> {
if (sb.length() > 0) sb.append('&');
try {
sb.append(k).append('=').append(URLEncoder.encode(v, "UTF-8"));
} catch (IOException ex) {
throw new RuntimeException(ex);
}
});
return (sb.length() > 0 && prefix != null) ? (prefix + sb) : sb.toString();
byte[] rbs = queryBytes;
if (rbs == null || rbs.length < 1) return "";
Charset charset = this.context.getCharset();
String str = charset == null ? new String(rbs, StandardCharsets.UTF_8) : new String(rbs, charset);
return (prefix == null) ? str : (prefix + str);
}
/**
@@ -1480,8 +1554,8 @@ public class HttpRequest extends Request<HttpContext> {
}
/**
* 获取翻页对象 https://redkale.org/pipes/records/list/offset:0/limit:20/sort:createtime%20ASC <br>
* https://redkale.org/pipes/records/list?flipper={'offset':0,'limit':20, 'sort':'createtime ASC'} <br>
* 获取翻页对象 https://redkale.org/pipes/users/list/offset:0/limit:20/sort:createtime%20ASC <br>
* https://redkale.org/pipes/users/list?flipper={'offset':0,'limit':20, 'sort':'createtime ASC'} <br>
* 以上两种接口都可以获取到翻页对象
*
*

View File

@@ -37,7 +37,7 @@ public class HttpResourceServlet extends HttpServlet {
public WatchThread(File root) throws IOException {
this.root = root;
this.setName("HttpResourceServlet-Watch-Thread");
this.setName("Redkale-HttpResourceServlet-Watch-Thread");
this.setDaemon(true);
this.watcher = this.root.toPath().getFileSystem().newWatchService();
}

View File

@@ -50,6 +50,10 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
protected static final byte[] connectAliveBytes = "Connection: keep-alive\r\n".getBytes();
private static final byte[] fillContentLengthBytes = ("Content-Length: \r\n").getBytes();
private static final ZoneId ZONE_GMT = ZoneId.of("GMT");
private static final Set<OpenOption> options = new HashSet<>();
private static final Map<Integer, String> httpCodes = new HashMap<>();
@@ -103,8 +107,6 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
httpCodes.put(505, "HTTP Version Not Supported");
}
private static final ZoneId ZONE_GMT = ZoneId.of("GMT");
private int status = 200;
private String contentType = "";
@@ -113,9 +115,17 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
private HttpCookie[] cookies;
private boolean headsended = false;
private int headWritedSize = -1; //0表示跳过header正数表示header的字节长度。
private ByteBuffer headBuffer;
private int headLenPos = -1;
private BiFunction<HttpResponse, ByteBuffer[], ByteBuffer[]> bufferHandler;
private BiFunction<HttpRequest, org.redkale.service.RetResult, org.redkale.service.RetResult> retResultHandler;
private Supplier<ByteBuffer> bodyBufferSupplier;
//------------------------------------------------
private final String plainContentType;
@@ -148,8 +158,8 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
return new ObjectPool<>(creatCounter, cycleCounter, max, creator, (x) -> ((HttpResponse) x).prepare(), (x) -> ((HttpResponse) x).recycle());
}
public HttpResponse(HttpContext context, HttpRequest request, HttpResponseConfig config) {
super(context, request);
public HttpResponse(HttpContext context, HttpRequest request, ObjectPool<Response> responsePool, HttpResponseConfig config) {
super(context, request, responsePool);
this.plainContentType = config.plainContentType == null || config.plainContentType.isEmpty() ? "text/plain; charset=utf-8" : config.plainContentType;
this.jsonContentType = config.jsonContentType == null || config.jsonContentType.isEmpty() ? "application/json; charset=utf-8" : config.jsonContentType;
this.plainContentTypeBytes = ("Content-Type: " + this.plainContentType + "\r\n").getBytes();
@@ -163,6 +173,11 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
this.hasRender = renders != null && !renders.isEmpty();
this.onlyoneHttpRender = renders != null && renders.size() == 1 ? renders.get(0) : null;
this.contentType = this.plainContentType;
this.bodyBufferSupplier = () -> {
if (headWritedSize >= 0 || bufferHandler != null) return channel.pollWriteBuffer(); //bufferHandler 需要cached的请求不能带上header
if (contentLength < 0) contentLength = -2;
return createHeader();
};
}
@Override
@@ -174,18 +189,30 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
return channel;
}
@Override
protected void prepare() {
super.prepare();
}
@Override
protected boolean recycle() {
this.status = 200;
this.contentLength = -1;
this.contentType = null;
this.cookies = null;
this.headsended = false;
this.headWritedSize = -1;
this.headBuffer = null;
this.headLenPos = -1;
this.header.clear();
this.bufferHandler = null;
this.retResultHandler = null;
return super.recycle();
}
protected Supplier<ByteBuffer> getBodyBufferSupplier() {
return bodyBufferSupplier;
}
@Override
protected void init(AsyncConnection channel) {
super.init(channel);
@@ -280,15 +307,6 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
return context.loadAsyncHandlerCreator(handlerClass).create(createAsyncHandler());
}
/**
* 获取ByteBuffer生成器
*
* @return ByteBuffer生成器
*/
public Supplier<ByteBuffer> getBufferSupplier() {
return getBodyBufferSupplier();
}
/**
* 将对象以JSON格式输出
*
@@ -378,14 +396,19 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
*
* @param ret RetResult输出对象
*/
public void finishJson(final org.redkale.service.RetResult ret) {
public void finishJson(org.redkale.service.RetResult ret) {
this.contentType = this.jsonContentType;
if (this.retResultHandler != null) {
ret = this.retResultHandler.apply(this.request, ret);
}
if (this.recycleListener != null) this.output = ret;
if (ret != null && !ret.isSuccess()) {
this.header.addValue("retcode", String.valueOf(ret.getRetcode()));
this.header.addValue("retinfo", ret.getRetinfo());
}
finish(request.getJsonConvert().convertTo(getBodyBufferSupplier(), ret));
Convert convert = ret == null ? null : ret.convert();
if (convert == null || !(convert instanceof TextConvert)) convert = request.getJsonConvert();
finish(convert.convertTo(getBodyBufferSupplier(), ret));
}
/**
@@ -394,8 +417,11 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
* @param convert 指定的JsonConvert
* @param ret RetResult输出对象
*/
public void finishJson(final JsonConvert convert, final org.redkale.service.RetResult ret) {
public void finishJson(final JsonConvert convert, org.redkale.service.RetResult ret) {
this.contentType = this.jsonContentType;
if (this.retResultHandler != null) {
ret = this.retResultHandler.apply(this.request, ret);
}
if (this.recycleListener != null) this.output = ret;
if (ret != null && !ret.isSuccess()) {
this.header.addValue("retcode", String.valueOf(ret.getRetcode()));
@@ -465,7 +491,7 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
* @param obj 输出对象
*/
@SuppressWarnings("unchecked")
public void finish(final Convert convert, final Type type, final Object obj) {
public void finish(final Convert convert, final Type type, Object obj) {
if (obj == null) {
finish("null");
} else if (obj instanceof CompletableFuture) {
@@ -492,6 +518,8 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
context.getLogger().log(Level.WARNING, "HttpServlet finish File occur, force to close channel. request = " + getRequest(), e);
finish(500, null);
}
} else if (obj instanceof org.redkale.service.RetResult) {
finishJson((org.redkale.service.RetResult) obj);
} else if (obj instanceof HttpResult) {
HttpResult result = (HttpResult) obj;
if (result.getContentType() != null) setContentType(result.getContentType());
@@ -501,7 +529,9 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
} else if (result.getResult() instanceof CharSequence) {
finish(result.getResult().toString());
} else {
finish(convert, result.getResult());
Convert cc = result.convert();
if (cc == null || !(cc instanceof TextConvert)) cc = convert;
finish(cc, result.getResult());
}
} else {
if (hasRender) {
@@ -528,6 +558,10 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
if (this.recycleListener != null) this.output = obj;
if (obj instanceof org.redkale.service.RetResult) {
org.redkale.service.RetResult ret = (org.redkale.service.RetResult) obj;
if (this.retResultHandler != null) {
ret = this.retResultHandler.apply(this.request, ret);
obj = ret;
}
if (!ret.isSuccess()) {
this.header.addValue("retcode", String.valueOf(ret.getRetcode())).addValue("retinfo", ret.getRetinfo());
}
@@ -626,7 +660,7 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
public void finish(final String contentType, final byte[] bs) {
if (isClosed()) return; //避免重复关闭
final byte[] content = bs == null ? new byte[0] : bs;
if (!this.headsended) {
if (this.headWritedSize < 0) {
this.contentType = contentType;
this.contentLength = content.length;
ByteBuffer headbuf = createHeader();
@@ -670,7 +704,7 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
@Override
public void finish(boolean kill, ByteBuffer buffer) {
if (isClosed()) return; //避免重复关闭
if (!this.headsended) {
if (this.headWritedSize < 0) {
this.contentLength = buffer == null ? 0 : buffer.remaining();
ByteBuffer headbuf = createHeader();
headbuf.flip();
@@ -708,7 +742,7 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
if (bufs != null) buffers = bufs;
}
if (kill) refuseAlive();
if (!this.headsended) {
if (this.headWritedSize < 0) {
long len = 0;
for (ByteBuffer buf : buffers) {
len += buf.remaining();
@@ -725,6 +759,17 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
super.finish(kill, newbuffers);
}
} else {
if (this.headLenPos > 0 && buffers[0] == headBuffer) {
long contentlen = -this.headWritedSize;
for (ByteBuffer buf : buffers) {
contentlen += buf.remaining();
}
byte[] lenBytes = String.valueOf(contentlen).getBytes();
int start = this.headLenPos - lenBytes.length;
for (int i = 0; i < lenBytes.length; i++) {
headBuffer.put(start + i, lenBytes[i]);
}
}
super.finish(kill, buffers);
}
}
@@ -738,7 +783,7 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
* @param handler 异步回调函数
*/
public <A> void sendBody(ByteBuffer buffer, A attachment, CompletionHandler<Integer, A> handler) {
if (!this.headsended) {
if (this.headWritedSize < 0) {
if (this.contentLength < 0) this.contentLength = buffer == null ? 0 : buffer.remaining();
ByteBuffer headbuf = createHeader();
headbuf.flip();
@@ -761,7 +806,7 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
* @param handler 异步回调函数
*/
public <A> void sendBody(ByteBuffer[] buffers, A attachment, CompletionHandler<Integer, A> handler) {
if (!this.headsended) {
if (this.headWritedSize < 0) {
if (this.contentLength < 0) {
int len = 0;
if (buffers != null && buffers.length > 0) {
@@ -844,7 +889,9 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
}
this.contentLength = length;
if (filename != null && !filename.isEmpty() && file != null) {
addHeader("Content-Disposition", "attachment;filename=" + URLEncoder.encode(filename, "UTF-8"));
if (this.header.getValue("Content-Disposition") == null) {
addHeader("Content-Disposition", "attachment;filename=" + URLEncoder.encode(filename, "UTF-8"));
}
}
this.contentType = MimeType.getByFilename(filename == null || filename.isEmpty() ? file.getName() : filename);
if (this.contentType == null) this.contentType = "application/octet-stream";
@@ -886,14 +933,19 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
//Header大小不能超过一个ByteBuffer的容量
protected ByteBuffer createHeader() {
this.headsended = true;
ByteBuffer buffer = this.pollWriteReadBuffer();
ByteBuffer buffer = this.channel.pollWriteBuffer();
int oldpos = buffer.position();
if (this.status == 200) {
buffer.put(status200Bytes);
} else {
buffer.put(("HTTP/1.1 " + this.status + " " + httpCodes.get(this.status) + "\r\n").getBytes());
}
if (this.contentLength >= 0) buffer.put(("Content-Length: " + this.contentLength + "\r\n").getBytes());
if (this.contentLength >= 0) {
buffer.put(("Content-Length: " + this.contentLength + "\r\n").getBytes());
} else if (this.contentLength == -2) {
buffer.put(fillContentLengthBytes);
this.headLenPos = buffer.position() - 2; //去掉\r\n
}
if (!this.request.isWebSocket()) {
if (this.contentType == this.jsonContentType) {
buffer.put(this.jsonContentTypeBytes);
@@ -965,6 +1017,8 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
}
}
buffer.put(LINE);
this.headWritedSize = buffer.position() - oldpos;
this.headBuffer = buffer;
return buffer;
}
@@ -990,7 +1044,7 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
* @return HttpResponse
*/
public HttpResponse skipHeader() {
this.headsended = true;
this.headWritedSize = 0;
return this;
}
@@ -1120,6 +1174,24 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
this.bufferHandler = bufferHandler;
}
/**
* 获取输出RetResult时的拦截器
*
* @return 拦截器
*/
protected BiFunction<HttpRequest, org.redkale.service.RetResult, org.redkale.service.RetResult> getRetResultHandler() {
return retResultHandler;
}
/**
* 设置输出RetResult时的拦截器
*
* @param retResultHandler 拦截器
*/
public void retResultHandler(BiFunction<HttpRequest, org.redkale.service.RetResult, org.redkale.service.RetResult> retResultHandler) {
this.retResultHandler = retResultHandler;
}
protected final class TransferFileHandler implements CompletionHandler<Integer, ByteBuffer> {
private final File file;
@@ -1197,7 +1269,7 @@ public class HttpResponse extends Response<HttpContext, HttpRequest> {
@Override
public void failed(Throwable exc, ByteBuffer attachment) {
context.offerBuffer(attachment);
channel.offerBuffer(attachment);
finish(true);
try {
filechannel.close();

View File

@@ -8,6 +8,7 @@ package org.redkale.net.http;
import java.io.Serializable;
import java.net.HttpCookie;
import java.util.*;
import org.redkale.convert.*;
import org.redkale.convert.json.JsonConvert;
/**
@@ -22,21 +23,28 @@ public class HttpResult<T> {
public static final String SESSIONID_COOKIENAME = HttpRequest.SESSIONID_NAME;
private Map<String, String> headers;
protected Map<String, String> headers;
private List<HttpCookie> cookies;
protected List<HttpCookie> cookies;
private String contentType;
protected String contentType;
private T result;
protected T result;
private int status = 0; //不设置则为 200
protected int status = 0; //不设置则为 200
private String message;
protected String message;
protected Convert convert;
public HttpResult() {
}
public HttpResult(Convert convert, T result) {
this.convert = convert;
this.result = result;
}
public HttpResult(T result) {
this.result = result;
}
@@ -82,6 +90,14 @@ public class HttpResult<T> {
return this;
}
public Convert convert() {
return convert;
}
public void convert(Convert convert) {
this.convert = convert;
}
public Map<String, String> getHeaders() {
return headers;
}

View File

@@ -37,6 +37,8 @@ public class HttpServer extends Server<String, HttpContext, HttpRequest, HttpRes
private byte[] currDateBytes;
private HttpResponseConfig respConfig;
public HttpServer() {
this(System.currentTimeMillis(), ResourceFactory.root());
}
@@ -54,6 +56,11 @@ public class HttpServer extends Server<String, HttpContext, HttpRequest, HttpRes
super.init(config);
}
@Override
protected void postStart() {
((HttpPrepareServlet) this.prepare).postStart(this.context, config);
}
@Override
public void destroy(final AnyValue config) throws Exception {
super.destroy(config);
@@ -304,16 +311,7 @@ public class HttpServer extends Server<String, HttpContext, HttpRequest, HttpRes
@SuppressWarnings("unchecked")
protected HttpContext createContext() {
final int port = this.address.getPort();
AtomicLong createBufferCounter = new AtomicLong();
AtomicLong cycleBufferCounter = new AtomicLong();
this.bufferCapacity = Math.max(this.bufferCapacity, 16 * 1024 + 16); //兼容 HTTP 2.0;
final int rcapacity = this.bufferCapacity;
ObjectPool<ByteBuffer> bufferPool = new ObjectPool<>(createBufferCounter, cycleBufferCounter, this.bufferPoolSize,
(Object... params) -> ByteBuffer.allocateDirect(rcapacity), null, (e) -> {
if (e == null || e.isReadOnly() || e.capacity() != rcapacity) return false;
e.clear();
return true;
});
final List<String[]> defaultAddHeaders = new ArrayList<>();
final List<String[]> defaultSetHeaders = new ArrayList<>();
boolean autoOptions = false;
@@ -423,7 +421,7 @@ public class HttpServer extends Server<String, HttpContext, HttpRequest, HttpRes
final String addrHeader = remoteAddrHeader;
final HttpResponseConfig respConfig = new HttpResponseConfig();
this.respConfig = new HttpResponseConfig();
respConfig.plainContentType = plainContentType;
respConfig.jsonContentType = jsonContentType;
respConfig.defaultAddHeaders = defaultAddHeaders.isEmpty() ? null : defaultAddHeaders.toArray(new String[defaultAddHeaders.size()][]);
@@ -433,18 +431,12 @@ public class HttpServer extends Server<String, HttpContext, HttpRequest, HttpRes
respConfig.dateSupplier = dateSupplier;
respConfig.renders = ((HttpPrepareServlet) prepare).renders;
AtomicLong createResponseCounter = new AtomicLong();
AtomicLong cycleResponseCounter = new AtomicLong();
ObjectPool<Response> responsePool = HttpResponse.createPool(createResponseCounter, cycleResponseCounter, this.responsePoolSize, null);
final HttpContextConfig contextConfig = new HttpContextConfig();
contextConfig.serverStartTime = this.serverStartTime;
contextConfig.logger = this.logger;
contextConfig.executor = this.executor;
contextConfig.sslContext = this.sslContext;
contextConfig.bufferCapacity = rcapacity;
contextConfig.bufferPool = bufferPool;
contextConfig.responsePool = responsePool;
contextConfig.bufferCapacity = this.bufferCapacity;
contextConfig.maxconns = this.maxconns;
contextConfig.maxbody = this.maxbody;
contextConfig.charset = this.charset;
@@ -454,9 +446,32 @@ public class HttpServer extends Server<String, HttpContext, HttpRequest, HttpRes
contextConfig.aliveTimeoutSeconds = this.aliveTimeoutSeconds;
contextConfig.readTimeoutSeconds = this.readTimeoutSeconds;
contextConfig.writeTimeoutSeconds = this.writeTimeoutSeconds;
contextConfig.remoteAddrHeader = addrHeader;
HttpContext httpcontext = new HttpContext(contextConfig);
responsePool.setCreator((Object... params) -> new HttpResponse(httpcontext, new HttpRequest(httpcontext, addrHeader), respConfig));
return httpcontext;
return new HttpContext(contextConfig);
}
@Override
protected ObjectPool<ByteBuffer> createBufferPool(AtomicLong createCounter, AtomicLong cycleCounter, int bufferPoolSize) {
if (createCounter == null) createCounter = new AtomicLong();
if (cycleCounter == null) cycleCounter = new AtomicLong();
final int rcapacity = this.bufferCapacity;
ObjectPool<ByteBuffer> bufferPool = new ObjectPool<>(createCounter, cycleCounter, bufferPoolSize,
(Object... params) -> ByteBuffer.allocateDirect(rcapacity), null, (e) -> {
if (e == null || e.isReadOnly() || e.capacity() != rcapacity) return false;
e.clear();
return true;
});
return bufferPool;
}
@Override
protected ObjectPool<Response> createResponsePool(AtomicLong createCounter, AtomicLong cycleCounter, int responsePoolSize) {
return HttpResponse.createPool(createCounter, cycleCounter, responsePoolSize, null);
}
@Override
protected Creator<Response> createResponseCreator(ObjectPool<ByteBuffer> bufferPool, ObjectPool<Response> responsePool) {
return (Object... params) -> new HttpResponse(this.context, new HttpRequest(this.context, bufferPool), responsePool, this.respConfig);
}
}

View File

@@ -6,6 +6,7 @@
package org.redkale.net.http;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
import java.util.*;
@@ -71,6 +72,7 @@ public class HttpServlet extends Servlet<HttpContext, HttpRequest, HttpResponse>
request.attachment = entry;
request.moduleid = entry.moduleid;
request.actionid = entry.actionid;
request.annotations = entry.annotations;
if (entry.auth) {
response.thenEvent(authSuccessServlet);
authenticate(request, response);
@@ -103,6 +105,10 @@ public class HttpServlet extends Servlet<HttpContext, HttpRequest, HttpResponse>
void postDestroy(HttpContext context, AnyValue config) {
}
//Server执行start后运行此方法
public void postStart(HttpContext context, AnyValue config) {
}
/**
* <p>
* 预执行方法在execute方法之前运行设置当前用户信息或者加入常规统计和基础检测例如 : <br>
@@ -210,6 +216,7 @@ public class HttpServlet extends Servlet<HttpContext, HttpRequest, HttpResponse>
InnerActionEntry(int moduleid, int actionid, String name, String[] methods, Method method, HttpServlet servlet) {
this(moduleid, actionid, name, methods, method, auth(method), cacheseconds(method), servlet);
this.annotations = annotations(method);
}
//供Rest类使用参数不能随便更改
@@ -232,16 +239,21 @@ public class HttpServlet extends Servlet<HttpContext, HttpRequest, HttpResponse>
} : null;
}
private static boolean auth(Method method) {
protected static boolean auth(Method method) {
HttpMapping mapping = method.getAnnotation(HttpMapping.class);
return mapping == null || mapping.auth();
}
private static int cacheseconds(Method method) {
protected static int cacheseconds(Method method) {
HttpMapping mapping = method.getAnnotation(HttpMapping.class);
return mapping == null ? 0 : mapping.cacheseconds();
}
//Rest.class会用到此方法
protected static Annotation[] annotations(Method method) {
return method.getAnnotations();
}
boolean isNeedCheck() {
return this.moduleid != 0 || this.actionid != 0;
}
@@ -270,9 +282,11 @@ public class HttpServlet extends Servlet<HttpContext, HttpRequest, HttpResponse>
final String[] methods;
final Method method;
final HttpServlet servlet;
Method method;
Annotation[] annotations;
}
private HttpServlet createActionServlet(final Method method) {

View File

@@ -11,6 +11,7 @@ import java.lang.annotation.*;
import static java.lang.annotation.ElementType.*;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.reflect.*;
import java.net.InetSocketAddress;
import java.nio.channels.CompletionHandler;
import java.util.*;
import java.util.concurrent.CompletableFuture;
@@ -355,6 +356,10 @@ public final class Rest {
fv.visitEnd();
}
}
{ //_redkale_annotations
fv = cw.visitField(ACC_PUBLIC + ACC_STATIC, "_redkale_annotations", "Ljava/util/Map;", "Ljava/util/Map<Ljava/lang/String;[Ljava/lang/annotation/Annotation;>;", null);
fv.visitEnd();
}
{ //_DynWebSocketServlet构造函数
mv = new MethodDebugVisitor(cw.visitMethod(ACC_PUBLIC, "<init>", "()V", null, null));
mv.visitVarInsn(ALOAD, 0);
@@ -422,7 +427,7 @@ public final class Rest {
}
RestClassLoader newLoader = new RestClassLoader(loader);
Map<String, Annotation[]> msgclassToAnnotations = new HashMap<>();
for (int i = 0; i < messageMethods.size(); i++) { // _DyncXXXWebSocketMessage 子消息List
Method method = messageMethods.get(i);
String endfix = "_" + method.getName() + "_" + (i > 9 ? i : ("0" + i));
@@ -504,6 +509,30 @@ public final class Rest {
mv.visitMaxs(2, 2);
mv.visitEnd();
}
{ //getAnnotations
mv = new MethodDebugVisitor(cw2.visitMethod(ACC_PUBLIC, "getAnnotations", "()[Ljava/lang/annotation/Annotation;", null, null));
mv.visitFieldInsn(GETSTATIC, newDynName, "_redkale_annotations", "Ljava/util/Map;");
mv.visitLdcInsn(newDynMessageFullName + endfix);
mv.visitMethodInsn(INVOKEINTERFACE, "java/util/Map", "get", "(Ljava/lang/Object;)Ljava/lang/Object;", true);
mv.visitTypeInsn(CHECKCAST, "[Ljava/lang/annotation/Annotation;");
mv.visitVarInsn(ASTORE, 1);
mv.visitVarInsn(ALOAD, 1);
Label l2 = new Label();
mv.visitJumpInsn(IFNONNULL, l2);
mv.visitInsn(ICONST_0);
mv.visitTypeInsn(ANEWARRAY, "java/lang/annotation/Annotation");
mv.visitInsn(ARETURN);
mv.visitLabel(l2);
mv.visitFrame(Opcodes.F_APPEND, 1, new Object[]{"[Ljava/lang/annotation/Annotation;"}, 0, null);
mv.visitVarInsn(ALOAD, 1);
mv.visitVarInsn(ALOAD, 1);
mv.visitInsn(ARRAYLENGTH);
mv.visitMethodInsn(INVOKESTATIC, "java/util/Arrays", "copyOf", "([Ljava/lang/Object;I)[Ljava/lang/Object;", false);
mv.visitTypeInsn(CHECKCAST, "[Ljava/lang/annotation/Annotation;");
mv.visitInsn(ARETURN);
mv.visitMaxs(2, 2);
mv.visitEnd();
}
{ //execute
mv = new MethodDebugVisitor(cw2.visitMethod(ACC_PUBLIC, "execute", "(L" + newDynWebSokcetFullName + ";)V", null, null));
mv.visitVarInsn(ALOAD, 0);
@@ -544,6 +573,7 @@ public final class Rest {
}
cw2.visitEnd();
newLoader.loadClass((newDynMessageFullName + endfix).replace('/', '.'), cw2.toByteArray());
msgclassToAnnotations.put(newDynMessageFullName + endfix, method.getAnnotations());
}
{ //_DynXXXWebSocketMessage class
@@ -676,6 +706,7 @@ public final class Rest {
Class<?> newClazz = newLoader.loadClass(newDynName.replace('/', '.'), cw.toByteArray());
try {
T servlet = (T) newClazz.getDeclaredConstructor().newInstance();
newClazz.getField("_redkale_annotations").set(null, msgclassToAnnotations);
if (rws.cryptor() != Cryptor.class) {
Cryptor cryptor = rws.cryptor().getDeclaredConstructor().newInstance();
Field cryptorField = newClazz.getSuperclass().getDeclaredField("cryptor"); //WebSocketServlet
@@ -829,7 +860,7 @@ public final class Rest {
break;
}
}
if (defmodulename.isEmpty() || (!pound && entrys.size() <= 6)) {
if (defmodulename.isEmpty() || (!pound && entrys.size() <= 2)) {
for (MappingEntry entry : entrys) {
String suburl = (catalog.isEmpty() ? "/" : ("/" + catalog + "/")) + (defmodulename.isEmpty() ? "" : (defmodulename + "/")) + entry.name;
urlpath += "," + suburl;
@@ -972,7 +1003,7 @@ public final class Rest {
RestHeader annhead = param.getAnnotation(RestHeader.class);
if (annhead != null) {
if (ptype != String.class) throw new RuntimeException("@RestHeader must on String Parameter in " + method);
if (ptype != String.class && ptype != InetSocketAddress.class) throw new RuntimeException("@RestHeader must on String or InetSocketAddress Parameter in " + method);
n = annhead.name();
radix = annhead.radix();
comment = annhead.comment();
@@ -1524,7 +1555,7 @@ public final class Rest {
RestUploadFile ru = field.getAnnotation(RestUploadFile.class);
RestURI ri = field.getAnnotation(RestURI.class);
if (rh == null && rc == null && ra == null && rb == null && rs == null && ru == null && ri == null) continue;
if (rh != null && field.getType() != String.class) throw new RuntimeException("@RestHeader must on String Field in " + field);
if (rh != null && field.getType() != String.class && field.getType() != InetSocketAddress.class) throw new RuntimeException("@RestHeader must on String Field in " + field);
if (rc != null && field.getType() != String.class) throw new RuntimeException("@RestCookie must on String Field in " + field);
if (rs != null && field.getType() != String.class) throw new RuntimeException("@RestSessionid must on String Field in " + field);
if (ra != null && field.getType() != String.class) throw new RuntimeException("@RestAddress must on String Field in " + field);
@@ -1538,7 +1569,7 @@ public final class Rest {
String attrFieldName;
String restname = "";
if (rh != null) {
attrFieldName = "_redkale_attr_header_" + restAttributes.size();
attrFieldName = "_redkale_attr_header_" + (field.getType() != String.class ? "json_" : "") + restAttributes.size();
restname = rh.name();
} else if (rc != null) {
attrFieldName = "_redkale_attr_cookie_" + restAttributes.size();
@@ -1599,6 +1630,14 @@ public final class Rest {
mv.visitMethodInsn(INVOKEVIRTUAL, reqInternalName, "getConnection", "()Ljava/lang/String;", false);
} else if ("Method".equalsIgnoreCase(headerkey)) {
mv.visitMethodInsn(INVOKEVIRTUAL, reqInternalName, "getMethod", "()Ljava/lang/String;", false);
} else if (en.getKey().contains("_header_json_")) {
String typefieldname = "_redkale_body_jsontype_" + bodyTypes.size();
bodyTypes.put(typefieldname, (java.lang.reflect.Type) en.getValue()[2]);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, newDynName, typefieldname, "Ljava/lang/reflect/Type;");
mv.visitLdcInsn(headerkey);
mv.visitMethodInsn(INVOKEVIRTUAL, reqInternalName, "getJsonHeader", "(Ljava/lang/reflect/Type;Ljava/lang/String;)Ljava/lang/Object;", false);
mv.visitTypeInsn(CHECKCAST, Type.getInternalName((Class) en.getValue()[1]));
} else {
mv.visitLdcInsn(headerkey);
mv.visitLdcInsn("");
@@ -1810,9 +1849,10 @@ public final class Rest {
// HashMap<String, InnerActionEntry> _createRestInnerActionEntry() {
// HashMap<String, InnerActionEntry> map = new HashMap<>();
// map.put("asyncfind3", new InnerActionEntry(100000,200000,"asyncfind3", new String[]{},null,false,0, new _Dync_asyncfind3_HttpServlet()));
// map.put("asyncfind3", new InnerActionEntry(1,2,"asyncfind2", new String[]{"GET", "POST"},null,true,0, new _Dync_asyncfind2_HttpServlet()));
// map.put("asyncfind2", new InnerActionEntry(1,2,"asyncfind2", new String[]{"GET", "POST"},null,true,0, new _Dync_asyncfind2_HttpServlet()));
// return map;
// }
Map<String, Method> mappingurlToMethod = new HashMap<>();
{ //_createRestInnerActionEntry 方法
mv = new MethodDebugVisitor(cw.visitMethod(0, "_createRestInnerActionEntry", "()Ljava/util/HashMap;", "()Ljava/util/HashMap<Ljava/lang/String;L" + innerEntryName + ";>;", null));
//mv.setDebug(true);
@@ -1822,6 +1862,7 @@ public final class Rest {
mv.visitVarInsn(ASTORE, 1);
for (final MappingEntry entry : entrys) {
mappingurlToMethod.put(entry.mappingurl, entry.mappingMethod);
mv.visitVarInsn(ALOAD, 1);
mv.visitLdcInsn(entry.mappingurl); //name
mv.visitTypeInsn(NEW, innerEntryName); //new InnerActionEntry
@@ -1837,9 +1878,9 @@ public final class Rest {
mv.visitLdcInsn(entry.methods[i]);
mv.visitInsn(AASTORE);
}
mv.visitInsn(ACONST_NULL); //method
mv.visitInsn(ACONST_NULL); //method
mv.visitInsn(entry.auth ? ICONST_1 : ICONST_0); //auth
pushInt(mv, entry.cacheseconds); //cacheseconds
pushInt(mv, entry.cacheseconds); //cacheseconds
mv.visitTypeInsn(NEW, newDynName + "$" + entry.newActionClassName);
mv.visitInsn(DUP);
mv.visitVarInsn(ALOAD, 0);
@@ -1920,7 +1961,12 @@ public final class Rest {
restactMethod.setAccessible(true);
Field tmpentrysfield = HttpServlet.class.getDeclaredField("_tmpentrys");
tmpentrysfield.setAccessible(true);
tmpentrysfield.set(obj, restactMethod.invoke(obj));
HashMap<String, HttpServlet.InnerActionEntry> innerEntryMap = (HashMap) restactMethod.invoke(obj);
for (Map.Entry<String, HttpServlet.InnerActionEntry> en : innerEntryMap.entrySet()) {
Method m = mappingurlToMethod.get(en.getKey());
if (m != null) en.getValue().annotations = HttpServlet.InnerActionEntry.annotations(m);
}
tmpentrysfield.set(obj, innerEntryMap);
return obj;
} catch (Throwable e) {
throw new RuntimeException(e);

View File

@@ -10,7 +10,7 @@ import static java.lang.annotation.ElementType.*;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
* 只能注解于RestService类的方法的参数或参数内的String字段
* 只能注解于RestService类的方法的参数或参数内的String、java.net.InetSocketAddress字段
* <p>
* 详情见: https://redkale.org
*

View File

@@ -11,10 +11,11 @@ import java.net.*;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.*;
import java.util.function.Supplier;
import java.util.function.*;
import java.util.logging.*;
import java.util.stream.Stream;
import org.redkale.convert.Convert;
import org.redkale.net.AsyncConnection;
import org.redkale.util.Comment;
/**
@@ -82,6 +83,10 @@ public abstract class WebSocket<G extends Serializable, T> {
WebSocketEngine _engine; //不可能为空
InetSocketAddress _sncpAddress; //分布式下不可为空
AsyncConnection _channel;//不可能为空
String _sessionid; //不可能为空
G _userid; //不可能为空
@@ -555,6 +560,17 @@ public abstract class WebSocket<G extends Serializable, T> {
return _engine.node.forceCloseWebSocket(userid);
}
/**
 * Returns the WebSocketNode this WebSocket belongs to.
 *
 * @return WebSocketNode (taken from _engine.node; _engine is documented above as never null)
 */
@Comment("获取WebSocketNode")
public final WebSocketNode webSocketNode() {
return _engine.node;
}
/**
* 获取当前WebSocket下的属性非线程安全
*
@@ -674,12 +690,21 @@ public abstract class WebSocket<G extends Serializable, T> {
}
/**
* 获取ByteBuffer资源池
* 获取ByteBuffer生成器
*
* @return Supplier
*/
protected Supplier<ByteBuffer> getByteBufferSupplier() {
return this._runner.context.getBufferSupplier();
protected Supplier<ByteBuffer> getBufferSupplier() {
return this._channel.getBufferSupplier();
}
/**
* 获取ByteBuffer回收器
*
* @return Consumer
*/
protected Consumer<ByteBuffer> getBufferConsumer() {
return this._channel.getBufferConsumer();
}
//-------------------------------------------------------------------
@@ -726,8 +751,11 @@ public abstract class WebSocket<G extends Serializable, T> {
/**
* WebSokcet连接成功后的回调方法
*
* @return Future 可以为null
*/
public void onConnected() {
public CompletableFuture onConnected() {
return null;
}
/**
@@ -791,8 +819,11 @@ public abstract class WebSocket<G extends Serializable, T> {
*
* @param code 结果码非0表示非正常关闭
* @param reason 关闭原因
*
* @return Future 可以为null
*/
public void onClose(int code, String reason) {
public CompletableFuture onClose(int code, String reason) {
return null;
}
/**
@@ -814,6 +845,15 @@ public abstract class WebSocket<G extends Serializable, T> {
return forceCloseWebSocket(getUserid()).thenApply((r) -> true);
}
/**
 * Returns the SNCP address of this node when running distributed; null when not distributed.
 *
 * @return InetSocketAddress the sncpAddress, or null in non-distributed mode
 */
public InetSocketAddress getSncpAddress() {
return _sncpAddress;
}
/**
* 获取Logger
*

View File

@@ -7,6 +7,7 @@ package org.redkale.net.http;
import static org.redkale.net.http.WebSocketServlet.DEFAILT_LIVEINTERVAL;
import java.io.*;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.*;
@@ -229,26 +230,45 @@ public class WebSocketEngine {
}
final boolean more = (!(message instanceof WebSocketPacket) || ((WebSocketPacket) message).sendBuffers == null);
if (more) {
Supplier<ByteBuffer> bufferSupplier = null;
Consumer<ByteBuffer> bufferConsumer = null;
//此处的WebSocketPacket只能是包含payload或bytes内容的不能包含sendConvert、sendJson、sendBuffers
final WebSocketPacket packet = (message instanceof WebSocketPacket) ? (WebSocketPacket) message
: ((message == null || message instanceof CharSequence || message instanceof byte[])
? new WebSocketPacket((Serializable) message, last) : new WebSocketPacket(this.sendConvert, false, message, last));
packet.setSendBuffers(packet.encode(context.getBufferSupplier(), context.getBufferConsumer(), cryptor));
//packet.setSendBuffers(packet.encode(context.getBufferSupplier(), context.getBufferConsumer(), cryptor));
CompletableFuture<Integer> future = null;
if (single) {
for (WebSocket websocket : websockets.values()) {
if (predicate != null && !predicate.test(websocket)) continue;
if (bufferSupplier == null) {
bufferSupplier = websocket.getBufferSupplier();
bufferConsumer = websocket.getBufferConsumer();
packet.setSendBuffers(packet.encode(bufferSupplier, bufferConsumer, cryptor));
}
future = future == null ? websocket.sendPacket(packet) : future.thenCombine(websocket.sendPacket(packet), (a, b) -> a | (Integer) b);
}
} else {
for (List<WebSocket> list : websockets2.values()) {
for (WebSocket websocket : list) {
if (predicate != null && !predicate.test(websocket)) continue;
if (bufferSupplier == null) {
bufferSupplier = websocket.getBufferSupplier();
bufferConsumer = websocket.getBufferConsumer();
packet.setSendBuffers(packet.encode(bufferSupplier, bufferConsumer, cryptor));
}
future = future == null ? websocket.sendPacket(packet) : future.thenCombine(websocket.sendPacket(packet), (a, b) -> a | (Integer) b);
}
}
}
if (future != null) future.whenComplete((rs, ex) -> context.offerBuffer(packet.sendBuffers));
final Consumer<ByteBuffer> bufferConsumer0 = bufferConsumer;
if (future != null) future.whenComplete((rs, ex) -> {
if (packet.sendBuffers != null && bufferConsumer0 != null) {
for (ByteBuffer buffer : packet.sendBuffers) {
bufferConsumer0.accept(buffer);
}
}
});
return future == null ? CompletableFuture.completedFuture(RETCODE_GROUP_EMPTY) : future;
} else {
CompletableFuture<Integer> future = null;
@@ -286,16 +306,23 @@ public class WebSocketEngine {
}
final boolean more = (!(message instanceof WebSocketPacket) || ((WebSocketPacket) message).sendBuffers == null) && userids.length > 1;
if (more) {
Supplier<ByteBuffer> bufferSupplier = null;
Consumer<ByteBuffer> bufferConsumer = null;
//此处的WebSocketPacket只能是包含payload或bytes内容的不能包含sendConvert、sendJson、sendBuffers
final WebSocketPacket packet = (message instanceof WebSocketPacket) ? (WebSocketPacket) message
: ((message == null || message instanceof CharSequence || message instanceof byte[])
? new WebSocketPacket((Serializable) message, last) : new WebSocketPacket(this.sendConvert, false, message, last));
packet.setSendBuffers(packet.encode(context.getBufferSupplier(), context.getBufferConsumer(), cryptor));
//packet.setSendBuffers(packet.encode(context.getBufferSupplier(), context.getBufferConsumer(), cryptor));
CompletableFuture<Integer> future = null;
if (single) {
for (Serializable userid : userids) {
WebSocket websocket = websockets.get(userid);
if (websocket == null) continue;
if (bufferSupplier == null) {
bufferSupplier = websocket.getBufferSupplier();
bufferConsumer = websocket.getBufferConsumer();
packet.setSendBuffers(packet.encode(bufferSupplier, bufferConsumer, cryptor));
}
future = future == null ? websocket.sendPacket(packet) : future.thenCombine(websocket.sendPacket(packet), (a, b) -> a | (Integer) b);
}
} else {
@@ -303,11 +330,23 @@ public class WebSocketEngine {
List<WebSocket> list = websockets2.get(userid);
if (list == null) continue;
for (WebSocket websocket : list) {
if (bufferSupplier == null) {
bufferSupplier = websocket.getBufferSupplier();
bufferConsumer = websocket.getBufferConsumer();
packet.setSendBuffers(packet.encode(bufferSupplier, bufferConsumer, cryptor));
}
future = future == null ? websocket.sendPacket(packet) : future.thenCombine(websocket.sendPacket(packet), (a, b) -> a | (Integer) b);
}
}
}
if (future != null) future.whenComplete((rs, ex) -> context.offerBuffer(packet.sendBuffers));
final Consumer<ByteBuffer> bufferConsumer0 = bufferConsumer;
if (future != null) future.whenComplete((rs, ex) -> {
if (packet.sendBuffers != null && bufferConsumer0 != null) {
for (ByteBuffer buffer : packet.sendBuffers) {
bufferConsumer0.accept(buffer);
}
}
});
return future == null ? CompletableFuture.completedFuture(RETCODE_GROUP_EMPTY) : future;
} else {
CompletableFuture<Integer> future = null;
@@ -413,6 +452,11 @@ public class WebSocketEngine {
return (int) websockets2.values().stream().mapToInt(sublist -> sublist.size()).count();
}
/**
 * Returns a snapshot of the userids currently connected to this local engine.
 *
 * @return Set of online userids (defensive copy; safe for the caller to mutate)
 */
@Comment("获取当前用户ID集合")
public Set<Serializable> getLocalUserSet() {
    // fix: the @Comment was copy-pasted from getLocalUserSize ("获取当前用户总数"),
    // but this method returns the id set, not a count.
    // LinkedHashSet copy so callers cannot mutate the engine's internal key set.
    return single ? new LinkedHashSet<>(websockets.keySet()) : new LinkedHashSet<>(websockets2.keySet());
}
@Comment("获取当前用户总数")
public int getLocalUserSize() {
return single ? websockets.size() : websockets2.size();

View File

@@ -11,7 +11,7 @@ import java.net.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.logging.*;
import java.util.stream.Stream;
import java.util.stream.*;
import javax.annotation.*;
import org.redkale.boot.*;
import org.redkale.convert.*;
@@ -208,6 +208,22 @@ public abstract class WebSocketNode {
return rs;
}
/**
 * Collects the set of online userids, rendered as Strings.
 *
 * Local-only mode (no distributed CacheSource) answers straight from the local engine;
 * otherwise the ids are recovered from the shared sncpNodeAddresses source by
 * stripping the SOURCE_SNCP_USERID_PREFIX from the matching keys.
 *
 * NOTE(review): assumes at least one of localEngine / sncpNodeAddresses is non-null,
 * like the sibling query methods — confirm callers guarantee this.
 *
 * @return CompletableFuture completed with the set of online userids as Strings
 */
public CompletableFuture<Set<String>> getUserSet() {
if (this.localEngine != null && this.sncpNodeAddresses == null) {
return CompletableFuture.completedFuture(new LinkedHashSet<>(this.localEngine.getLocalUserSet().stream().map(x -> String.valueOf(x)).collect(Collectors.toList())));
}
//distributed: keys look like SOURCE_SNCP_USERID_PREFIX + userid; strip the prefix to get the ids
tryAcquireSemaphore();
CompletableFuture<Set<String>> rs = this.sncpNodeAddresses.queryKeysStartsWithAsync(SOURCE_SNCP_USERID_PREFIX).thenApply(v -> new LinkedHashSet<>(v.stream().map(x -> x.substring(SOURCE_SNCP_USERID_PREFIX.length())).collect(Collectors.toList())));
if (semaphore != null) rs.whenComplete((r, e) -> releaseSemaphore());
return rs;
}
/**
* 判断指定用户是否WebSocket在线
*
@@ -217,6 +233,7 @@ public abstract class WebSocketNode {
*/
@Local
public CompletableFuture<Boolean> existsWebSocket(final Serializable userid) {
if (userid instanceof WebSocketUserAddress) return existsWebSocket((WebSocketUserAddress) userid);
CompletableFuture<Boolean> localFuture = null;
if (this.localEngine != null) localFuture = CompletableFuture.completedFuture(localEngine.existsLocalWebSocket(userid));
if (this.sncpNodeAddresses == null || this.remoteNode == null) {
@@ -229,7 +246,7 @@ public abstract class WebSocketNode {
CompletableFuture<Collection<InetSocketAddress>> addrsFuture = sncpNodeAddresses.getCollectionAsync(SOURCE_SNCP_USERID_PREFIX + userid, InetSocketAddress.class);
if (semaphore != null) addrsFuture.whenComplete((r, e) -> releaseSemaphore());
CompletableFuture<Boolean> remoteFuture = addrsFuture.thenCompose((Collection<InetSocketAddress> addrs) -> {
if (logger.isLoggable(Level.FINEST)) logger.finest("websocket found userid:" + userid + " on " + addrs);
//if (logger.isLoggable(Level.FINEST)) logger.finest("websocket found userid:" + userid + " on " + addrs);
if (addrs == null || addrs.isEmpty()) return CompletableFuture.completedFuture(false);
CompletableFuture<Boolean> future = null;
for (InetSocketAddress addr : addrs) {
@@ -242,6 +259,38 @@ public abstract class WebSocketNode {
return localFuture == null ? remoteFuture : localFuture.thenCombine(remoteFuture, (a, b) -> a | b);
}
/**
 * Checks whether the given user currently has a WebSocket online, probing the local
 * engine first and then the remote nodes named by the supplied address(es).
 *
 * @param userAddress WebSocketUserAddress carrying the userid and its candidate SNCP addresses
 *
 * @return CompletableFuture completed with true if the user is online anywhere
 */
@Local
public CompletableFuture<Boolean> existsWebSocket(final WebSocketUserAddress userAddress) {
    CompletableFuture<Boolean> localFuture = null;
    if (this.localEngine != null) localFuture = CompletableFuture.completedFuture(localEngine.existsLocalWebSocket(userAddress.userid()));
    if (this.sncpNodeAddresses == null || this.remoteNode == null) {
        if (logger.isLoggable(Level.FINEST)) logger.finest("websocket remote node is null");
        //no CacheSource means there are no distributed nodes
        //NOTE(review): may return null when localEngine is also null — same contract as existsWebSocket(Serializable)
        return localFuture;
    }
    //collect candidate remote addresses without mutating the caller's collection
    Collection<InetSocketAddress> addrs = userAddress.sncpAddresses();
    if (addrs != null) addrs = new ArrayList<>(addrs);
    if (userAddress.sncpAddress() != null) {
        if (addrs == null) addrs = new ArrayList<>();
        addrs.add(userAddress.sncpAddress());
    }
    CompletableFuture<Boolean> remoteFuture = null;
    if (addrs != null) {
        for (InetSocketAddress addr : addrs) {
            if (addr == null || addr.equals(localSncpAddress)) continue;
            remoteFuture = remoteFuture == null ? remoteNode.existsWebSocket(userAddress.userid(), addr)
                : remoteFuture.thenCombine(remoteNode.existsWebSocket(userAddress.userid(), addr), (a, b) -> a | b);
        }
    }
    if (remoteFuture == null) remoteFuture = CompletableFuture.completedFuture(false);
    //fix: the local result was previously dropped on the distributed path (a locally-online
    //user reported as offline); combine it like existsWebSocket(Serializable) does
    return localFuture == null ? remoteFuture : localFuture.thenCombine(remoteFuture, (a, b) -> a | b);
}
/**
* 强制关闭用户WebSocket
*
@@ -251,17 +300,45 @@ public abstract class WebSocketNode {
*/
@Local
public CompletableFuture<Integer> forceCloseWebSocket(final Serializable userid) {
return forceCloseWebSocket(userid, (WebSocketUserAddress) null);
}
/**
 * Forcibly closes the user's WebSocket connections, locating the user via the given address.
 * Delegates to the private forceCloseWebSocket(userid, userAddress) with a null userid.
 *
 * @param userAddress WebSocketUserAddress carrying the userid and its candidate SNCP addresses
 *
 * @return CompletableFuture of the close result (presumably the number of closed
 *         connections — confirm against forceCloseLocalWebSocket)
 */
@Local
public CompletableFuture<Integer> forceCloseWebSocket(final WebSocketUserAddress userAddress) {
return forceCloseWebSocket(null, userAddress);
}
private CompletableFuture<Integer> forceCloseWebSocket(final Serializable userid, final WebSocketUserAddress userAddress) {
CompletableFuture<Integer> localFuture = null;
if (this.localEngine != null) localFuture = CompletableFuture.completedFuture(localEngine.forceCloseLocalWebSocket(userid));
if (this.localEngine != null) localFuture = CompletableFuture.completedFuture(localEngine.forceCloseLocalWebSocket(userAddress == null ? userid : userAddress.userid()));
if (this.sncpNodeAddresses == null || this.remoteNode == null) {
if (logger.isLoggable(Level.FINEST)) logger.finest("websocket remote node is null");
//没有CacheSource就不会有分布式节点
return localFuture;
}
//远程节点关闭
tryAcquireSemaphore();
CompletableFuture<Collection<InetSocketAddress>> addrsFuture = sncpNodeAddresses.getCollectionAsync(SOURCE_SNCP_USERID_PREFIX + userid, InetSocketAddress.class);
if (semaphore != null) addrsFuture.whenComplete((r, e) -> releaseSemaphore());
CompletableFuture<Collection<InetSocketAddress>> addrsFuture;
if (userAddress == null) {
tryAcquireSemaphore();
addrsFuture = sncpNodeAddresses.getCollectionAsync(SOURCE_SNCP_USERID_PREFIX + userid, InetSocketAddress.class);
if (semaphore != null) addrsFuture.whenComplete((r, e) -> releaseSemaphore());
} else {
Collection<InetSocketAddress> addrs = userAddress.sncpAddresses();
if (addrs != null) addrs = new ArrayList<>(addrs); //不能修改参数内部值
if (userAddress.sncpAddress() != null) {
if (addrs == null) addrs = new ArrayList<>();
addrs.add(userAddress.sncpAddress());
}
if (addrs == null || addrs.isEmpty()) return CompletableFuture.completedFuture(0);
addrsFuture = CompletableFuture.completedFuture(addrs);
}
CompletableFuture<Integer> remoteFuture = addrsFuture.thenCompose((Collection<InetSocketAddress> addrs) -> {
if (logger.isLoggable(Level.FINEST)) logger.finest("websocket found userid:" + userid + " on " + addrs);
if (addrs == null || addrs.isEmpty()) return CompletableFuture.completedFuture(0);
@@ -292,88 +369,88 @@ public abstract class WebSocketNode {
* 向指定用户发送消息,先发送本地连接,再发送远程连接 <br>
* 如果当前WebSocketNode是远程模式此方法只发送远程连接
*
* @param message 消息内容
* @param userids Stream
* @param message 消息内容
* @param useridOrAddrs Stream
*
* @return 为0表示成功 其他值表示部分发送异常
*/
@Local
public final CompletableFuture<Integer> sendMessage(Object message, final Stream<? extends Serializable> userids) {
return sendMessage((Convert) null, message, true, userids);
public final CompletableFuture<Integer> sendMessage(Object message, final Stream<? extends Serializable> useridOrAddrs) {
return sendMessage((Convert) null, message, true, useridOrAddrs);
}
/**
* 向指定用户发送消息,先发送本地连接,再发送远程连接 <br>
* 如果当前WebSocketNode是远程模式此方法只发送远程连接
*
* @param message 消息内容
* @param userids Serializable[]
* @param message 消息内容
* @param useridOrAddrs Serializable[]
*
* @return 为0表示成功 其他值表示部分发送异常
*/
@Local
public final CompletableFuture<Integer> sendMessage(Object message, final Serializable... userids) {
return sendMessage((Convert) null, message, true, userids);
public final CompletableFuture<Integer> sendMessage(Object message, final Serializable... useridOrAddrs) {
return sendMessage((Convert) null, message, true, useridOrAddrs);
}
/**
* 向指定用户发送消息,先发送本地连接,再发送远程连接 <br>
* 如果当前WebSocketNode是远程模式此方法只发送远程连接
*
* @param convert Convert
* @param message 消息内容
* @param userids Stream
* @param convert Convert
* @param message 消息内容
* @param useridOrAddrs Stream
*
* @return 为0表示成功 其他值表示部分发送异常
*/
@Local
public final CompletableFuture<Integer> sendMessage(final Convert convert, Object message, final Stream<? extends Serializable> userids) {
return sendMessage(convert, message, true, userids);
public final CompletableFuture<Integer> sendMessage(final Convert convert, Object message, final Stream<? extends Serializable> useridOrAddrs) {
return sendMessage(convert, message, true, useridOrAddrs);
}
/**
* 向指定用户发送消息,先发送本地连接,再发送远程连接 <br>
* 如果当前WebSocketNode是远程模式此方法只发送远程连接
*
* @param convert Convert
* @param message 消息内容
* @param userids Serializable[]
* @param convert Convert
* @param message 消息内容
* @param useridOrAddrs Serializable[]
*
* @return 为0表示成功 其他值表示部分发送异常
*/
@Local
public final CompletableFuture<Integer> sendMessage(final Convert convert, Object message, final Serializable... userids) {
return sendMessage(convert, message, true, userids);
public final CompletableFuture<Integer> sendMessage(final Convert convert, Object message, final Serializable... useridOrAddrs) {
return sendMessage(convert, message, true, useridOrAddrs);
}
/**
* 向指定用户发送消息,先发送本地连接,再发送远程连接 <br>
* 如果当前WebSocketNode是远程模式此方法只发送远程连接
*
* @param message 消息内容
* @param last 是否最后一条
* @param userids Stream
* @param message 消息内容
* @param last 是否最后一条
* @param useridOrAddrs Stream
*
* @return 为0表示成功 其他值表示部分发送异常
*/
@Local
public final CompletableFuture<Integer> sendMessage(final Object message, final boolean last, final Stream<? extends Serializable> userids) {
return sendMessage((Convert) null, message, last, userids);
public final CompletableFuture<Integer> sendMessage(final Object message, final boolean last, final Stream<? extends Serializable> useridOrAddrs) {
return sendMessage((Convert) null, message, last, useridOrAddrs);
}
/**
* 向指定用户发送消息,先发送本地连接,再发送远程连接 <br>
* 如果当前WebSocketNode是远程模式此方法只发送远程连接
*
* @param message 消息内容
* @param last 是否最后一条
* @param userids Serializable[]
* @param message 消息内容
* @param last 是否最后一条
* @param useridOrAddrs Serializable[]
*
* @return 为0表示成功 其他值表示部分发送异常
*/
@Local
public final CompletableFuture<Integer> sendMessage(final Object message, final boolean last, final Serializable... userids) {
return sendMessage((Convert) null, message, last, userids);
public final CompletableFuture<Integer> sendMessage(final Object message, final boolean last, final Serializable... useridOrAddrs) {
return sendMessage((Convert) null, message, last, useridOrAddrs);
}
/**
@@ -411,6 +488,13 @@ public abstract class WebSocketNode {
@Local
public CompletableFuture<Integer> sendMessage(final Convert convert, final Object message0, final boolean last, final Serializable... userids) {
if (userids == null || userids.length < 1) return CompletableFuture.completedFuture(RETCODE_GROUP_EMPTY);
if (userids[0] instanceof WebSocketUserAddress) {
WebSocketUserAddress[] useraddrs = new WebSocketUserAddress[userids.length];
for (int i = 0; i < useraddrs.length; i++) {
useraddrs[i] = (WebSocketUserAddress) userids[i];
}
return sendMessage(convert, message0, last, useraddrs);
}
if (message0 instanceof CompletableFuture) return ((CompletableFuture) message0).thenApply(msg -> sendMessage(convert, msg, last, userids));
final Object message = (convert == null || message0 instanceof WebSocketPacket) ? message0 : ((convert instanceof TextConvert) ? new WebSocketPacket(((TextConvert) convert).convertTo(message0), last) : new WebSocketPacket(((BinaryConvert) convert).convertTo(message0), last));
if (this.localEngine != null && this.sncpNodeAddresses == null) { //本地模式且没有分布式
@@ -428,7 +512,7 @@ public abstract class WebSocketNode {
keyuser.put(keys[i], userids[i]);
}
tryAcquireSemaphore();
CompletableFuture<Map<String, Collection<InetSocketAddress>>> addrsFuture = sncpNodeAddresses.getCollectionMapAsync(InetSocketAddress.class, keys);
CompletableFuture<Map<String, Collection<InetSocketAddress>>> addrsFuture = sncpNodeAddresses.getCollectionMapAsync(true, InetSocketAddress.class, keys);
if (semaphore != null) addrsFuture.whenComplete((r, e) -> releaseSemaphore());
rsfuture = addrsFuture.thenCompose((Map<String, Collection<InetSocketAddress>> addrs) -> {
if (addrs == null || addrs.isEmpty()) {
@@ -446,9 +530,9 @@ public abstract class WebSocketNode {
}
CompletableFuture<Integer> future = null;
for (Map.Entry<InetSocketAddress, List<Serializable>> en : addrUsers.entrySet()) {
Serializable[] us = en.getValue().toArray(new Serializable[en.getValue().size()]);
future = future == null ? sendOneAddrMessage(en.getKey(), remoteMessage, last, us)
: future.thenCombine(sendOneAddrMessage(en.getKey(), remoteMessage, last, us), (a, b) -> a | b);
Serializable[] oneaddrUserids = en.getValue().toArray(new Serializable[en.getValue().size()]);
future = future == null ? sendOneAddrMessage(en.getKey(), remoteMessage, last, oneaddrUserids)
: future.thenCombine(sendOneAddrMessage(en.getKey(), remoteMessage, last, oneaddrUserids), (a, b) -> a | b);
}
return future == null ? CompletableFuture.completedFuture(RETCODE_GROUP_EMPTY) : future;
});
@@ -456,10 +540,45 @@ public abstract class WebSocketNode {
return rsfuture == null ? CompletableFuture.completedFuture(RETCODE_GROUP_EMPTY) : rsfuture;
}
/**
 * Sends a message to the given users: local connections first, then remote ones.
 * If this WebSocketNode runs in remote mode, only remote connections are used.
 *
 * @param convert   Convert used to serialize the message (null to send as-is)
 * @param message0  message content (may itself be a CompletableFuture of the content)
 * @param last      whether this is the final frame of the message
 * @param useraddrs target WebSocketUserAddress entries
 *
 * @return CompletableFuture of the result code: 0 means success, other values mean partial failure
 */
@Local
public CompletableFuture<Integer> sendMessage(final Convert convert, final Object message0, final boolean last, final WebSocketUserAddress... useraddrs) {
    if (useraddrs == null || useraddrs.length < 1) return CompletableFuture.completedFuture(RETCODE_GROUP_EMPTY);
    //fix: thenApply would complete the returned future with a nested CompletableFuture
    //(hidden by the raw type); thenCompose flattens it to the Integer result
    if (message0 instanceof CompletableFuture) return ((CompletableFuture) message0).thenCompose(msg -> sendMessage(convert, msg, last, useraddrs));
    //pre-encode once: text converts become text packets, binary converts become binary packets
    final Object message = (convert == null || message0 instanceof WebSocketPacket) ? message0 : ((convert instanceof TextConvert) ? new WebSocketPacket(((TextConvert) convert).convertTo(message0), last) : new WebSocketPacket(((BinaryConvert) convert).convertTo(message0), last));
    if (this.localEngine != null && this.sncpNodeAddresses == null) { //local mode without a distributed CacheSource
        return this.localEngine.sendLocalMessage(message, last, userAddressToUserids(useraddrs));
    }
    final Object remoteMessage = formatRemoteMessage(message);
    //group target userids by SNCP node so each node gets a single batched send
    final Map<InetSocketAddress, List<Serializable>> addrUsers = userAddressToAddrMap(useraddrs);
    if (logger.isLoggable(Level.FINEST)) {
        logger.finest("websocket(localaddr=" + localSncpAddress + ", useraddrs=" + JsonConvert.root().convertTo(useraddrs) + ") found message-addr-userids: " + addrUsers);
    }
    CompletableFuture<Integer> future = null;
    for (Map.Entry<InetSocketAddress, List<Serializable>> en : addrUsers.entrySet()) {
        Serializable[] oneaddrUserids = en.getValue().toArray(new Serializable[en.getValue().size()]);
        future = future == null ? sendOneAddrMessage(en.getKey(), remoteMessage, last, oneaddrUserids)
            : future.thenCombine(sendOneAddrMessage(en.getKey(), remoteMessage, last, oneaddrUserids), (a, b) -> a | b);
    }
    return future == null ? CompletableFuture.completedFuture(RETCODE_GROUP_EMPTY) : future;
}
protected CompletableFuture<Integer> sendOneUserMessage(final Object message, final boolean last, final Serializable userid) {
if (message instanceof CompletableFuture) return ((CompletableFuture) message).thenApply(msg -> sendOneUserMessage(msg, last, userid));
if (logger.isLoggable(Level.FINEST)) {
logger.finest("websocket want send message {userid:" + userid + ", content:'" + (message instanceof WebSocketPacket ? ((WebSocketPacket) message).toSimpleString() : JsonConvert.root().convertTo(message)) + "'} from locale node to " + ((this.localEngine != null) ? "locale" : "remote") + " engine");
logger.finest("websocket want send message {userid:" + userid + ", content:" + (message instanceof WebSocketPacket ? ((WebSocketPacket) message).toSimpleString() : (message instanceof CharSequence ? message : JsonConvert.root().convertTo(message))) + "} from locale node to " + ((this.localEngine != null) ? "locale" : "remote") + " engine");
}
CompletableFuture<Integer> localFuture = null;
if (this.localEngine != null) localFuture = localEngine.sendLocalMessage(message, last, userid);
@@ -492,8 +611,8 @@ public abstract class WebSocketNode {
protected CompletableFuture<Integer> sendOneAddrMessage(final InetSocketAddress sncpAddr, final Object message, final boolean last, final Serializable... userids) {
if (message instanceof CompletableFuture) return ((CompletableFuture) message).thenApply(msg -> sendOneAddrMessage(sncpAddr, msg, last, userids));
if (logger.isLoggable(Level.FINEST)) {
logger.finest("websocket want send message {userids:" + JsonConvert.root().convertTo(userids) + ", sncpaddr:" + sncpAddr + ", content:'" + (message instanceof WebSocketPacket ? ((WebSocketPacket) message).toSimpleString() : JsonConvert.root().convertTo(message)) + "'} from locale node to " + ((this.localEngine != null) ? "locale" : "remote") + " engine");
if (logger.isLoggable(Level.FINEST) && this.localEngine == null) { //只打印远程模式的
logger.finest("websocket want send message {userids:" + JsonConvert.root().convertTo(userids) + ", sncpaddr:" + sncpAddr + ", content:" + (message instanceof WebSocketPacket ? ((WebSocketPacket) message).toSimpleString() : (message instanceof CharSequence ? message : JsonConvert.root().convertTo(message))) + "} from locale node to " + ((this.localEngine != null) ? "locale" : "remote") + " engine");
}
if (Objects.equals(sncpAddr, this.localSncpAddress)) {
return this.localEngine == null ? CompletableFuture.completedFuture(RETCODE_GROUP_EMPTY) : localEngine.sendLocalMessage(message, last, userids);
@@ -507,6 +626,32 @@ public abstract class WebSocketNode {
return remoteNode.sendMessage(sncpAddr, remoteMessage, last, userids);
}
/**
 * Extracts the distinct userids from the given address entries.
 *
 * @param useraddrs WebSocketUserAddress entries (null allowed)
 *
 * @return distinct userids; empty array when useraddrs is null or empty
 */
protected Serializable[] userAddressToUserids(WebSocketUserAddress... useraddrs) {
    //fix: was `useraddrs.length == 1`, which returned an empty array for a single
    //recipient and made local-mode sends to one WebSocketUserAddress reach nobody
    if (useraddrs == null || useraddrs.length < 1) return new Serializable[0];
    //HashSet deduplicates userids that appear under multiple addresses
    Set<Serializable> set = new HashSet<>();
    for (WebSocketUserAddress userAddress : useraddrs) {
        set.add(userAddress.userid());
    }
    return set.toArray(new Serializable[set.size()]);
}
/**
 * Groups the userids of the given entries by SNCP node address.
 * A userid is listed under its single sncpAddress() (when set) and under every
 * non-null element of sncpAddresses() (when set); a userid may therefore appear
 * under several addresses.
 *
 * @param useraddrs WebSocketUserAddress entries to group
 *
 * @return map from SNCP node address to the userids destined for that node
 */
protected Map<InetSocketAddress, List<Serializable>> userAddressToAddrMap(WebSocketUserAddress... useraddrs) {
    final Map<InetSocketAddress, List<Serializable>> grouped = new HashMap<>();
    for (WebSocketUserAddress entry : useraddrs) {
        final InetSocketAddress primary = entry.sncpAddress();
        if (primary != null) {
            grouped.computeIfAbsent(primary, a -> new ArrayList<>()).add(entry.userid());
        }
        final Collection<InetSocketAddress> extras = entry.sncpAddresses();
        if (extras == null) continue;
        for (InetSocketAddress addr : extras) {
            if (addr == null) continue;
            grouped.computeIfAbsent(addr, a -> new ArrayList<>()).add(entry.userid());
        }
    }
    return grouped;
}
/**
* 广播消息, 给所有人发消息
*
@@ -678,6 +823,13 @@ public abstract class WebSocketNode {
@Local
public CompletableFuture<Integer> sendAction(final WebSocketAction action, final Serializable... userids) {
if (userids == null || userids.length < 1) return CompletableFuture.completedFuture(RETCODE_GROUP_EMPTY);
if (userids[0] instanceof WebSocketUserAddress) {
WebSocketUserAddress[] useraddrs = new WebSocketUserAddress[userids.length];
for (int i = 0; i < useraddrs.length; i++) {
useraddrs[i] = (WebSocketUserAddress) userids[i];
}
return sendAction(action, useraddrs);
}
if (this.localEngine != null && this.sncpNodeAddresses == null) { //本地模式且没有分布式
return this.localEngine.sendLocalAction(action, userids);
}
@@ -692,7 +844,7 @@ public abstract class WebSocketNode {
keyuser.put(keys[i], userids[i]);
}
tryAcquireSemaphore();
CompletableFuture<Map<String, Collection<InetSocketAddress>>> addrsFuture = sncpNodeAddresses.getCollectionMapAsync(InetSocketAddress.class, keys);
CompletableFuture<Map<String, Collection<InetSocketAddress>>> addrsFuture = sncpNodeAddresses.getCollectionMapAsync(true, InetSocketAddress.class, keys);
if (semaphore != null) addrsFuture.whenComplete((r, e) -> releaseSemaphore());
rsfuture = addrsFuture.thenCompose((Map<String, Collection<InetSocketAddress>> addrs) -> {
if (addrs == null || addrs.isEmpty()) {
@@ -710,9 +862,9 @@ public abstract class WebSocketNode {
}
CompletableFuture<Integer> future = null;
for (Map.Entry<InetSocketAddress, List<Serializable>> en : addrUsers.entrySet()) {
Serializable[] us = en.getValue().toArray(new Serializable[en.getValue().size()]);
future = future == null ? sendOneAddrAction(en.getKey(), action, us)
: future.thenCombine(sendOneAddrAction(en.getKey(), action, us), (a, b) -> a | b);
Serializable[] oneaddrUserids = en.getValue().toArray(new Serializable[en.getValue().size()]);
future = future == null ? sendOneAddrAction(en.getKey(), action, oneaddrUserids)
: future.thenCombine(sendOneAddrAction(en.getKey(), action, oneaddrUserids), (a, b) -> a | b);
}
return future == null ? CompletableFuture.completedFuture(RETCODE_GROUP_EMPTY) : future;
});
@@ -720,6 +872,35 @@ public abstract class WebSocketNode {
return rsfuture == null ? CompletableFuture.completedFuture(RETCODE_GROUP_EMPTY) : rsfuture;
}
/**
 * Sends an action to the given users: local connections first, then remote ones.
 * If this WebSocketNode runs in remote mode, only remote connections are used.
 *
 * @param action    the action to deliver
 * @param useraddrs target WebSocketUserAddress entries
 *
 * @return CompletableFuture of the result code: 0 means success, other values mean partial failure
 */
@Local
public CompletableFuture<Integer> sendAction(final WebSocketAction action, final WebSocketUserAddress... useraddrs) {
if (useraddrs == null || useraddrs.length < 1) return CompletableFuture.completedFuture(RETCODE_GROUP_EMPTY);
if (this.localEngine != null && this.sncpNodeAddresses == null) { //local mode without a distributed CacheSource
return this.localEngine.sendLocalAction(action, userAddressToUserids(useraddrs));
}
//group target userids by SNCP node so each node gets a single batched action
final Map<InetSocketAddress, List<Serializable>> addrUsers = userAddressToAddrMap(useraddrs);
if (logger.isLoggable(Level.FINEST)) {
logger.finest("websocket(localaddr=" + localSncpAddress + ", useraddrs=" + JsonConvert.root().convertTo(useraddrs) + ") found action-userid-addrs: " + addrUsers);
}
CompletableFuture<Integer> future = null;
for (Map.Entry<InetSocketAddress, List<Serializable>> en : addrUsers.entrySet()) {
Serializable[] oneaddrUserids = en.getValue().toArray(new Serializable[en.getValue().size()]);
future = future == null ? sendOneAddrAction(en.getKey(), action, oneaddrUserids)
: future.thenCombine(sendOneAddrAction(en.getKey(), action, oneaddrUserids), (a, b) -> a | b);
}
return future == null ? CompletableFuture.completedFuture(RETCODE_GROUP_EMPTY) : future;
}
protected CompletableFuture<Integer> sendOneUserAction(final WebSocketAction action, final Serializable userid) {
if (logger.isLoggable(Level.FINEST)) {
logger.finest("websocket want send action {userid:" + userid + ", action:" + action + "} from locale node to " + ((this.localEngine != null) ? "locale" : "remote") + " engine");
@@ -753,7 +934,7 @@ public abstract class WebSocketNode {
}
protected CompletableFuture<Integer> sendOneAddrAction(final InetSocketAddress sncpAddr, final WebSocketAction action, final Serializable... userids) {
if (logger.isLoggable(Level.FINEST)) {
if (logger.isLoggable(Level.FINEST) && this.localEngine == null) { //只打印远程模式的
logger.finest("websocket want send action {userids:" + JsonConvert.root().convertTo(userids) + ", sncpaddr:" + sncpAddr + ", action:" + action + " from locale node to " + ((this.localEngine != null) ? "locale" : "remote") + " engine");
}
if (Objects.equals(sncpAddr, this.localSncpAddress)) {

View File

@@ -492,7 +492,7 @@ public final class WebSocketPacket {
void parseReceiveMessage(final Logger logger, WebSocketRunner runner, WebSocket webSocket, ByteBuffer... buffers) {
if (webSocket._engine.cryptor != null) {
HttpContext context = webSocket._engine.context;
buffers = webSocket._engine.cryptor.decrypt(buffers, context.getBufferSupplier(), context.getBufferConsumer());
buffers = webSocket._engine.cryptor.decrypt(buffers, webSocket._channel.getBufferSupplier(), webSocket._channel.getBufferConsumer());
}
FrameType selfType = this.type;
final boolean series = selfType == FrameType.SERIES;

View File

@@ -5,6 +5,10 @@
*/
package org.redkale.net.http;
import java.lang.annotation.Annotation;
import java.lang.reflect.Array;
import java.util.Arrays;
/**
*
* 供WebSocket.preOnMessage 方法获取RestWebSocket里OnMessage方法的参数 <br>
@@ -16,6 +20,29 @@ package org.redkale.net.http;
public interface WebSocketParam {

    /**
     * Returns the value of the named OnMessage parameter, or null if absent.
     *
     * @param <T>  expected parameter type
     * @param name parameter name
     * @return parameter value or null
     */
    public <T> T getValue(String name);

    /** Returns the names of all OnMessage parameters. */
    public String[] getNames();

    /** Returns the annotations declared on the OnMessage method's parameters. */
    public Annotation[] getAnnotations();

    /**
     * Returns the first annotation of the given type, or null when none matches.
     *
     * @param <T>             annotation type
     * @param annotationClass annotation interface to look up
     * @return matching annotation instance or null
     */
    @SuppressWarnings("unchecked")
    default <T extends Annotation> T getAnnotation(Class<T> annotationClass) {
        Annotation[] annotations = getAnnotations();
        if (annotations == null) return null; // keep consistent with getAnnotationsByType
        for (Annotation ann : annotations) {
            // Annotation instances are runtime proxies: ann.getClass() is the proxy class
            // and never equals the annotation interface. annotationType() is the declared
            // annotation interface and is the correct comparison.
            if (ann.annotationType() == annotationClass) return (T) ann;
        }
        return null;
    }

    /**
     * Returns all annotations of the given type; an empty (typed) array when none match.
     *
     * @param <T>             annotation type
     * @param annotationClass annotation interface to look up
     * @return array of matching annotations, possibly empty, never null
     */
    @SuppressWarnings("unchecked")
    default <T extends Annotation> T[] getAnnotationsByType(Class<T> annotationClass) {
        Annotation[] annotations = getAnnotations();
        if (annotations == null) return (T[]) Array.newInstance(annotationClass, 0);
        T[] news = (T[]) Array.newInstance(annotationClass, annotations.length);
        int index = 0;
        for (Annotation ann : annotations) {
            // see getAnnotation: must compare annotationType(), not the proxy class
            if (ann.annotationType() == annotationClass) {
                news[index++] = (T) ann;
            }
        }
        if (index < 1) return (T[]) Array.newInstance(annotationClass, 0);
        return Arrays.copyOf(news, index);
    }
}

View File

@@ -18,35 +18,47 @@ import org.redkale.convert.json.JsonConvert;
* @author zhangjx
*/
public class WebSocketRange implements Serializable {
protected String wskey;
protected Map<String, String> attach;
/** Creates an empty range; used by bean/JSON deserialization. */
public WebSocketRange() {
}

/** Creates a range keyed by {@code wskey} with no attachment map. */
public WebSocketRange(String wskey) {
this.wskey = wskey;
}

/** Creates a range keyed by {@code wskey} carrying the given attachment map (may be null). */
public WebSocketRange(String wskey, Map<String, String> attach) {
this.wskey = wskey;
this.attach = attach;
}
/**
 * Reports whether the attachment map exists and contains the given key.
 *
 * @param key attachment key
 * @return true only when an attach map is present and holds the key
 */
public boolean containsAttach(String key) {
    return this.attach != null && this.attach.containsKey(key);
}
/**
 * Returns the attachment value for the given key, or null when the
 * attachment map is absent or has no such key.
 *
 * @param key attachment key
 * @return attachment value or null
 */
public String getAttach(String key) {
    Map<String, String> map = this.attach;
    if (map == null) return null;
    return map.get(key);
}
/**
 * Returns the attachment value for the given key, falling back to
 * {@code defval} when the attachment map is absent or has no such key.
 *
 * @param key    attachment key
 * @param defval default value
 * @return attachment value or {@code defval}
 */
public String getAttach(String key, String defval) {
    Map<String, String> map = this.attach;
    if (map == null) return defval;
    return map.getOrDefault(key, defval);
}
/** Returns the websocket range key. */
public String getWskey() {
return wskey;
}

/** Sets the websocket range key. */
public void setWskey(String wskey) {
this.wskey = wskey;
}

/** Returns the attachment map; may be null when no attachments were set. */
public Map<String, String> getAttach() {
return attach;
}

/** Sets the attachment map (stored as-is, not copied). */
public void setAttach(Map<String, String> attach) {
this.attach = attach;
}

View File

@@ -5,7 +5,6 @@
*/
package org.redkale.net.http;
import org.redkale.net.AsyncConnection;
import static org.redkale.net.http.WebSocket.*;
import org.redkale.net.http.WebSocketPacket.FrameType;
import java.nio.ByteBuffer;
@@ -29,14 +28,16 @@ class WebSocketRunner implements Runnable {
private final WebSocketEngine engine;
private final AsyncConnection channel;
private final WebSocket webSocket;
protected final HttpContext context;
protected final boolean mergemsg;
protected final Semaphore writeSemaphore = new Semaphore(1);
protected final LinkedBlockingQueue<WriteEntry> writeQueue = new LinkedBlockingQueue();
volatile boolean closed = false;
FrameType currSeriesMergeFrameType;
@@ -49,13 +50,12 @@ class WebSocketRunner implements Runnable {
protected long lastReadTime;
WebSocketRunner(HttpContext context, WebSocket webSocket, BiConsumer<WebSocket, Object> messageConsumer, AsyncConnection channel) {
WebSocketRunner(HttpContext context, WebSocket webSocket, BiConsumer<WebSocket, Object> messageConsumer) {
this.context = context;
this.engine = webSocket._engine;
this.webSocket = webSocket;
this.mergemsg = webSocket._engine.mergemsg;
this.restMessageConsumer = messageConsumer;
this.channel = channel;
}
@Override
@@ -63,11 +63,12 @@ class WebSocketRunner implements Runnable {
final boolean debug = context.getLogger().isLoggable(Level.FINEST);
final WebSocketRunner self = this;
try {
webSocket.onConnected();
channel.setReadTimeoutSeconds(300); //读取超时5分钟
if (channel.isOpen()) {
CompletableFuture connectfFuture = webSocket.onConnected();
if (connectfFuture != null) connectfFuture.join();
webSocket._channel.setReadTimeoutSeconds(300); //读取超时5分钟
if (webSocket._channel.isOpen()) {
final int wsmaxbody = webSocket._engine.wsmaxbody;
channel.read(new CompletionHandler<Integer, ByteBuffer>() {
webSocket._channel.read(new CompletionHandler<Integer, ByteBuffer>() {
//尚未解析完的数据包
private WebSocketPacket unfinishPacket;
@@ -94,11 +95,11 @@ class WebSocketRunner implements Runnable {
onePacket = unfinishPacket;
unfinishPacket = null;
for (ByteBuffer b : exBuffers) {
context.offerBuffer(b);
webSocket._channel.offerBuffer(b);
}
exBuffers.clear();
} else { //需要继续接收, 此处不能回收readBuffer
channel.read(this);
webSocket._channel.read(this);
return;
}
}
@@ -125,7 +126,7 @@ class WebSocketRunner implements Runnable {
}
//继续监听消息
if (readBuffer.hasRemaining()) { //exBuffers缓存了
readBuffer = context.pollBuffer();
readBuffer = webSocket._channel.pollReadBuffer();
} else {
readBuffer.clear();
}
@@ -133,8 +134,8 @@ class WebSocketRunner implements Runnable {
readBuffer.put(halfBytes.getValue());
halfBytes.setValue(null);
}
channel.setReadBuffer(readBuffer);
channel.read(this);
webSocket._channel.setReadBuffer(readBuffer);
webSocket._channel.read(this);
//消息处理
for (final WebSocketPacket packet : packets) {
@@ -229,11 +230,9 @@ class WebSocketRunner implements Runnable {
//System.out.println("推送消息");
final CompletableFuture<Integer> futureResult = new CompletableFuture<>();
try {
ByteBuffer[] buffers = packet.sendBuffers != null ? packet.duplicateSendBuffers() : packet.encode(this.context.getBufferSupplier(), this.context.getBufferConsumer(), webSocket._engine.cryptor);
ByteBuffer[] buffers = packet.sendBuffers != null ? packet.duplicateSendBuffers() : packet.encode(webSocket._channel.getBufferSupplier(), webSocket._channel.getBufferConsumer(), webSocket._engine.cryptor);
//if (debug) context.getLogger().log(Level.FINEST, "wsrunner.sending websocket message: " + packet);
this.lastSendTime = System.currentTimeMillis();
channel.write(buffers, buffers, new CompletionHandler<Integer, ByteBuffer[]>() {
CompletionHandler<Integer, ByteBuffer[]> handler = new CompletionHandler<Integer, ByteBuffer[]>() {
private CompletableFuture<Integer> future = futureResult;
@@ -245,7 +244,7 @@ class WebSocketRunner implements Runnable {
future = null;
if (attachments != null) {
for (ByteBuffer buf : attachments) {
context.offerBuffer(buf);
webSocket._channel.offerBuffer(buf);
}
}
}
@@ -260,7 +259,7 @@ class WebSocketRunner implements Runnable {
}
}
if (index >= 0) { //ByteBuffer[]统一回收的可以采用此写法
channel.write(attachments, index, attachments.length - index, attachments, this);
webSocket._channel.write(attachments, index, attachments.length - index, attachments, this);
return;
}
if (future != null) {
@@ -268,7 +267,7 @@ class WebSocketRunner implements Runnable {
future = null;
if (attachments != null) {
for (ByteBuffer buf : attachments) {
context.offerBuffer(buf);
webSocket._channel.offerBuffer(buf);
}
}
}
@@ -288,7 +287,14 @@ class WebSocketRunner implements Runnable {
}
}
});
};
this.lastSendTime = System.currentTimeMillis();
if (writeSemaphore.tryAcquire()) {
webSocket._channel.write(buffers, buffers, handler);
} else {
writeQueue.add(new WriteEntry(buffers, handler));
}
} catch (Exception t) {
futureResult.complete(RETCODE_SENDEXCEPTION);
closeRunner(RETCODE_SENDEXCEPTION, "websocket send message failed on channel.write");
@@ -297,7 +303,14 @@ class WebSocketRunner implements Runnable {
}
}
return futureResult;
return futureResult.whenComplete((r, t) -> {
WriteEntry entry = writeQueue.poll();
if (entry != null) {
webSocket._channel.write(entry.writeBuffers, entry.writeBuffers, entry.writeHandler);
} else {
writeSemaphore.release();
}
});
}
public boolean isClosed() {
@@ -310,10 +323,23 @@ class WebSocketRunner implements Runnable {
if (closed) return null;
closed = true;
CompletableFuture<Void> future = engine.removeLocalThenClose(webSocket);
channel.dispose();
webSocket.onClose(code, reason);
return future;
webSocket._channel.dispose();
CompletableFuture closeFuture = webSocket.onClose(code, reason);
if (closeFuture == null) return future;
return CompletableFuture.allOf(future, closeFuture);
}
}
/**
 * A pending websocket write: the buffers queued while another write is in
 * flight on the channel, together with the completion handler to invoke
 * when they are eventually written. Entries are drained from writeQueue
 * as each in-flight write completes.
 */
private static class WriteEntry {

    // parameterized to match the handler declared for channel.write(ByteBuffer[], ...);
    // fields are final: an entry is an immutable snapshot of one queued write
    final ByteBuffer[] writeBuffers;

    final CompletionHandler<Integer, ByteBuffer[]> writeHandler;

    public WriteEntry(ByteBuffer[] writeBuffers, CompletionHandler<Integer, ByteBuffer[]> writeHandler) {
        this.writeBuffers = writeBuffers;
        this.writeHandler = writeHandler;
    }
}
}

View File

@@ -202,12 +202,14 @@ public abstract class WebSocketServlet extends HttpServlet implements Resourcabl
}
final WebSocket webSocket = this.createWebSocket();
webSocket._engine = this.node.localEngine;
webSocket._channel = response.getChannel();
webSocket._messageTextType = this.messageTextType;
webSocket._textConvert = textConvert;
webSocket._binaryConvert = binaryConvert;
webSocket._sendConvert = sendConvert;
webSocket._remoteAddress = request.getRemoteAddress();
webSocket._remoteAddr = request.getRemoteAddr();
webSocket._sncpAddress = this.node.localSncpAddress;
initRestWebSocket(webSocket);
CompletableFuture<String> sessionFuture = webSocket.onOpen(request);
if (sessionFuture == null) {
@@ -262,7 +264,8 @@ public abstract class WebSocketServlet extends HttpServlet implements Resourcabl
Consumer<Boolean> task = (oldkilled) -> {
if (oldkilled) {
WebSocketServlet.this.node.localEngine.addLocal(webSocket);
WebSocketRunner runner = new WebSocketRunner(context, webSocket, restMessageConsumer, response.removeChannel());
response.removeChannel();
WebSocketRunner runner = new WebSocketRunner(context, webSocket, restMessageConsumer);
webSocket._runner = runner;
context.runAsync(runner);
response.finish(true);
@@ -283,7 +286,8 @@ public abstract class WebSocketServlet extends HttpServlet implements Resourcabl
}
} else {
WebSocketServlet.this.node.localEngine.addLocal(webSocket);
WebSocketRunner runner = new WebSocketRunner(context, webSocket, restMessageConsumer, response.removeChannel());
response.removeChannel();
WebSocketRunner runner = new WebSocketRunner(context, webSocket, restMessageConsumer);
webSocket._runner = runner;
context.runAsync(runner);
response.finish(true);
@@ -291,14 +295,15 @@ public abstract class WebSocketServlet extends HttpServlet implements Resourcabl
});
} else {
WebSocketServlet.this.node.localEngine.addLocal(webSocket);
WebSocketRunner runner = new WebSocketRunner(context, webSocket, restMessageConsumer, response.removeChannel());
response.removeChannel();
WebSocketRunner runner = new WebSocketRunner(context, webSocket, restMessageConsumer);
webSocket._runner = runner;
context.runAsync(runner);
response.finish(true);
}
};
if (webSocket.delayPackets != null) { //存在待发送的消息
if (temprunner == null) temprunner = new WebSocketRunner(context, webSocket, restMessageConsumer, response.getChannel());
if (temprunner == null) temprunner = new WebSocketRunner(context, webSocket, restMessageConsumer);
List<WebSocketPacket> delayPackets = webSocket.delayPackets;
webSocket.delayPackets = null;
CompletableFuture<Integer> cf = null;
@@ -323,7 +328,7 @@ public abstract class WebSocketServlet extends HttpServlet implements Resourcabl
});
};
if (webSocket.delayPackets != null) { //存在待发送的消息
if (temprunner == null) temprunner = new WebSocketRunner(context, webSocket, restMessageConsumer, response.getChannel());
if (temprunner == null) temprunner = new WebSocketRunner(context, webSocket, restMessageConsumer);
List<WebSocketPacket> delayPackets = webSocket.delayPackets;
webSocket.delayPackets = null;
CompletableFuture<Integer> cf = null;

View File

@@ -0,0 +1,107 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkale.net.http;
import java.io.Serializable;
import java.net.InetSocketAddress;
import java.util.Collection;
import org.redkale.convert.json.JsonConvert;
/**
* userid 与 sncpaddress组合对象
*
*
* @author zhangjx
*/
/**
 * Pairing of a userid with the sncp address or addresses of the node(s)
 * currently holding that user's websocket connections.
 */
public interface WebSocketUserAddress extends Serializable {

    /** The user's id. */
    Serializable userid();

    /** The single sncp node address, when exactly one applies. */
    InetSocketAddress sncpAddress();

    /** The collection of sncp node addresses, when several apply. */
    Collection<InetSocketAddress> sncpAddresses();

    /** Copies an arbitrary WebSocketUserAddress into a concrete serializable bean. */
    public static WebSocketUserAddress create(WebSocketUserAddress userAddress) {
        return new SimpleWebSocketUserAddress(userAddress);
    }

    /** Builds an entry bound to one sncp node address. */
    public static WebSocketUserAddress create(Serializable userid, InetSocketAddress sncpAddress) {
        return new SimpleWebSocketUserAddress(userid, sncpAddress, null);
    }

    /** Builds an entry bound to a collection of sncp node addresses. */
    public static WebSocketUserAddress create(Serializable userid, Collection<InetSocketAddress> sncpAddresses) {
        return new SimpleWebSocketUserAddress(userid, null, sncpAddresses);
    }

    /** Plain-bean implementation of {@link WebSocketUserAddress}. */
    public static class SimpleWebSocketUserAddress implements WebSocketUserAddress {

        private Serializable userid;

        private InetSocketAddress sncpAddress;

        private Collection<InetSocketAddress> sncpAddresses;

        /** No-arg constructor for bean/JSON deserialization. */
        public SimpleWebSocketUserAddress() {
        }

        public SimpleWebSocketUserAddress(Serializable userid, InetSocketAddress sncpAddress, Collection<InetSocketAddress> sncpAddresses) {
            this.userid = userid;
            this.sncpAddress = sncpAddress;
            this.sncpAddresses = sncpAddresses;
        }

        /** Copy constructor; a null source leaves all fields null. */
        public SimpleWebSocketUserAddress(WebSocketUserAddress userAddress) {
            if (userAddress != null) {
                this.userid = userAddress.userid();
                this.sncpAddress = userAddress.sncpAddress();
                this.sncpAddresses = userAddress.sncpAddresses();
            }
        }

        @Override
        public Serializable userid() {
            return this.userid;
        }

        @Override
        public InetSocketAddress sncpAddress() {
            return this.sncpAddress;
        }

        @Override
        public Collection<InetSocketAddress> sncpAddresses() {
            return this.sncpAddresses;
        }

        public Serializable getUserid() {
            return this.userid;
        }

        public void setUserid(Serializable userid) {
            this.userid = userid;
        }

        public InetSocketAddress getSncpAddress() {
            return this.sncpAddress;
        }

        public void setSncpAddress(InetSocketAddress sncpAddress) {
            this.sncpAddress = sncpAddress;
        }

        public Collection<InetSocketAddress> getSncpAddresses() {
            return this.sncpAddresses;
        }

        public void setSncpAddresses(Collection<InetSocketAddress> sncpAddresses) {
            this.sncpAddresses = sncpAddresses;
        }

        @Override
        public String toString() {
            return JsonConvert.root().convertTo(this);
        }
    }
}

View File

@@ -332,9 +332,10 @@ public final class SncpClient {
return bsonConvert.convertFrom(action.handlerFuncParamIndex >= 0 ? Object.class : action.resultTypes, reader);
} catch (RpcRemoteException re) {
throw re;
} catch (InterruptedException | ExecutionException | TimeoutException e) {
//logger.log(Level.SEVERE, actions[index].method + " sncp (params: " + jsonConvert.convertTo(params) + ") remote error", e);
throw new RpcRemoteException(actions[index].method + " sncp remote error", e);
} catch (TimeoutException e) {
throw new RpcRemoteException(actions[index].method + " sncp remote timeout, params=" + JsonConvert.root().convertTo(params));
} catch (InterruptedException | ExecutionException e) {
throw new RpcRemoteException(actions[index].method + " sncp remote error, params=" + JsonConvert.root().convertTo(params), e);
} finally {
bsonConvert.offerBsonReader(reader);
}
@@ -348,7 +349,12 @@ public final class SncpClient {
final BsonWriter writer = bsonConvert.pollBsonWriter(transport.getBufferSupplier()); // 将head写入
writer.writeTo(DEFAULT_HEADER);
for (int i = 0; i < params.length; i++) { //params 可能包含: 3 个 boolean
bsonConvert.convertTo(writer, CompletionHandler.class.isAssignableFrom(myparamclass[i]) ? CompletionHandler.class : myparamtypes[i], params[i]);
BsonConvert bcc = bsonConvert;
if (params[i] instanceof org.redkale.service.RetResult) {
org.redkale.convert.Convert cc = ((org.redkale.service.RetResult) params[i]).convert();
if (cc instanceof BsonConvert) bcc = (BsonConvert) cc;
}
bcc.convertTo(writer, CompletionHandler.class.isAssignableFrom(myparamclass[i]) ? CompletionHandler.class : myparamtypes[i], params[i]);
}
final int reqBodyLength = writer.count() - HEADER_SIZE; //body总长度
final long seqid = System.nanoTime();
@@ -358,12 +364,12 @@ public final class SncpClient {
return connFuture.thenCompose(conn0 -> {
final CompletableFuture<byte[]> future = new CompletableFuture();
if (conn0 == null) {
future.completeExceptionally(new RuntimeException("sncp " + (conn0 == null ? addr : conn0.getRemoteAddress()) + " cannot connect"));
future.completeExceptionally(new RpcRemoteException("sncp " + (conn0 == null ? addr : conn0.getRemoteAddress()) + " cannot connect, params=" + JsonConvert.root().convertTo(params)));
return future;
}
if (!conn0.isOpen()) {
conn0.dispose();
future.completeExceptionally(new RuntimeException("sncp " + (conn0 == null ? addr : conn0.getRemoteAddress()) + " cannot connect"));
future.completeExceptionally(new RpcRemoteException("sncp " + (conn0 == null ? addr : conn0.getRemoteAddress()) + " cannot connect, params=" + JsonConvert.root().convertTo(params)));
return future;
}
final AsyncConnection conn = conn0;
@@ -403,7 +409,7 @@ public final class SncpClient {
public void completed(Integer count, ByteBuffer buffer) {
try {
if (count < 1 && buffer.remaining() == buffer.limit()) { //没有数据可读
future.completeExceptionally(new RpcRemoteException(action.method + " sncp[" + conn.getRemoteAddress() + "] remote no response data"));
future.completeExceptionally(new RpcRemoteException(action.method + " sncp[" + conn.getRemoteAddress() + "] remote no response data, params=" + JsonConvert.root().convertTo(params)));
conn.offerBuffer(buffer);
transport.offerConnection(true, conn);
return;
@@ -433,7 +439,7 @@ public final class SncpClient {
final int respBodyLength = buffer.getInt();
final int retcode = buffer.getInt();
if (retcode != 0) {
logger.log(Level.SEVERE, action.method + " sncp (params: " + convert.convertTo(params) + ") deal error (retcode=" + retcode + ", retinfo=" + SncpResponse.getRetCodeInfo(retcode) + ")");
logger.log(Level.SEVERE, action.method + " sncp (params: " + convert.convertTo(params) + ") deal error (retcode=" + retcode + ", retinfo=" + SncpResponse.getRetCodeInfo(retcode) + "), params=" + JsonConvert.root().convertTo(params));
throw new RuntimeException("remote service(" + action.method + ") deal error (retcode=" + retcode + ", retinfo=" + SncpResponse.getRetCodeInfo(retcode) + ")");
}
@@ -451,7 +457,7 @@ public final class SncpClient {
success();
}
} catch (Throwable e) {
future.completeExceptionally(new RuntimeException(action.method + " sncp[" + conn.getRemoteAddress() + "] remote response error"));
future.completeExceptionally(new RpcRemoteException(action.method + " sncp[" + conn.getRemoteAddress() + "] remote response error, params=" + JsonConvert.root().convertTo(params)));
transport.offerConnection(true, conn);
if (handler != null) {
final Object handlerAttach = action.handlerAttachParamIndex >= 0 ? params[action.handlerAttachParamIndex] : null;
@@ -487,27 +493,27 @@ public final class SncpClient {
@Override
public void failed(Throwable exc, ByteBuffer attachment2) {
future.completeExceptionally(new RuntimeException(action.method + " sncp remote exec failed"));
future.completeExceptionally(new RpcRemoteException(action.method + " sncp remote exec failed, params=" + JsonConvert.root().convertTo(params)));
conn.offerBuffer(attachment2);
transport.offerConnection(true, conn);
if (handler != null) {
final Object handlerAttach = action.handlerAttachParamIndex >= 0 ? params[action.handlerAttachParamIndex] : null;
handler.failed(exc, handlerAttach);
}
logger.log(Level.SEVERE, action.method + " sncp (params: " + convert.convertTo(params) + ") remote read exec failed", exc);
logger.log(Level.SEVERE, action.method + " sncp (params: " + convert.convertTo(params) + ") remote read exec failed, params=" + JsonConvert.root().convertTo(params), exc);
}
});
}
@Override
public void failed(Throwable exc, ByteBuffer[] attachment) {
future.completeExceptionally(new RuntimeException(action.method + " sncp remote exec failed"));
future.completeExceptionally(new RpcRemoteException(action.method + " sncp remote exec failed, params=" + JsonConvert.root().convertTo(params)));
transport.offerConnection(true, conn);
if (handler != null) {
final Object handlerAttach = action.handlerAttachParamIndex >= 0 ? params[action.handlerAttachParamIndex] : null;
handler.failed(exc, handlerAttach);
}
logger.log(Level.SEVERE, action.method + " sncp (params: " + convert.convertTo(params) + ") remote write exec failed", exc);
logger.log(Level.SEVERE, action.method + " sncp (params: " + convert.convertTo(params) + ") remote write exec failed, params=" + JsonConvert.root().convertTo(params), exc);
}
});
return future;

View File

@@ -112,7 +112,7 @@ public final class SncpDynServlet extends SncpServlet {
@SuppressWarnings("unchecked")
public void execute(SncpRequest request, SncpResponse response) throws IOException {
if (bufferSupplier == null) {
bufferSupplier = request.getContext().getBufferSupplier();
bufferSupplier = request.getBufferPool();
}
final SncpServletAction action = actions.get(request.getActionid());
//logger.log(Level.FINEST, "sncpdyn.execute: " + request + ", " + (action == null ? "null" : action.method));

View File

@@ -45,11 +45,15 @@ public final class SncpRequest extends Request<SncpContext> {
private byte[] bufferbytes = new byte[6];
protected SncpRequest(SncpContext context) {
super(context);
protected SncpRequest(SncpContext context, ObjectPool<ByteBuffer> bufferPool) {
super(context, bufferPool);
this.convert = context.getBsonConvert();
}
protected ObjectPool<ByteBuffer> getBufferPool() {
return this.bufferPool;
}
@Override
protected int readHeader(ByteBuffer buffer) {
if (buffer.remaining() < HEADER_SIZE) {

View File

@@ -21,13 +21,13 @@ import org.redkale.util.*;
*/
public final class SncpResponse extends Response<SncpContext, SncpRequest> {
public static final int RETCODE_ILLSERVICEID = (1 << 10); //无效serviceid
public static final int RETCODE_ILLSERVICEID = (1 << 1); //无效serviceid
public static final int RETCODE_ILLSERVICEVER = (1 << 11); //无效serviceversion
public static final int RETCODE_ILLSERVICEVER = (1 << 2); //无效serviceversion
public static final int RETCODE_ILLACTIONID = (1 << 15); //无效actionid
public static final int RETCODE_ILLACTIONID = (1 << 3); //无效actionid
public static final int RETCODE_THROWEXCEPTION = (1 << 30); //内部异常
public static final int RETCODE_THROWEXCEPTION = (1 << 4); //内部异常
public static ObjectPool<Response> createPool(AtomicLong creatCounter, AtomicLong cycleCounter, int max, Creator<Response> creator) {
return new ObjectPool<>(creatCounter, cycleCounter, max, creator, (x) -> ((SncpResponse) x).prepare(), (x) -> ((SncpResponse) x).recycle());
@@ -45,8 +45,8 @@ public final class SncpResponse extends Response<SncpContext, SncpRequest> {
return null;
}
protected SncpResponse(SncpContext context, SncpRequest request) {
super(context, request);
protected SncpResponse(SncpContext context, SncpRequest request, ObjectPool<Response> responsePool) {
super(context, request, responsePool);
this.addrBytes = context.getServerAddress().getAddress().getAddress();
this.addrPort = context.getServerAddress().getPort();
if (this.addrBytes.length != 4) throw new RuntimeException("SNCP serverAddress only support IPv4");
@@ -56,10 +56,10 @@ public final class SncpResponse extends Response<SncpContext, SncpRequest> {
protected void offerBuffer(ByteBuffer... buffers) {
super.offerBuffer(buffers);
}
public void finish(final int retcode, final BsonWriter out) {
if (out == null) {
final ByteBuffer buffer = pollWriteReadBuffer();
final ByteBuffer buffer = channel.pollWriteBuffer();
fillHeader(buffer, 0, retcode);
finish(buffer);
return;

View File

@@ -5,10 +5,10 @@
*/
package org.redkale.net.sncp;
import java.nio.*;
import java.util.*;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.atomic.*;
import org.redkale.convert.bson.*;
import org.redkale.convert.bson.BsonFactory;
import org.redkale.net.*;
import org.redkale.net.sncp.SncpContext.SncpContextConfig;
import org.redkale.service.Service;
@@ -99,28 +99,14 @@ public class SncpServer extends Server<DLong, SncpContext, SncpRequest, SncpResp
@Override
@SuppressWarnings("unchecked")
protected SncpContext createContext() {
final int port = this.address.getPort();
AtomicLong createBufferCounter = new AtomicLong();
AtomicLong cycleBufferCounter = new AtomicLong();
final int rcapacity = Math.max(this.bufferCapacity, 8 * 1024);
ObjectPool<ByteBuffer> bufferPool = new ObjectPool<>(createBufferCounter, cycleBufferCounter, this.bufferPoolSize,
(Object... params) -> ByteBuffer.allocateDirect(rcapacity), null, (e) -> {
if (e == null || e.isReadOnly() || e.capacity() != rcapacity) return false;
e.clear();
return true;
});
AtomicLong createResponseCounter = new AtomicLong();
AtomicLong cycleResponseCounter = new AtomicLong();
ObjectPool<Response> responsePool = SncpResponse.createPool(createResponseCounter, cycleResponseCounter, this.responsePoolSize, null);
this.bufferCapacity = Math.max(this.bufferCapacity, 8 * 1024);
final SncpContextConfig contextConfig = new SncpContextConfig();
contextConfig.serverStartTime = this.serverStartTime;
contextConfig.logger = this.logger;
contextConfig.executor = this.executor;
contextConfig.sslContext = this.sslContext;
contextConfig.bufferCapacity = rcapacity;
contextConfig.bufferPool = bufferPool;
contextConfig.responsePool = responsePool;
contextConfig.bufferCapacity = this.bufferCapacity;
contextConfig.maxconns = this.maxconns;
contextConfig.maxbody = this.maxbody;
contextConfig.charset = this.charset;
@@ -131,9 +117,31 @@ public class SncpServer extends Server<DLong, SncpContext, SncpRequest, SncpResp
contextConfig.readTimeoutSeconds = this.readTimeoutSeconds;
contextConfig.writeTimeoutSeconds = this.writeTimeoutSeconds;
SncpContext sncpcontext = new SncpContext(contextConfig);
responsePool.setCreator((Object... params) -> new SncpResponse(sncpcontext, new SncpRequest(sncpcontext)));
return sncpcontext;
return new SncpContext(contextConfig);
}
@Override
protected ObjectPool<ByteBuffer> createBufferPool(AtomicLong createCounter, AtomicLong cycleCounter, int bufferPoolSize) {
if (createCounter == null) createCounter = new AtomicLong();
if (cycleCounter == null) cycleCounter = new AtomicLong();
final int rcapacity = this.bufferCapacity;
ObjectPool<ByteBuffer> bufferPool = new ObjectPool<>(createCounter, cycleCounter, bufferPoolSize,
(Object... params) -> ByteBuffer.allocateDirect(rcapacity), null, (e) -> {
if (e == null || e.isReadOnly() || e.capacity() != rcapacity) return false;
e.clear();
return true;
});
return bufferPool;
}
@Override
protected ObjectPool<Response> createResponsePool(AtomicLong createCounter, AtomicLong cycleCounter, int responsePoolSize) {
return SncpResponse.createPool(createCounter, cycleCounter, responsePoolSize, null);
}
@Override
protected Creator<Response> createResponseCreator(ObjectPool<ByteBuffer> bufferPool, ObjectPool<Response> responsePool) {
return (Object... params) -> new SncpResponse(this.context, new SncpRequest(this.context, bufferPool), responsePool);
}
}

View File

@@ -49,8 +49,8 @@ public @interface RetLabel {
for (Field field : clazz.getFields()) {
if (!Modifier.isStatic(field.getModifiers())) continue;
if (field.getType() != int.class) continue;
RetLabel info = field.getAnnotation(RetLabel.class);
if (info == null) continue;
RetLabel[] infos = field.getAnnotationsByType(RetLabel.class);
if (infos == null || infos.length == 0) continue;
int value;
try {
value = field.getInt(null);
@@ -58,7 +58,9 @@ public @interface RetLabel {
ex.printStackTrace();
continue;
}
rets.computeIfAbsent(info.locale(), (k) -> new HashMap<>()).put(value, info.value());
for (RetLabel info : infos) {
rets.computeIfAbsent(info.locale(), (k) -> new HashMap<>()).put(value, info.value());
}
}
return rets;
}

View File

@@ -7,7 +7,9 @@ package org.redkale.service;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import org.redkale.convert.Convert;
import org.redkale.convert.json.*;
import org.redkale.util.Utility;
/**
* 通用的结果对象在常见的HTTP+JSON接口中返回的结果需要含结果码错误信息和实体对象。 <br>
@@ -32,6 +34,8 @@ public class RetResult<T> {
protected Map<String, String> attach;
protected Convert convert;
public RetResult() {
}
@@ -39,6 +43,11 @@ public class RetResult<T> {
this.result = result;
}
public RetResult(Convert convert, T result) {
this.convert = convert;
this.result = result;
}
public RetResult(int retcode) {
this.retcode = retcode;
}
@@ -54,6 +63,14 @@ public class RetResult<T> {
this.result = result;
}
public Convert convert() {
return convert;
}
public void convert(Convert convert) {
this.convert = convert;
}
public static RetResult success() {
return new RetResult();
}
@@ -66,6 +83,14 @@ public class RetResult<T> {
return CompletableFuture.completedFuture(new RetResult());
}
public static RetResult<Map<String, String>> map(String... items) {
return new RetResult(Utility.ofMap(items));
}
public static <K, V> RetResult<Map<K, V>> map(Object... items) {
return new RetResult(Utility.ofMap(items));
}
/**
* 判断结果是否成功返回, retcode = 0 视为成功, 否则视为错误码
*

View File

@@ -677,7 +677,7 @@ public class CacheMemorySource<V extends Object> extends AbstractService impleme
}
@Override
public <T> Map<String, Collection<T>> getCollectionMap(final Type componentType, final String... keys) {
public <T> Map<String, Collection<T>> getCollectionMap(final boolean set, final Type componentType, final String... keys) {
Map<String, Collection<T>> map = new HashMap<>();
for (String key : keys) {
Collection<T> s = (Collection<T>) get(key);
@@ -692,7 +692,7 @@ public class CacheMemorySource<V extends Object> extends AbstractService impleme
}
@Override
public Map<String, Collection<String>> getStringCollectionMap(final String... keys) {
public Map<String, Collection<String>> getStringCollectionMap(final boolean set, final String... keys) {
Map<String, Collection<String>> map = new HashMap<>();
for (String key : keys) {
Collection<String> s = (Collection<String>) get(key);
@@ -707,7 +707,7 @@ public class CacheMemorySource<V extends Object> extends AbstractService impleme
}
@Override
public Map<String, Collection<Long>> getLongCollectionMap(final String... keys) {
public Map<String, Collection<Long>> getLongCollectionMap(final boolean set, final String... keys) {
Map<String, Collection<Long>> map = new HashMap<>();
for (String key : keys) {
Collection<Long> s = (Collection<Long>) get(key);
@@ -727,8 +727,8 @@ public class CacheMemorySource<V extends Object> extends AbstractService impleme
}
@Override
public CompletableFuture<Map<String, Collection<V>>> getCollectionMapAsync(final Type componentType, final String... keys) {
return CompletableFuture.supplyAsync(() -> getCollectionMap(componentType, keys), getExecutor());
public CompletableFuture<Map<String, Collection<V>>> getCollectionMapAsync(final boolean set, final Type componentType, final String... keys) {
return CompletableFuture.supplyAsync(() -> getCollectionMap(set, componentType, keys), getExecutor());
}
@Override
@@ -737,8 +737,8 @@ public class CacheMemorySource<V extends Object> extends AbstractService impleme
}
@Override
public CompletableFuture<Map<String, Collection<String>>> getStringCollectionMapAsync(final String... keys) {
return CompletableFuture.supplyAsync(() -> getStringCollectionMap(keys), getExecutor());
public CompletableFuture<Map<String, Collection<String>>> getStringCollectionMapAsync(final boolean set, final String... keys) {
return CompletableFuture.supplyAsync(() -> getStringCollectionMap(set, keys), getExecutor());
}
@Override
@@ -747,8 +747,8 @@ public class CacheMemorySource<V extends Object> extends AbstractService impleme
}
@Override
public CompletableFuture<Map<String, Collection<Long>>> getLongCollectionMapAsync(final String... keys) {
return CompletableFuture.supplyAsync(() -> getLongCollectionMap(keys), getExecutor());
public CompletableFuture<Map<String, Collection<Long>>> getLongCollectionMapAsync(final boolean set, final String... keys) {
return CompletableFuture.supplyAsync(() -> getLongCollectionMap(set, keys), getExecutor());
}
@Override

View File

@@ -92,7 +92,7 @@ public interface CacheSource<V extends Object> {
public <T> Collection<T> getCollection(final String key, final Type componentType);
public <T> Map<String, Collection<T>> getCollectionMap(final Type componentType, final String... keys);
public <T> Map<String, Collection<T>> getCollectionMap(final boolean set, final Type componentType, final String... keys);
public int getCollectionSize(final String key);
@@ -140,7 +140,7 @@ public interface CacheSource<V extends Object> {
public Collection<String> getStringCollection(final String key);
public Map<String, Collection<String>> getStringCollectionMap(final String... keys);
public Map<String, Collection<String>> getStringCollectionMap(final boolean set, final String... keys);
public Collection<String> getStringCollectionAndRefresh(final String key, final int expireSeconds);
@@ -164,7 +164,7 @@ public interface CacheSource<V extends Object> {
public Collection<Long> getLongCollection(final String key);
public Map<String, Collection<Long>> getLongCollectionMap(final String... keys);
public Map<String, Collection<Long>> getLongCollectionMap(final boolean set, final String... keys);
public Collection<Long> getLongCollectionAndRefresh(final String key, final int expireSeconds);
@@ -241,7 +241,7 @@ public interface CacheSource<V extends Object> {
public <T> CompletableFuture<Collection<T>> getCollectionAsync(final String key, final Type componentType);
public <T> CompletableFuture<Map<String, Collection<T>>> getCollectionMapAsync(final Type componentType, final String... keys);
public <T> CompletableFuture<Map<String, Collection<T>>> getCollectionMapAsync(final boolean set, final Type componentType, final String... keys);
public CompletableFuture<Integer> getCollectionSizeAsync(final String key);
@@ -289,7 +289,7 @@ public interface CacheSource<V extends Object> {
public CompletableFuture<Collection<String>> getStringCollectionAsync(final String key);
public CompletableFuture<Map<String, Collection<String>>> getStringCollectionMapAsync(final String... keys);
public CompletableFuture<Map<String, Collection<String>>> getStringCollectionMapAsync(final boolean set, final String... keys);
public CompletableFuture<Collection<String>> getStringCollectionAndRefreshAsync(final String key, final int expireSeconds);
@@ -313,7 +313,7 @@ public interface CacheSource<V extends Object> {
public CompletableFuture<Collection<Long>> getLongCollectionAsync(final String key);
public CompletableFuture<Map<String, Collection<Long>>> getLongCollectionMapAsync(final String... keys);
public CompletableFuture<Map<String, Collection<Long>>> getLongCollectionMapAsync(final boolean set, final String... keys);
public CompletableFuture<Collection<Long>> getLongCollectionAndRefreshAsync(final String key, final int expireSeconds);

View File

@@ -15,17 +15,29 @@ package org.redkale.source;
*/
public enum ColumnExpress {
/**
* 直接赋值 col = val
* 赋值 col = val
*/
MOV,
/**
* 加值 col = col + val
* 加值 col = col + val
*/
INC,
/**
* 减值 col = col - val
*/
DEC,
/**
* 乘值 col = col * val
*/
MUL,
/**
* 除值 col = col / val
*/
DIV,
/**
* 取模 col = col % val
*/
MOD,
/**
* 与值 col = col &#38; val
*/

View File

@@ -0,0 +1,83 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkale.source;
import java.io.Serializable;
/**
* 与ColumnNodeValue 组合,用于复杂的字段表达式 。
* String 视为 字段名
*
* <p>
* 详情见: https://redkale.org
*
* @author zhangjx
* @since 2.0.0
*/
public class ColumnFuncNode implements ColumnNode {

    protected FilterFunc func;

    // Operand of the function; restricted to String (a column name) or ColumnNodeValue (a nested expression).
    protected Serializable value;

    /** No-arg constructor, kept for (de)serialization frameworks. */
    public ColumnFuncNode() {
    }

    /**
     * Creates a function node applying {@code func} to the given operand.
     *
     * @param func the aggregate function to apply
     * @param node the operand — must be a String column name or a ColumnNodeValue expression
     */
    public ColumnFuncNode(FilterFunc func, Serializable node) {
        boolean supported = (node instanceof String) || (node instanceof ColumnNodeValue);
        if (!supported) throw new IllegalArgumentException("value must be String or ColumnNodeValue");
        this.func = func;
        this.value = node;
    }

    /** Generic factory; equivalent to the two-arg constructor. */
    public static ColumnFuncNode create(FilterFunc func, Serializable node) {
        return new ColumnFuncNode(func, node);
    }

    /** AVG({node}) */
    public static ColumnFuncNode avg(Serializable node) {
        return new ColumnFuncNode(FilterFunc.AVG, node);
    }

    /** COUNT({node}) */
    public static ColumnFuncNode count(Serializable node) {
        return new ColumnFuncNode(FilterFunc.COUNT, node);
    }

    /** COUNT(DISTINCT {node}) */
    public static ColumnFuncNode distinctCount(Serializable node) {
        return new ColumnFuncNode(FilterFunc.DISTINCTCOUNT, node);
    }

    /** MAX({node}) */
    public static ColumnFuncNode max(Serializable node) {
        return new ColumnFuncNode(FilterFunc.MAX, node);
    }

    /** MIN({node}) */
    public static ColumnFuncNode min(Serializable node) {
        return new ColumnFuncNode(FilterFunc.MIN, node);
    }

    /** SUM({node}) */
    public static ColumnFuncNode sum(Serializable node) {
        return new ColumnFuncNode(FilterFunc.SUM, node);
    }

    public FilterFunc getFunc() {
        return func;
    }

    public void setFunc(FilterFunc func) {
        this.func = func;
    }

    public Serializable getValue() {
        return value;
    }

    public void setValue(Serializable value) {
        this.value = value;
    }

    @Override
    public String toString() {
        // Render as JSON-like text; quote the operand only when it is a character sequence.
        Object printable = (value instanceof CharSequence) ? ("\"" + value + "\"") : value;
        return "{\"func\":\"" + func + "\", \"value\":" + printable + "}";
    }
}

View File

@@ -0,0 +1,21 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkale.source;
import java.io.Serializable;
/**
* ColumnFuncNode与ColumnNodeValue 的接口
*
* <p>
* 详情见: https://redkale.org
*
* @author zhangjx
* @since 2.0.0
*/
public interface ColumnNode extends Serializable {
    // Marker interface with no methods: implemented by ColumnFuncNode and ColumnNodeValue
    // so that both node kinds can be handled uniformly in compound column expressions.
}

View File

@@ -0,0 +1,144 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.redkale.source;
import java.io.Serializable;
import static org.redkale.source.ColumnExpress.*;
/**
* 作为ColumnValue的value字段值用于复杂的字段表达式 。
* String 视为 字段名
* Number 视为 数值
*
* <p>
* 详情见: https://redkale.org
*
* @author zhangjx
* @since 2.0.0
*/
public class ColumnNodeValue implements ColumnNode {

    // Left operand; restricted to String (column name), Number (literal), ColumnNodeValue or ColumnFuncNode.
    protected Serializable left;

    // Operator joining the operands; MOV (plain assignment) is not allowed here.
    protected ColumnExpress express;

    // Right operand; same type restrictions as the left operand.
    protected Serializable right;

    /** No-arg constructor, kept for (de)serialization frameworks. */
    public ColumnNodeValue() {
    }

    /**
     * Creates the expression {left} {express} {right}.
     *
     * @param left    left operand (String, Number, ColumnFuncNode or ColumnNodeValue)
     * @param express operator; must not be null or MOV
     * @param right   right operand (String, Number, ColumnFuncNode or ColumnNodeValue)
     */
    public ColumnNodeValue(Serializable left, ColumnExpress express, Serializable right) {
        if (express == null || express == ColumnExpress.MOV) {
            throw new IllegalArgumentException("express cannot be null or MOV");
        }
        checkOperand(left, "left");
        checkOperand(right, "right");
        this.left = left;
        this.express = express;
        this.right = right;
    }

    // Rejects operand types the SQL generator cannot handle; the message names the offending side.
    private static void checkOperand(Serializable v, String side) {
        if ((v instanceof String) || (v instanceof Number) || (v instanceof ColumnNodeValue) || (v instanceof ColumnFuncNode)) return;
        throw new IllegalArgumentException(side + " value must be String, Number, ColumnFuncNode or ColumnNodeValue");
    }

    /** Generic factory; equivalent to the three-arg constructor. */
    public static ColumnNodeValue create(Serializable left, ColumnExpress express, Serializable right) {
        return new ColumnNodeValue(left, express, right);
    }

    /** {left} + {right} */
    public static ColumnNodeValue inc(Serializable left, Serializable right) {
        return new ColumnNodeValue(left, INC, right);
    }

    /** {left} - {right} */
    public static ColumnNodeValue dec(Serializable left, Serializable right) {
        return new ColumnNodeValue(left, DEC, right);
    }

    /** {left} * {right} */
    public static ColumnNodeValue mul(Serializable left, Serializable right) {
        return new ColumnNodeValue(left, MUL, right);
    }

    /** {left} / {right} */
    public static ColumnNodeValue div(Serializable left, Serializable right) {
        return new ColumnNodeValue(left, DIV, right);
    }

    /** {left} % {right} */
    public static ColumnNodeValue mod(Serializable left, Serializable right) {
        return new ColumnNodeValue(left, MOD, right);
    }

    /** {left} &amp; {right} */
    public static ColumnNodeValue and(Serializable left, Serializable right) {
        return new ColumnNodeValue(left, AND, right);
    }

    /** {left} | {right} */
    public static ColumnNodeValue orr(Serializable left, Serializable right) {
        return new ColumnNodeValue(left, ORR, right);
    }

    /** Appends "+ {right}" to this expression (mutates and returns this). */
    public ColumnNodeValue inc(Serializable right) {
        return any(INC, right);
    }

    /** Appends "- {right}" to this expression (mutates and returns this). */
    public ColumnNodeValue dec(Serializable right) {
        return any(DEC, right);
    }

    /** Appends "* {right}" to this expression (mutates and returns this). */
    public ColumnNodeValue mul(Serializable right) {
        return any(MUL, right);
    }

    /** Appends "/ {right}" to this expression (mutates and returns this). */
    public ColumnNodeValue div(Serializable right) {
        return any(DIV, right);
    }

    /** Appends "% {right}" to this expression (mutates and returns this). */
    public ColumnNodeValue mod(Serializable right) {
        return any(MOD, right);
    }

    /** Appends "&amp; {right}" to this expression (mutates and returns this). */
    public ColumnNodeValue and(Serializable right) {
        return any(AND, right);
    }

    /** Appends "| {right}" to this expression (mutates and returns this). */
    public ColumnNodeValue orr(Serializable right) {
        return any(ORR, right);
    }

    // Folds the current (left, express, right) into a child node, then makes this node
    // the new root: (old expression) {express} {right}. Chaining therefore nests left-deep.
    protected ColumnNodeValue any(ColumnExpress express, Serializable right) {
        this.left = new ColumnNodeValue(this.left, this.express, this.right);
        this.express = express;
        this.right = right;
        return this;
    }

    public Serializable getLeft() {
        return left;
    }

    public void setLeft(Serializable left) {
        this.left = left;
    }

    public ColumnExpress getExpress() {
        return express;
    }

    public void setExpress(ColumnExpress express) {
        this.express = express;
    }

    public Serializable getRight() {
        return right;
    }

    public void setRight(Serializable right) {
        this.right = right;
    }

    @Override
    public String toString() {
        // Render as JSON-like text; quote operands only when they are character sequences.
        Object lhs = (left instanceof CharSequence) ? ("\"" + left + "\"") : left;
        Object rhs = (right instanceof CharSequence) ? ("\"" + right + "\"") : right;
        return "{\"column\":" + lhs + ", \"express\":" + express + ", \"value\":" + rhs + "}";
    }
}

View File

@@ -74,6 +74,18 @@ public class ColumnValue {
return new ColumnValue(column, INC, value);
}
/**
* 返回 {column} = {column} - {value} 操作
*
* @param column 字段名
* @param value 字段值
*
* @return ColumnValue
*/
public static ColumnValue dec(String column, Serializable value) {
return new ColumnValue(column, DEC, value);
}
/**
* 返回 {column} = {column} * {value} 操作
*
@@ -86,6 +98,31 @@ public class ColumnValue {
return new ColumnValue(column, MUL, value);
}
/**
* 返回 {column} = {column} / {value} 操作
*
* @param column 字段名
* @param value 字段值
*
* @return ColumnValue
*/
public static ColumnValue div(String column, Serializable value) {
return new ColumnValue(column, DIV, value);
}
/**
* 返回 {column} = {column} % {value} 操作
*
* @param column 字段名
* @param value 字段值
*
* @return ColumnValue
*/
//不常用防止开发者容易在mov时误输入mod
// public static ColumnValue mod(String column, Serializable value) {
// return new ColumnValue(column, MOD, value);
// }
/**
* 返回 {column} = {column} &#38; {value} 操作
*

View File

@@ -30,7 +30,6 @@ public class DataCallAttribute implements Attribute<Object, Serializable> {
Class cltmp = clazz;
do {
for (Field field : cltmp.getDeclaredFields()) {
if (field.getAnnotation(javax.persistence.GeneratedValue.class) == null) continue;
try {
rs = Attribute.create(cltmp, field);
attributes.put(clazz, rs);

View File

@@ -70,16 +70,17 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
}
c = c1;
} catch (SQLException se) {
if (info.tableStrategy == null || !info.isTableNotExist(se)) throw se;
synchronized (info.tables) {
final String oldTable = info.table;
if (info.getTableStrategy() == null || !info.isTableNotExist(se)) throw se;
synchronized (info.disTableLock()) {
final String catalog = conn.getCatalog();
final String newTable = info.getTable(entitys[0]);
if (!info.tables.contains(newTable)) {
final String tablekey = newTable.indexOf('.') > 0 ? newTable : (catalog + '.' + newTable);
if (!info.containsDisTable(tablekey)) {
try {
Statement st = conn.createStatement();
st.execute(info.tablecopySQL.replace("${newtable}", newTable).replace("${oldtable}", oldTable));
st.execute(info.getTableCopySQL(newTable));
st.close();
info.tables.add(newTable);
info.addDisTable(tablekey);
} catch (SQLException sqle) { //多进程并发时可能会出现重复建表
if (newTable.indexOf('.') > 0 && info.isTableNotExist(se)) {
Statement st;
@@ -92,14 +93,14 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
}
try {
st = conn.createStatement();
st.execute(info.tablecopySQL.replace("${newtable}", newTable).replace("${oldtable}", oldTable));
st.execute(info.getTableCopySQL(newTable));
st.close();
info.tables.add(newTable);
info.addDisTable(tablekey);
} catch (SQLException sqle2) {
logger.log(Level.SEVERE, "create table2(" + info.tablecopySQL.replace("${newtable}", newTable).replace("${oldtable}", oldTable) + ") error", sqle2);
logger.log(Level.SEVERE, "create table2(" + info.getTableCopySQL(newTable) + ") error", sqle2);
}
} else {
logger.log(Level.SEVERE, "create table(" + info.tablecopySQL.replace("${newtable}", newTable).replace("${oldtable}", oldTable) + ") error", sqle);
logger.log(Level.SEVERE, "create table(" + info.getTableCopySQL(newTable) + ") error", sqle);
}
}
}
@@ -113,20 +114,6 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
}
c = c1;
}
if (info.autoGenerated) { //由数据库自动生成主键值
ResultSet set = prestmt.getGeneratedKeys();
int i = -1;
while (set.next()) {
if (primaryType == int.class) {
primary.set(entitys[++i], set.getInt(1));
} else if (primaryType == long.class) {
primary.set(entitys[++i], set.getLong(1));
} else {
primary.set(entitys[++i], set.getObject(1));
}
}
set.close();
}
prestmt.close();
//------------------------------------------------------------
if (info.isLoggable(logger, Level.FINEST)) { //打印调试信息
@@ -141,7 +128,7 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
if (obj != null && obj.getClass().isArray()) {
sb.append("'[length=").append(java.lang.reflect.Array.getLength(obj)).append("]'");
} else {
sb.append(FilterNode.formatToString(obj));
sb.append(info.formatSQLValue(obj, sqlFormatter));
}
} else {
sb.append(ch);
@@ -155,7 +142,7 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
} catch (SQLException e) {
CompletableFuture future = new CompletableFuture();
future.completeExceptionally(e);
return future;
return future;//return CompletableFuture.failedFuture(e);
} finally {
if (conn != null) writePool.offerConnection(conn);
}
@@ -164,10 +151,9 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
protected <T> PreparedStatement createInsertPreparedStatement(final Connection conn, final String sql,
final EntityInfo<T> info, T... entitys) throws SQLException {
Attribute<T, Serializable>[] attrs = info.insertAttributes;
final PreparedStatement prestmt = info.autoGenerated ? conn.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS) : conn.prepareStatement(sql);
final PreparedStatement prestmt = conn.prepareStatement(sql);
for (final T value : entitys) {
if (info.autouuid) info.createPrimaryValue(value);
batchStatementParameters(conn, prestmt, info, attrs, value);
prestmt.addBatch();
}
@@ -177,18 +163,22 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
protected <T> int batchStatementParameters(Connection conn, PreparedStatement prestmt, EntityInfo<T> info, Attribute<T, Serializable>[] attrs, T entity) throws SQLException {
int i = 0;
for (Attribute<T, Serializable> attr : attrs) {
Serializable val = info.getSQLValue(attr, entity);
Object val = info.getSQLValue(attr, entity);
if (val instanceof byte[]) {
Blob blob = conn.createBlob();
blob.setBytes(1, (byte[]) val);
prestmt.setObject(++i, blob);
} else if (val instanceof Boolean) {
prestmt.setObject(++i, ((Boolean) val) ? (byte) 1 : (byte) 0);
} else if (val instanceof AtomicInteger) {
prestmt.setObject(++i, ((AtomicInteger) val).get());
} else if (val instanceof AtomicLong) {
prestmt.setObject(++i, ((AtomicLong) val).get());
} else if (val != null && !(val instanceof Number) && !(val instanceof CharSequence) && !(entity instanceof java.util.Date)
} else if (val != null && !(val instanceof Number) && !(val instanceof CharSequence) && !(val instanceof java.util.Date)
&& !val.getClass().getName().startsWith("java.sql.") && !val.getClass().getName().startsWith("java.time.")) {
prestmt.setObject(++i, info.jsonConvert.convertTo(attr.genericType(), val));
} else if (val == null && info.isNotNullJson(attr)) {
prestmt.setObject(++i, "");
} else {
prestmt.setObject(++i, val);
}
@@ -212,14 +202,14 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
} catch (SQLException e) {
CompletableFuture future = new CompletableFuture();
future.completeExceptionally(e);
return future;
return future;//return CompletableFuture.failedFuture(e);
} finally {
if (conn != null) writePool.offerConnection(conn);
}
}
@Override
protected <T> CompletableFuture<Integer> clearTableDB(EntityInfo<T> info, String sql) {
protected <T> CompletableFuture<Integer> clearTableDB(EntityInfo<T> info, final String table, String sql) {
Connection conn = null;
try {
conn = writePool.poll();
@@ -230,16 +220,17 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
stmt.close();
return CompletableFuture.completedFuture(c);
} catch (SQLException e) {
if (info.isTableNotExist(e)) return CompletableFuture.completedFuture(-1);
CompletableFuture future = new CompletableFuture();
future.completeExceptionally(e);
return future;
return future;//return CompletableFuture.failedFuture(e);
} finally {
if (conn != null) writePool.offerConnection(conn);
}
}
@Override
protected <T> CompletableFuture<Integer> dropTableDB(EntityInfo<T> info, String sql) {
protected <T> CompletableFuture<Integer> dropTableDB(EntityInfo<T> info, final String table, String sql) {
Connection conn = null;
try {
conn = writePool.poll();
@@ -248,11 +239,16 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
final Statement stmt = conn.createStatement();
int c = stmt.executeUpdate(sql);
stmt.close();
if (info.getTableStrategy() != null) {
String tablekey = table.indexOf('.') > 0 ? table : (conn.getCatalog() + '.' + table);
info.removeDisTable(tablekey);
}
return CompletableFuture.completedFuture(c);
} catch (SQLException e) {
if (info.isTableNotExist(e)) return CompletableFuture.completedFuture(-1);
CompletableFuture future = new CompletableFuture();
future.completeExceptionally(e);
return future;
return future;//return CompletableFuture.failedFuture(e);
} finally {
if (conn != null) writePool.offerConnection(conn);
}
@@ -285,7 +281,7 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
if (obj != null && obj.getClass().isArray()) {
sb.append("'[length=").append(java.lang.reflect.Array.getLength(obj)).append("]'");
} else {
sb.append(FilterNode.formatToString(obj));
sb.append(info.formatSQLValue(obj, sqlFormatter));
}
} else {
sb.append(ch);
@@ -305,7 +301,7 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
} catch (SQLException e) {
CompletableFuture future = new CompletableFuture();
future.completeExceptionally(e);
return future;
return future;//return CompletableFuture.failedFuture(e);
} finally {
if (conn != null) writePool.offerConnection(conn);
}
@@ -339,7 +335,7 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
} catch (SQLException e) {
CompletableFuture future = new CompletableFuture();
future.completeExceptionally(e);
return future;
return future;//return CompletableFuture.failedFuture(e);
} finally {
if (conn != null) writePool.offerConnection(conn);
}
@@ -348,12 +344,12 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
@Override
protected <T, N extends Number> CompletableFuture<Map<String, N>> getNumberMapDB(EntityInfo<T> info, String sql, FilterFuncColumn... columns) {
Connection conn = null;
final Map map = new HashMap<>();
try {
conn = readPool.poll();
//conn.setReadOnly(true);
final Statement stmt = conn.createStatement();
ResultSet set = stmt.executeQuery(sql);
final Map map = new HashMap<>();
if (set.next()) {
int index = 0;
for (FilterFuncColumn ffc : columns) {
@@ -369,9 +365,10 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
stmt.close();
return CompletableFuture.completedFuture(map);
} catch (SQLException e) {
if (info.getTableStrategy() != null && info.isTableNotExist(e)) return CompletableFuture.completedFuture(map);
CompletableFuture future = new CompletableFuture();
future.completeExceptionally(e);
return future;
return future;//return CompletableFuture.failedFuture(e);
} finally {
if (conn != null) readPool.offerConnection(conn);
}
@@ -394,9 +391,10 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
stmt.close();
return CompletableFuture.completedFuture(rs);
} catch (SQLException e) {
if (info.getTableStrategy() != null && info.isTableNotExist(e)) return CompletableFuture.completedFuture(defVal);
CompletableFuture future = new CompletableFuture();
future.completeExceptionally(e);
return future;
return future;//return CompletableFuture.failedFuture(e);
} finally {
if (conn != null) readPool.offerConnection(conn);
}
@@ -405,11 +403,11 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
@Override
protected <T, K extends Serializable, N extends Number> CompletableFuture<Map<K, N>> queryColumnMapDB(EntityInfo<T> info, String sql, String keyColumn) {
Connection conn = null;
Map<K, N> rs = new LinkedHashMap<>();
try {
conn = readPool.poll();
//conn.setReadOnly(true);
final Statement stmt = conn.createStatement();
Map<K, N> rs = new LinkedHashMap<>();
ResultSet set = stmt.executeQuery(sql);
ResultSetMetaData rsd = set.getMetaData();
boolean smallint = rsd == null ? false : rsd.getColumnType(1) == Types.SMALLINT;
@@ -420,9 +418,52 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
stmt.close();
return CompletableFuture.completedFuture(rs);
} catch (SQLException e) {
if (info.getTableStrategy() != null && info.isTableNotExist(e)) return CompletableFuture.completedFuture(rs);
CompletableFuture future = new CompletableFuture();
future.completeExceptionally(e);
return future;
return future;//return CompletableFuture.failedFuture(e);
} finally {
if (conn != null) readPool.offerConnection(conn);
}
}
@Override
protected <T, K extends Serializable, N extends Number> CompletableFuture<Map<K[], N[]>> queryColumnMapDB(EntityInfo<T> info, String sql, final ColumnNode[] funcNodes, final String[] groupByColumns) {
Connection conn = null;
Map rs = new LinkedHashMap<>();
try {
conn = readPool.poll();
//conn.setReadOnly(true);
final Statement stmt = conn.createStatement();
ResultSet set = stmt.executeQuery(sql);
ResultSetMetaData rsd = set.getMetaData();
boolean[] smallints = null;
while (set.next()) {
int index = 0;
Serializable[] keys = new Serializable[groupByColumns.length];
if (smallints == null) {
smallints = new boolean[keys.length];
for (int i = 0; i < keys.length; i++) {
smallints[i] = rsd == null ? false : rsd.getColumnType(i + 1) == Types.SMALLINT;
}
}
for (int i = 0; i < keys.length; i++) {
keys[i] = (Serializable) ((smallints[i] && index == 0) ? set.getShort(++index) : set.getObject(++index));
}
Number[] vals = new Number[funcNodes.length];
for (int i = 0; i < vals.length; i++) {
vals[i] = (Number) set.getObject(++index);
}
rs.put(keys, vals);
}
set.close();
stmt.close();
return CompletableFuture.completedFuture(rs);
} catch (SQLException e) {
if (info.getTableStrategy() != null && info.isTableNotExist(e)) return CompletableFuture.completedFuture(rs);
CompletableFuture future = new CompletableFuture();
future.completeExceptionally(e);
return future;//return CompletableFuture.failedFuture(e);
} finally {
if (conn != null) readPool.offerConnection(conn);
}
@@ -442,10 +483,10 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
ps.close();
return CompletableFuture.completedFuture(rs);
} catch (SQLException e) {
if (info.tableStrategy != null && info.isTableNotExist(e)) return CompletableFuture.completedFuture(null);
if (info.getTableStrategy() != null && info.isTableNotExist(e)) return CompletableFuture.completedFuture(null);
CompletableFuture future = new CompletableFuture();
future.completeExceptionally(e);
return future;
return future;//return CompletableFuture.failedFuture(e);
} finally {
if (conn != null) readPool.offerConnection(conn);
}
@@ -469,10 +510,10 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
ps.close();
return CompletableFuture.completedFuture(val == null ? defValue : val);
} catch (SQLException e) {
if (info.tableStrategy != null && info.isTableNotExist(e)) return CompletableFuture.completedFuture(defValue);
if (info.getTableStrategy() != null && info.isTableNotExist(e)) return CompletableFuture.completedFuture(defValue);
CompletableFuture future = new CompletableFuture();
future.completeExceptionally(e);
return future;
return future;//return CompletableFuture.failedFuture(e);
} finally {
if (conn != null) readPool.offerConnection(conn);
}
@@ -492,17 +533,17 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
if (info.isLoggable(logger, Level.FINEST, sql)) logger.finest(info.getType().getSimpleName() + " exists (" + rs + ") sql=" + sql);
return CompletableFuture.completedFuture(rs);
} catch (SQLException e) {
if (info.tableStrategy != null && info.isTableNotExist(e)) return CompletableFuture.completedFuture(false);
if (info.getTableStrategy() != null && info.isTableNotExist(e)) return CompletableFuture.completedFuture(false);
CompletableFuture future = new CompletableFuture();
future.completeExceptionally(e);
return future;
return future;//return CompletableFuture.failedFuture(e);
} finally {
if (conn != null) readPool.offerConnection(conn);
}
}
@Override
protected <T> CompletableFuture<Sheet<T>> querySheetDB(EntityInfo<T> info, final boolean readcache, boolean needtotal, SelectColumn selects, Flipper flipper, FilterNode node) {
protected <T> CompletableFuture<Sheet<T>> querySheetDB(EntityInfo<T> info, final boolean readcache, boolean needtotal, final boolean distinct, SelectColumn selects, Flipper flipper, FilterNode node) {
Connection conn = null;
try {
conn = readPool.poll();
@@ -514,7 +555,7 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
final CharSequence where = node == null ? null : node.createSQLExpress(info, joinTabalis);
final String dbtype = this.readPool.getDbtype();
if ("mysql".equals(dbtype) || "postgresql".equals(dbtype)) {
final String listsql = "SELECT " + info.getQueryColumns("a", selects) + " FROM " + info.getTable(node) + " a" + (join == null ? "" : join)
final String listsql = "SELECT " + (distinct ? "DISTINCT " : "") + info.getQueryColumns("a", selects) + " FROM " + info.getTable(node) + " a" + (join == null ? "" : join)
+ ((where == null || where.length() == 0) ? "" : (" WHERE " + where)) + createSQLOrderby(info, flipper) + (flipper == null || flipper.getLimit() < 1 ? "" : (" LIMIT " + flipper.getLimit() + " OFFSET " + flipper.getOffset()));
if (readcache && info.isLoggable(logger, Level.FINEST, listsql)) {
logger.finest(info.getType().getSimpleName() + " query sql=" + listsql);
@@ -528,7 +569,7 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
ps.close();
long total = list.size();
if (needtotal) {
final String countsql = "SELECT COUNT(*) FROM " + info.getTable(node) + " a" + (join == null ? "" : join) + ((where == null || where.length() == 0) ? "" : (" WHERE " + where));
final String countsql = "SELECT " + (distinct ? "DISTINCT COUNT(" + info.getQueryColumns("a", selects) + ")" : "COUNT(*)") + " FROM " + info.getTable(node) + " a" + (join == null ? "" : join) + ((where == null || where.length() == 0) ? "" : (" WHERE " + where));
if (readcache && info.isLoggable(logger, Level.FINEST, countsql)) {
logger.finest(info.getType().getSimpleName() + " query countsql=" + countsql);
}
@@ -540,7 +581,7 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
}
return CompletableFuture.completedFuture(new Sheet<>(total, list));
}
final String sql = "SELECT " + info.getQueryColumns("a", selects) + " FROM " + info.getTable(node) + " a" + (join == null ? "" : join)
final String sql = "SELECT " + (distinct ? "DISTINCT " : "") + info.getQueryColumns("a", selects) + " FROM " + info.getTable(node) + " a" + (join == null ? "" : join)
+ ((where == null || where.length() == 0) ? "" : (" WHERE " + where)) + info.createSQLOrderby(flipper);
if (readcache && info.isLoggable(logger, Level.FINEST, sql)) {
logger.finest(info.getType().getSimpleName() + " query sql=" + sql + (flipper == null || flipper.getLimit() < 1 ? "" : (" LIMIT " + flipper.getLimit() + " OFFSET " + flipper.getOffset())));
@@ -566,10 +607,10 @@ public class DataJdbcSource extends DataSqlSource<Connection> {
ps.close();
return CompletableFuture.completedFuture(new Sheet<>(total, list));
} catch (SQLException e) {
if (info.tableStrategy != null && info.isTableNotExist(e)) return CompletableFuture.completedFuture(new Sheet<>());
if (info.getTableStrategy() != null && info.isTableNotExist(e)) return CompletableFuture.completedFuture(new Sheet<>(0, new ArrayList()));
CompletableFuture future = new CompletableFuture();
future.completeExceptionally(e);
return future;
return future;//return CompletableFuture.failedFuture(e);
} finally {
if (conn != null) readPool.offerConnection(conn);
}

View File

@@ -94,12 +94,12 @@ public class DataMemorySource extends DataSqlSource<Void> {
}
@Override
protected <T> CompletableFuture<Integer> clearTableDB(EntityInfo<T> info, String sql) {
protected <T> CompletableFuture<Integer> clearTableDB(EntityInfo<T> info, final String table, String sql) {
return CompletableFuture.completedFuture(0);
}
@Override
protected <T> CompletableFuture<Integer> dropTableDB(EntityInfo<T> info, String sql) {
protected <T> CompletableFuture<Integer> dropTableDB(EntityInfo<T> info, final String table, String sql) {
return CompletableFuture.completedFuture(0);
}
@@ -128,6 +128,11 @@ public class DataMemorySource extends DataSqlSource<Void> {
return CompletableFuture.completedFuture(null);
}
@Override
protected <T, K extends Serializable, N extends Number> CompletableFuture<Map<K[], N[]>> queryColumnMapDB(final EntityInfo<T> info, final String sql, final ColumnNode[] funcNodes, final String[] groupByColumns) {
return CompletableFuture.completedFuture(null);
}
@Override
protected <T> CompletableFuture<T> findDB(EntityInfo<T> info, String sql, boolean onlypk, SelectColumn selects) {
return CompletableFuture.completedFuture(null);
@@ -144,8 +149,8 @@ public class DataMemorySource extends DataSqlSource<Void> {
}
@Override
protected <T> CompletableFuture<Sheet<T>> querySheetDB(EntityInfo<T> info, final boolean readcache, boolean needtotal, SelectColumn selects, Flipper flipper, FilterNode node) {
return CompletableFuture.completedFuture(new Sheet<>());
protected <T> CompletableFuture<Sheet<T>> querySheetDB(EntityInfo<T> info, final boolean readcache, boolean needtotal, final boolean distinct, SelectColumn selects, Flipper flipper, FilterNode node) {
return CompletableFuture.completedFuture(new Sheet<>(0, new ArrayList()));
}
}

View File

@@ -43,6 +43,26 @@ public interface DataSource {
*/
public <T> int insert(final T... entitys);
/**
* 新增记录, 多对象必须是同一个Entity类且必须在同一张表中 <br>
*
* @param <T> 泛型
* @param entitys Entity对象
*
* @return 影响的记录条数
*/
public <T> int insert(final Collection<T> entitys);
/**
* 新增记录, 多对象必须是同一个Entity类且必须在同一张表中 <br>
*
* @param <T> 泛型
* @param entitys Entity对象
*
* @return 影响的记录条数
*/
public <T> int insert(final Stream<T> entitys);
/**
* 新增记录, 多对象必须是同一个Entity类且必须在同一张表中 <br>
*
@@ -53,6 +73,26 @@ public interface DataSource {
*/
public <T> CompletableFuture<Integer> insertAsync(final T... entitys);
/**
* 新增记录, 多对象必须是同一个Entity类且必须在同一张表中 <br>
*
* @param <T> 泛型
* @param entitys Entity对象
*
* @return CompletableFuture
*/
public <T> CompletableFuture<Integer> insertAsync(final Collection<T> entitys);
/**
* 新增记录, 多对象必须是同一个Entity类且必须在同一张表中 <br>
*
* @param <T> 泛型
* @param entitys Entity对象
*
* @return CompletableFuture
*/
public <T> CompletableFuture<Integer> insertAsync(final Stream<T> entitys);
//-------------------------deleteAsync--------------------------
/**
* 删除指定主键值的记录, 多对象必须是同一个Entity类且必须在同一张表中 <br>
@@ -216,7 +256,7 @@ public interface DataSource {
* @param <T> Entity泛型
* @param clazz Entity类
*
* @return 影响的记录条数
* @return 影响的记录条数 -1表示表不存在
*/
public <T> int clearTable(final Class<T> clazz);
@@ -227,7 +267,7 @@ public interface DataSource {
* @param <T> Entity泛型
* @param clazz Entity类
*
* @return 影响的记录条数CompletableFuture
* @return 影响的记录条数CompletableFuture -1表示表不存在
*/
public <T> CompletableFuture<Integer> clearTableAsync(final Class<T> clazz);
@@ -239,7 +279,7 @@ public interface DataSource {
* @param clazz Entity类
* @param node 过滤条件
*
* @return 影响的记录条数
* @return 影响的记录条数 -1表示表不存在
*/
public <T> int clearTable(final Class<T> clazz, final FilterNode node);
@@ -251,7 +291,7 @@ public interface DataSource {
* @param clazz Entity类
* @param node 过滤条件
*
* @return 影响的记录条数CompletableFuture
* @return 影响的记录条数CompletableFuture -1表示表不存在
*/
public <T> CompletableFuture<Integer> clearTableAsync(final Class<T> clazz, final FilterNode node);
@@ -263,7 +303,7 @@ public interface DataSource {
* @param <T> Entity泛型
* @param clazz Entity类
*
* @return 影响的记录条数
* @return 影响的记录条数 -1表示表不存在
*/
public <T> int dropTable(final Class<T> clazz);
@@ -274,7 +314,7 @@ public interface DataSource {
* @param <T> Entity泛型
* @param clazz Entity类
*
* @return 影响的记录条数CompletableFuture
* @return 影响的记录条数CompletableFuture -1表示表不存在
*/
public <T> CompletableFuture<Integer> dropTableAsync(final Class<T> clazz);
@@ -286,7 +326,7 @@ public interface DataSource {
* @param clazz Entity类
* @param node 过滤条件
*
* @return 影响的记录条数
* @return 影响的记录条数 -1表示表不存在
*/
public <T> int dropTable(final Class<T> clazz, final FilterNode node);
@@ -298,7 +338,7 @@ public interface DataSource {
* @param clazz Entity类
* @param node 过滤条件
*
* @return 影响的记录条数CompletableFuture
* @return 影响的记录条数CompletableFuture -1表示表不存在
*/
public <T> CompletableFuture<Integer> dropTableAsync(final Class<T> clazz, final FilterNode node);
@@ -662,7 +702,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回null <br>
* 等价SQL: SELECT FUNC{column} FROM {table} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.COUNT, null) 等价于: SELECT COUNT(*) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.COUNT, null) 等价于: SELECT COUNT(*) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -675,7 +715,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回null <br>
* 等价SQL: SELECT FUNC{column} FROM {table} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.COUNT, null) 等价于: SELECT COUNT(*) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.COUNT, null) 等价于: SELECT COUNT(*) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -688,7 +728,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回null <br>
* 等价SQL: SELECT FUNC{column} FROM {table} WHERE {filter bean} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.COUNT, null, (FilterBean)null) 等价于: SELECT COUNT(*) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.COUNT, null, (FilterBean)null) 等价于: SELECT COUNT(*) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -702,7 +742,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回null <br>
* 等价SQL: SELECT FUNC{column} FROM {table} WHERE {filter bean} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.COUNT, null, (FilterBean)null) 等价于: SELECT COUNT(*) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.COUNT, null, (FilterBean)null) 等价于: SELECT COUNT(*) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -716,7 +756,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回null <br>
* 等价SQL: SELECT FUNC{column} FROM {table} WHERE {filter node} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -730,7 +770,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回null <br>
* 等价SQL: SELECT FUNC{column} FROM {table} WHERE {filter node} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -744,7 +784,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回默认值 <br>
* 等价SQL: SELECT FUNC{column} FROM {table} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.MAX, "createtime") 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.MAX, "createtime") 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -758,7 +798,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回默认值 <br>
* 等价SQL: SELECT FUNC{column} FROM {table} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.MAX, "createtime") 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.MAX, "createtime") 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -772,7 +812,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回默认值 <br>
* 等价SQL: SELECT FUNC{column} FROM {table} WHERE {filter bean} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -787,7 +827,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回默认值 <br>
* 等价SQL: SELECT FUNC{column} FROM {table} WHERE {filter bean} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -802,7 +842,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回默认值 <br>
* 等价SQL: SELECT FUNC{column} FROM {table} WHERE {filter node} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -817,7 +857,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果, 无结果返回默认值 <br>
* 等价SQL: SELECT FUNC{column} FROM {table} WHERE {filter node} <br>
* 如 getNumberResultAsync(Record.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberResultAsync(User.class, FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param entityClass Entity类
* @param func 聚合函数
@@ -832,7 +872,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果Map <br>
* 等价SQL: SELECT FUNC1{column1}, FUNC2{column2}, &#183;&#183;&#183; FROM {table} <br>
* 如 getNumberMapAsync(Record.class, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberMapAsync(User.class, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param <N> Number
* @param entityClass Entity类
@@ -845,7 +885,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果Map <br>
* 等价SQL: SELECT FUNC1{column1}, FUNC2{column2}, &#183;&#183;&#183; FROM {table} <br>
* 如 getNumberMapAsync(Record.class, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberMapAsync(User.class, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param <N> Number
* @param entityClass Entity类
@@ -858,7 +898,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果Map <br>
* 等价SQL: SELECT FUNC1{column1}, FUNC2{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter bean} <br>
* 如 getNumberMapAsync(Record.class, (FilterBean)null, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberMapAsync(User.class, (FilterBean)null, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param <N> Number
* @param entityClass Entity类
@@ -872,7 +912,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果Map <br>
* 等价SQL: SELECT FUNC1{column1}, FUNC2{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter bean} <br>
* 如 getNumberMapAsync(Record.class, (FilterBean)null, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberMapAsync(User.class, (FilterBean)null, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param <N> Number
* @param entityClass Entity类
@@ -886,7 +926,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果Map <br>
* 等价SQL: SELECT FUNC1{column1}, FUNC2{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter node} <br>
* 如 getNumberMapAsync(Record.class, (FilterNode)null, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberMapAsync(User.class, (FilterNode)null, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param <N> Number
* @param entityClass Entity类
@@ -900,7 +940,7 @@ public interface DataSource {
/**
* 获取符合过滤条件记录的聚合结果Map <br>
* 等价SQL: SELECT FUNC1{column1}, FUNC2{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter node} <br>
* 如 getNumberMapAsync(Record.class, (FilterNode)null, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
* 如 getNumberMapAsync(User.class, (FilterNode)null, new FilterFuncColumn(FilterFunc.MAX, "createtime")) 等价于: SELECT MAX(createtime) FROM {table} <br>
*
* @param <N> Number
* @param entityClass Entity类
@@ -914,7 +954,7 @@ public interface DataSource {
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT keyColumn, FUNC{funcColumn} FROM {table} GROUP BY {keyColumn} <br>
* 如 queryColumnMapAsync(Record.class, "name", FilterFunc.MAX, "createtime") 等价于: SELECT name, MAX(createtime) FROM record GROUP BY name<br>
* 如 queryColumnMapAsync(User.class, "name", FilterFunc.MAX, "createtime") 等价于: SELECT name, MAX(createtime) FROM user GROUP BY name<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
@@ -931,7 +971,7 @@ public interface DataSource {
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT keyColumn, FUNC{funcColumn} FROM {table} GROUP BY {keyColumn} <br>
* 如 queryColumnMapAsync(Record.class, "name", FilterFunc.MAX, "createtime") 等价于: SELECT name, MAX(createtime) FROM record GROUP BY name<br>
* 如 queryColumnMapAsync(User.class, "name", FilterFunc.MAX, "createtime") 等价于: SELECT name, MAX(createtime) FROM user GROUP BY name<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
@@ -948,7 +988,7 @@ public interface DataSource {
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT keyColumn, FUNC{funcColumn} FROM {table} WHERE {filter bean} GROUP BY {keyColumn} <br>
* 如 queryColumnMapAsync(Record.class, "name", FilterFunc.MAX, "createtime", (FilterBean)null) 等价于: SELECT name, MAX(createtime) FROM record GROUP BY name<br>
* 如 queryColumnMapAsync(User.class, "name", FilterFunc.MAX, "createtime", (FilterBean)null) 等价于: SELECT name, MAX(createtime) FROM user GROUP BY name<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
@@ -966,7 +1006,7 @@ public interface DataSource {
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT keyColumn, FUNC{funcColumn} FROM {table} WHERE {filter bean} GROUP BY {keyColumn} <br>
* 如 queryColumnMapAsync(Record.class, "name", FilterFunc.MAX, "createtime", (FilterBean)null) 等价于: SELECT name, MAX(createtime) FROM record GROUP BY name<br>
* 如 queryColumnMapAsync(User.class, "name", FilterFunc.MAX, "createtime", (FilterBean)null) 等价于: SELECT name, MAX(createtime) FROM user GROUP BY name<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
@@ -984,7 +1024,7 @@ public interface DataSource {
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT keyColumn, FUNC{funcColumn} FROM {table} WHERE {filter node} GROUP BY {keyColumn} <br>
* 如 queryColumnMapAsync(Record.class, "name", FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT name, MAX(createtime) FROM record GROUP BY name<br>
* 如 queryColumnMapAsync(User.class, "name", FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT name, MAX(createtime) FROM user GROUP BY name<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
@@ -1002,7 +1042,7 @@ public interface DataSource {
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT keyColumn, FUNC{funcColumn} FROM {table} WHERE {filter node} GROUP BY {keyColumn} <br>
* 如 queryColumnMapAsync(Record.class, "name", FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT name, MAX(createtime) FROM record GROUP BY name<br>
* 如 queryColumnMapAsync(User.class, "name", FilterFunc.MAX, "createtime", (FilterNode)null) 等价于: SELECT name, MAX(createtime) FROM user GROUP BY name<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
@@ -1017,6 +1057,218 @@ public interface DataSource {
*/
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K, N>> queryColumnMapAsync(final Class<T> entityClass, final String keyColumn, final FilterFunc func, final String funcColumn, final FilterNode node);
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT col1, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} GROUP BY {col1} <br>
* 如 queryColumnMap(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), "targetid")
* 等价于: SELECT targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY targetid<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
* @param <N> Number
* @param entityClass Entity类
* @param funcNodes ColumnNode[]
* @param groupByColumn GROUP BY字段
*
* @return 聚合结果Map
*/
public <T, K extends Serializable, N extends Number> Map<K, N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn);
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT col1, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} GROUP BY {col1} <br>
* 如 queryColumnMapAsync(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), "targetid")
* 等价于: SELECT targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY targetid<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
* @param <N> Number
* @param entityClass Entity类
* @param funcNodes ColumnNode[]
* @param groupByColumn GROUP BY字段
*
* @return 聚合结果Map CompletableFuture
*/
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K, N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn);
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT col1, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} WHERE {filter bean} GROUP BY {col1} <br>
* 如 queryColumnMap(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), "targetid", (FilterBean)null)
* 等价于: SELECT targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY targetid<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
* @param <N> Number
* @param entityClass Entity类
* @param funcNodes ColumnNode[]
* @param groupByColumn GROUP BY字段
* @param bean 过滤条件
*
* @return 聚合结果Map
*/
public <T, K extends Serializable, N extends Number> Map<K, N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn, final FilterBean bean);
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT col1, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} WHERE {filter bean} GROUP BY {col1} <br>
* 如 queryColumnMapAsync(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), "targetid", (FilterBean)null)
* 等价于: SELECT targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY targetid<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
* @param <N> Number
* @param entityClass Entity类
* @param funcNodes ColumnNode[]
* @param groupByColumn GROUP BY字段
* @param bean 过滤条件
*
* @return 聚合结果Map CompletableFuture
*/
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K, N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn, final FilterBean bean);
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT col1, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} WHERE {filter node} GROUP BY {col1} <br>
* 如 queryColumnMap(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), "targetid", (FilterNode)null)
* 等价于: SELECT targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY targetid<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
* @param <N> Number
* @param entityClass Entity类
* @param funcNodes ColumnNode[]
* @param groupByColumn GROUP BY字段
* @param node 过滤条件
*
* @return 聚合结果Map CompletableFuture
*/
public <T, K extends Serializable, N extends Number> Map<K, N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn, final FilterNode node);
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT col1, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} WHERE {filter node} GROUP BY {col1} <br>
* 如 queryColumnMapAsync(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), "targetid", (FilterNode)null)
* 等价于: SELECT targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY targetid<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
* @param <N> Number
* @param entityClass Entity类
* @param funcNodes ColumnNode[]
* @param groupByColumn GROUP BY字段
* @param node 过滤条件
*
* @return 聚合结果Map CompletableFuture
*/
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K, N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn, final FilterNode node);
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT col1, col2, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} GROUP BY {col1}, {col2} <br>
* 如 queryColumnMap(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), Utility.ofArray("fromid", "targetid"))
* 等价于: SELECT fromid, targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY fromid, targetid<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
* @param <N> Number
* @param entityClass Entity类
* @param funcNodes ColumnNode[]
* @param groupByColumns GROUP BY字段
*
* @return 聚合结果Map
*/
public <T, K extends Serializable, N extends Number> Map<K[], N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns);
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT col1, col2, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} GROUP BY {col1}, {col2} <br>
* 如 queryColumnMapAsync(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), Utility.ofArray("fromid", "targetid"))
* 等价于: SELECT fromid, targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY fromid, targetid<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
* @param <N> Number
* @param entityClass Entity类
* @param funcNodes ColumnNode[]
* @param groupByColumns GROUP BY字段
*
* @return 聚合结果Map CompletableFuture
*/
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K[], N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns);
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT col1, col2, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} WHERE {filter bean} GROUP BY {col1}, {col2} <br>
* 如 queryColumnMap(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), Utility.ofArray("fromid", "targetid"), (FilterBean)null)
* 等价于: SELECT fromid, targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY fromid, targetid<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
* @param <N> Number
* @param entityClass Entity类
* @param funcNodes ColumnNode[]
* @param groupByColumns GROUP BY字段
* @param bean 过滤条件
*
* @return 聚合结果Map
*/
public <T, K extends Serializable, N extends Number> Map<K[], N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns, final FilterBean bean);
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT col1, col2, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} WHERE {filter bean} GROUP BY {col1}, {col2} <br>
* 如 queryColumnMapAsync(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), Utility.ofArray("fromid", "targetid"), (FilterBean)null)
* 等价于: SELECT fromid, targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY fromid, targetid<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
* @param <N> Number
* @param entityClass Entity类
* @param funcNodes ColumnNode[]
* @param groupByColumns GROUP BY字段
* @param bean 过滤条件
*
* @return 聚合结果Map CompletableFuture
*/
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K[], N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns, final FilterBean bean);
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT col1, col2, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} WHERE {filter node} GROUP BY {col1}, {col2} <br>
* 如 queryColumnMap(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), Utility.ofArray("fromid", "targetid"), (FilterNode)null)
* 等价于: SELECT fromid, targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY fromid, targetid<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
* @param <N> Number
* @param entityClass Entity类
* @param funcNodes ColumnNode[]
* @param groupByColumns GROUP BY字段
* @param node 过滤条件
*
* @return 聚合结果Map
*/
public <T, K extends Serializable, N extends Number> Map<K[], N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns, final FilterNode node);
/**
* 查询符合过滤条件记录的GROUP BY聚合结果Map <br>
* 等价SQL: SELECT col1, col2, FUNC{funcColumn1}, FUNC{funcColumn2} FROM {table} WHERE {filter node} GROUP BY {col1}, {col2} <br>
* 如 queryColumnMapAsync(OrderRecord.class, Utility.ofArray(ColumnNodeValue.div(ColumnFuncNode.sum("money"), 100), ColumnFuncNode.avg(ColumnNodeValue.dec("money", 20))), Utility.ofArray("fromid", "targetid"), (FilterNode)null)
* 等价于: SELECT fromid, targetid, SUM(money) / 100, AVG(money - 20) FROM orderrecord GROUP BY fromid, targetid<br>
*
* @param <T> Entity泛型
* @param <K> Key字段的数据类型
* @param <N> Number
* @param entityClass Entity类
* @param funcNodes ColumnNode[]
* @param groupByColumns GROUP BY字段
* @param node 过滤条件
*
* @return 聚合结果Map CompletableFuture
*/
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K[], N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns, final FilterNode node);
//-----------------------findAsync----------------------------
/**
* 获取指定主键值的单个记录, 返回null表示不存在值 <br>
@@ -1431,7 +1683,7 @@ public interface DataSource {
//-----------------------list set----------------------------
/**
* 查询符合过滤条件记录的某个字段Set集合 <br>
* 等价SQL: SELECT {selectedColumn} FROM {table} WHERE {column} = {key} <br>
* 等价SQL: SELECT DISTINCT {selectedColumn} FROM {table} WHERE {column} = {key} <br>
*
* @param <T> Entity泛型
* @param <V> 字段类型
@@ -1442,11 +1694,11 @@ public interface DataSource {
*
* @return 字段值的集合
*/
public <T, V extends Serializable> HashSet<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final String column, final Serializable colval);
public <T, V extends Serializable> Set<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final String column, final Serializable colval);
/**
* 查询符合过滤条件记录的某个字段Set集合 <br>
* 等价SQL: SELECT {selectedColumn} FROM {table} WHERE {column} = {key} <br>
* 等价SQL: SELECT DISTINCT {selectedColumn} FROM {table} WHERE {column} = {key} <br>
*
* @param <T> Entity泛型
* @param <V> 字段类型
@@ -1457,11 +1709,11 @@ public interface DataSource {
*
* @return 字段值的集合CompletableFuture
*/
public <T, V extends Serializable> CompletableFuture<HashSet<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final String column, final Serializable colval);
public <T, V extends Serializable> CompletableFuture<Set<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final String column, final Serializable colval);
/**
* 查询符合过滤条件记录的某个字段Set集合 <br>
* 等价SQL: SELECT {selectedColumn} FROM {table} WHERE {filter bean} <br>
* 等价SQL: SELECT DISTINCT {selectedColumn} FROM {table} WHERE {filter bean} <br>
*
* @param <T> Entity泛型
* @param <V> 字段类型
@@ -1471,11 +1723,11 @@ public interface DataSource {
*
* @return 字段值的集合
*/
public <T, V extends Serializable> HashSet<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final FilterBean bean);
public <T, V extends Serializable> Set<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final FilterBean bean);
/**
* 查询符合过滤条件记录的某个字段Set集合 <br>
* 等价SQL: SELECT {selectedColumn} FROM {table} WHERE {filter bean} <br>
* 等价SQL: SELECT DISTINCT {selectedColumn} FROM {table} WHERE {filter bean} <br>
*
* @param <T> Entity泛型
* @param <V> 字段类型
@@ -1485,11 +1737,11 @@ public interface DataSource {
*
* @return 字段值的集合CompletableFuture
*/
public <T, V extends Serializable> CompletableFuture<HashSet<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final FilterBean bean);
public <T, V extends Serializable> CompletableFuture<Set<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final FilterBean bean);
/**
* 查询符合过滤条件记录的某个字段Set集合 <br>
* 等价SQL: SELECT {selectedColumn} FROM {table} WHERE {filter node} <br>
* 等价SQL: SELECT DISTINCT {selectedColumn} FROM {table} WHERE {filter node} <br>
*
* @param <T> Entity泛型
* @param <V> 字段类型
@@ -1499,11 +1751,11 @@ public interface DataSource {
*
* @return 字段值的集合
*/
public <T, V extends Serializable> HashSet<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final FilterNode node);
public <T, V extends Serializable> Set<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final FilterNode node);
/**
* 查询符合过滤条件记录的某个字段Set集合 <br>
* 等价SQL: SELECT {selectedColumn} FROM {table} WHERE {filter node} <br>
* 等价SQL: SELECT DISTINCT {selectedColumn} FROM {table} WHERE {filter node} <br>
*
* @param <T> Entity泛型
* @param <V> 字段类型
@@ -1513,7 +1765,67 @@ public interface DataSource {
*
* @return 字段值的集合CompletableFuture
*/
public <T, V extends Serializable> CompletableFuture<HashSet<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final FilterNode node);
public <T, V extends Serializable> CompletableFuture<Set<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final FilterNode node);
/**
* 查询符合过滤条件记录的某个字段Set集合 <br>
* 等价SQL: SELECT DISTINCT {selectedColumn} FROM {table} WHERE {filter bean} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
*
* @param <T> Entity泛型
* @param <V> 字段类型
* @param selectedColumn 指定字段
* @param clazz Entity类
* @param flipper 翻页对象
* @param bean 过滤条件
*
* @return 字段值的集合
*/
public <T, V extends Serializable> Set<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final Flipper flipper, final FilterBean bean);
/**
* 查询符合过滤条件记录的某个字段Set集合 <br>
* 等价SQL: SELECT DISTINCT {selectedColumn} FROM {table} WHERE {filter bean} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
*
* @param <T> Entity泛型
* @param <V> 字段类型
* @param selectedColumn 指定字段
* @param clazz Entity类
* @param flipper 翻页对象
* @param bean 过滤条件
*
* @return 字段值的集合CompletableFuture
*/
public <T, V extends Serializable> CompletableFuture<Set<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final Flipper flipper, final FilterBean bean);
/**
* 查询符合过滤条件记录的某个字段Set集合 <br>
* 等价SQL: SELECT DISTINCT {selectedColumn} FROM {table} WHERE {filter node} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
*
* @param <T> Entity泛型
* @param <V> 字段类型
* @param selectedColumn 指定字段
* @param clazz Entity类
* @param flipper 翻页对象
* @param node 过滤条件
*
* @return 字段值的集合
*/
public <T, V extends Serializable> Set<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final Flipper flipper, final FilterNode node);
/**
* 查询符合过滤条件记录的某个字段Set集合 <br>
* 等价SQL: SELECT DISTINCT {selectedColumn} FROM {table} WHERE {filter node} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
*
* @param <T> Entity泛型
* @param <V> 字段类型
* @param selectedColumn 指定字段
* @param clazz Entity类
* @param flipper 翻页对象
* @param node 过滤条件
*
* @return 字段值的集合CompletableFuture
*/
public <T, V extends Serializable> CompletableFuture<Set<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final Flipper flipper, final FilterNode node);
/**
* 查询符合过滤条件记录的某个字段List集合 <br>
@@ -1883,6 +2195,296 @@ public interface DataSource {
*/
public <K extends Serializable, T> CompletableFuture<Map<K, T>> queryMapAsync(final Class<T> clazz, final SelectColumn selects, final FilterNode node);
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT * FROM {table} WHERE {column} = {key} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param column 过滤字段名
* @param colval 过滤字段值
*
* @return Entity的集合
*/
public <T> Set<T> querySet(final Class<T> clazz, final String column, final Serializable colval);
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT * FROM {table} WHERE {column} = {key} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param column 过滤字段名
* @param colval 过滤字段值
*
* @return Entity的集合CompletableFuture
*/
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final String column, final Serializable colval);
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT * FROM {table} WHERE {filter bean} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param bean 过滤条件
*
* @return Entity的集合
*/
public <T> Set<T> querySet(final Class<T> clazz, final FilterBean bean);
/**
* 查询符合过滤条件记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT * FROM {table} WHERE {filter bean} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
* @param bean 过滤条件
*
* @return Entity的集合CompletableFuture
*/
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final FilterBean bean);
/**
* 查询记录的Set集合 <br>
* 等价SQL: SELECT DISTINCT * FROM {table} <br>
*
* @param <T> Entity泛型
* @param clazz Entity类
*
* @return Entity的集合
*/
default <T> Set<T> querySet(final Class<T> clazz) {
// Convenience no-filter overload: delegates to the FilterNode variant with a
// null filter, i.e. queries the distinct set of all records of this Entity class.
return querySet(clazz, (FilterNode) null);
}
/**
 * Queries the Set of records matching the filter node. <br>
 * Equivalent SQL: SELECT DISTINCT * FROM {table} WHERE {filter node} <br>
 *
 * @param <T>   Entity generic type
 * @param clazz Entity class
 * @param node  filter condition node
 *
 * @return Set of matching Entity objects
 */
public <T> Set<T> querySet(final Class<T> clazz, final FilterNode node);
/**
 * Asynchronously queries the Set of all records in the table. <br>
 * Equivalent SQL: SELECT DISTINCT * FROM {table} <br>
 *
 * @param <T>   Entity generic type
 * @param clazz Entity class
 *
 * @return CompletableFuture of the Set of all Entity objects
 */
default <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz) {
    // Delegate to the FilterNode overload with no filter condition; the typed
    // local pins overload resolution to the FilterNode variant.
    final FilterNode unfiltered = null;
    return querySetAsync(clazz, unfiltered);
}
/**
 * Asynchronously queries the Set of records matching the filter node. <br>
 * Equivalent SQL: SELECT DISTINCT * FROM {table} WHERE {filter node} <br>
 *
 * @param <T>   Entity generic type
 * @param clazz Entity class
 * @param node  filter condition node
 *
 * @return CompletableFuture of the Set of matching Entity objects
 */
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final FilterNode node);
/**
 * Queries the Set of records matching the filter bean, selecting only the given columns. <br>
 * Equivalent SQL: SELECT DISTINCT {column1},{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter bean} <br>
 *
 * @param <T>     Entity generic type
 * @param clazz   Entity class
 * @param selects columns to select
 * @param bean    filter condition bean
 *
 * @return Set of matching Entity objects
 */
public <T> Set<T> querySet(final Class<T> clazz, final SelectColumn selects, final FilterBean bean);
/**
 * Asynchronously queries the Set of records matching the filter bean, selecting only the given columns. <br>
 * Equivalent SQL: SELECT DISTINCT {column1},{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter bean} <br>
 *
 * @param <T>     Entity generic type
 * @param clazz   Entity class
 * @param selects columns to select
 * @param bean    filter condition bean
 *
 * @return CompletableFuture of the Set of matching Entity objects
 */
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final SelectColumn selects, final FilterBean bean);
/**
 * Queries the Set of records matching the filter node, selecting only the given columns. <br>
 * Equivalent SQL: SELECT DISTINCT {column1},{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter node} <br>
 *
 * @param <T>     Entity generic type
 * @param clazz   Entity class
 * @param selects columns to select
 * @param node    filter condition node
 *
 * @return Set of matching Entity objects
 */
public <T> Set<T> querySet(final Class<T> clazz, final SelectColumn selects, final FilterNode node);
/**
 * Asynchronously queries the Set of records matching the filter node, selecting only the given columns. <br>
 * Equivalent SQL: SELECT DISTINCT {column1},{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter node} <br>
 *
 * @param <T>     Entity generic type
 * @param clazz   Entity class
 * @param selects columns to select
 * @param node    filter condition node
 *
 * @return CompletableFuture of the Set of matching Entity objects
 */
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final SelectColumn selects, final FilterNode node);
/**
 * Queries a page of records whose column equals the given value. <br>
 * Equivalent SQL: SELECT DISTINCT * FROM {table} WHERE {column} = {key} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
 *
 * @param <T>     Entity generic type
 * @param clazz   Entity class
 * @param flipper paging/sorting object
 * @param column  filter column name
 * @param colval  filter column value
 *
 * @return Set of matching Entity objects
 */
public <T> Set<T> querySet(final Class<T> clazz, final Flipper flipper, final String column, final Serializable colval);
/**
 * Asynchronously queries a page of records whose column equals the given value. <br>
 * Equivalent SQL: SELECT DISTINCT * FROM {table} WHERE {column} = {key} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
 *
 * @param <T>     Entity generic type
 * @param clazz   Entity class
 * @param flipper paging/sorting object
 * @param column  filter column name
 * @param colval  filter column value
 *
 * @return CompletableFuture of the Set of matching Entity objects
 */
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final Flipper flipper, final String column, final Serializable colval);
/**
 * Queries a page of records matching the filter bean. <br>
 * Equivalent SQL: SELECT DISTINCT * FROM {table} WHERE {filter bean} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
 *
 * @param <T>     Entity generic type
 * @param clazz   Entity class
 * @param flipper paging/sorting object
 * @param bean    filter condition bean
 *
 * @return Set of matching Entity objects
 */
public <T> Set<T> querySet(final Class<T> clazz, final Flipper flipper, final FilterBean bean);
/**
 * Asynchronously queries a page of records matching the filter bean. <br>
 * Equivalent SQL: SELECT DISTINCT * FROM {table} WHERE {filter bean} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
 *
 * @param <T>     Entity generic type
 * @param clazz   Entity class
 * @param flipper paging/sorting object
 * @param bean    filter condition bean
 *
 * @return CompletableFuture of the Set of matching Entity objects
 */
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final Flipper flipper, final FilterBean bean);
/**
 * Queries a page of records matching the filter node. <br>
 * Equivalent SQL: SELECT DISTINCT * FROM {table} WHERE {filter node} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
 *
 * @param <T>     Entity generic type
 * @param clazz   Entity class
 * @param flipper paging/sorting object
 * @param node    filter condition node
 *
 * @return Set of matching Entity objects
 */
public <T> Set<T> querySet(final Class<T> clazz, final Flipper flipper, final FilterNode node);
/**
 * Asynchronously queries a page of records matching the filter node. <br>
 * Equivalent SQL: SELECT DISTINCT * FROM {table} WHERE {filter node} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
 *
 * @param <T>     Entity generic type
 * @param clazz   Entity class
 * @param flipper paging/sorting object
 * @param node    filter condition node
 *
 * @return CompletableFuture of the Set of matching Entity objects
 */
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final Flipper flipper, final FilterNode node);
/**
 * Queries a page of records matching the filter bean, selecting only the given columns. <br>
 * Equivalent SQL: SELECT DISTINCT {column1},{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter bean} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
 *
 * @param <T>     Entity generic type
 * @param clazz   Entity class
 * @param selects columns to select
 * @param flipper paging/sorting object
 * @param bean    filter condition bean
 *
 * @return Set of matching Entity objects
 */
public <T> Set<T> querySet(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterBean bean);
/**
 * Asynchronously queries a page of records matching the filter bean, selecting only the given columns. <br>
 * Equivalent SQL: SELECT DISTINCT {column1},{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter bean} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
 *
 * @param <T>     Entity generic type
 * @param clazz   Entity class
 * @param selects columns to select
 * @param flipper paging/sorting object
 * @param bean    filter condition bean
 *
 * @return CompletableFuture of the Set of matching Entity objects
 */
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterBean bean);
/**
 * Queries a page of records matching the filter node, selecting only the given columns. <br>
 * Equivalent SQL: SELECT DISTINCT {column1},{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter node} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
 *
 * @param <T>     Entity generic type
 * @param clazz   Entity class
 * @param selects columns to select
 * @param flipper paging/sorting object
 * @param node    filter condition node
 *
 * @return Set of matching Entity objects
 */
public <T> Set<T> querySet(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterNode node);
/**
 * Asynchronously queries a page of records matching the filter node, selecting only the given columns. <br>
 * Equivalent SQL: SELECT DISTINCT {column1},{column2}, &#183;&#183;&#183; FROM {table} WHERE {filter node} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>
 *
 * @param <T>     Entity generic type
 * @param clazz   Entity class
 * @param selects columns to select
 * @param flipper paging/sorting object
 * @param node    filter condition node
 *
 * @return CompletableFuture of the Set of matching Entity objects
 */
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterNode node);
/**
* 查询符合过滤条件记录的List集合 <br>
* 等价SQL: SELECT * FROM {table} WHERE {column} = {key} ORDER BY {flipper.sort} LIMIT {flipper.limit} <br>

View File

@@ -7,7 +7,7 @@ package org.redkale.source;
import java.io.*;
import java.lang.reflect.Constructor;
import java.net.URL;
import java.net.*;
import java.util.*;
import javax.xml.stream.*;
import org.redkale.util.AnyValue;
@@ -139,7 +139,7 @@ public final class DataSources {
public static DataSource createDataSource(final String unitName) throws IOException {
return createDataSource(unitName, System.getProperty(DATASOURCE_CONFPATH) == null
? DataJdbcSource.class.getResource("/META-INF/persistence.xml")
: new File(System.getProperty(DATASOURCE_CONFPATH)).toURI().toURL());
: (System.getProperty(DATASOURCE_CONFPATH, "").contains("://") ? URI.create(System.getProperty(DATASOURCE_CONFPATH)).toURL() : new File(System.getProperty(DATASOURCE_CONFPATH)).toURI().toURL()));
}
public static DataSource createDataSource(final String unitName, URL persistxml) throws IOException {

View File

@@ -59,11 +59,13 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
@Resource(name = "$")
protected DataCacheListener cacheListener;
protected final BiFunction<EntityInfo, Object, CharSequence> sqlFormatter;
protected final BiConsumer futureCompleteConsumer = (r, t) -> {
if (t != null) logger.log(Level.SEVERE, "CompletableFuture complete error", (Throwable) t);
};
protected final BiFunction<DataSource, Class, List> fullloader = (s, t) -> ((Sheet) querySheetCompose(false, false, t, null, null, (FilterNode) null).join()).list(true);
protected final BiFunction<DataSource, Class, List> fullloader = (s, t) -> ((Sheet) querySheetCompose(false, false, false, t, null, null, (FilterNode) null).join()).list(true);
@SuppressWarnings({"OverridableMethodCallInConstructor", "LeakingThisInConstructor"})
public DataSqlSource(String unitName, URL persistxml, Properties readprop, Properties writeprop) {
@@ -89,7 +91,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
} else if (s.length() == 2) {
s = "0" + s;
}
t.setName(cname + "-Thread-" + s);
t.setName("Redkale-" + cname + "-Thread-" + s);
t.setUncaughtExceptionHandler(ueh);
return t;
});
@@ -107,6 +109,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
Semaphore semaphore = maxconns > 0 ? new Semaphore(maxconns) : null;
this.readPool = createPoolSource(this, "read", queue, semaphore, readprop);
this.writePool = createPoolSource(this, "write", queue, semaphore, writeprop);
this.sqlFormatter = (info, val) -> formatValueToString(info, val);
}
@Local
@@ -134,10 +137,10 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
protected abstract <T> CompletableFuture<Integer> deleteDB(final EntityInfo<T> info, Flipper flipper, final String sql);
//清空表
protected abstract <T> CompletableFuture<Integer> clearTableDB(final EntityInfo<T> info, final String sql);
protected abstract <T> CompletableFuture<Integer> clearTableDB(final EntityInfo<T> info, final String table, final String sql);
//删除表
protected abstract <T> CompletableFuture<Integer> dropTableDB(final EntityInfo<T> info, final String sql);
protected abstract <T> CompletableFuture<Integer> dropTableDB(final EntityInfo<T> info, final String table, final String sql);
//更新纪录
protected abstract <T> CompletableFuture<Integer> updateDB(final EntityInfo<T> info, T... entitys);
@@ -154,6 +157,9 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
//查询Map数据
protected abstract <T, K extends Serializable, N extends Number> CompletableFuture<Map<K, N>> queryColumnMapDB(final EntityInfo<T> info, final String sql, final String keyColumn);
//查询Map数据
protected abstract <T, K extends Serializable, N extends Number> CompletableFuture<Map<K[], N[]>> queryColumnMapDB(final EntityInfo<T> info, final String sql, final ColumnNode[] funcNodes, final String[] groupByColumns);
//查询单条记录
protected abstract <T> CompletableFuture<T> findDB(final EntityInfo<T> info, final String sql, final boolean onlypk, final SelectColumn selects);
@@ -164,7 +170,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
protected abstract <T> CompletableFuture<Boolean> existsDB(final EntityInfo<T> info, final String sql, final boolean onlypk);
//查询一页数据
protected abstract <T> CompletableFuture<Sheet<T>> querySheetDB(final EntityInfo<T> info, final boolean readcache, final boolean needtotal, final SelectColumn selects, final Flipper flipper, final FilterNode node);
protected abstract <T> CompletableFuture<Sheet<T>> querySheetDB(final EntityInfo<T> info, final boolean readcache, final boolean needtotal, final boolean distinct, final SelectColumn selects, final Flipper flipper, final FilterNode node);
protected <T> T getEntityValue(EntityInfo<T> info, final SelectColumn sels, final ResultSet set) throws SQLException {
return info.getEntityValue(sels, set);
@@ -206,6 +212,8 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
@Override
public void destroy(AnyValue config) {
if (this.executor != null) this.executor.shutdownNow();
if (readPool != null) readPool.close();
if (writePool != null) writePool.close();
}
@Local
@@ -284,7 +292,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
return null;
}
protected <T> String formatValueToString(final EntityInfo<T> info, Object value) {
protected <T> CharSequence formatValueToString(final EntityInfo<T> info, Object value) {
final String dbtype = this.readPool.getDbtype();
if ("mysql".equals(dbtype)) {
if (value == null) return null;
@@ -296,7 +304,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
}
return String.valueOf(value);
}
return info.formatToString(value);
return info.formatSQLValue(value, null);
}
//----------------------------- insert -----------------------------
@@ -313,11 +321,6 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
if (entitys.length == 0) return 0;
checkEntity("insert", false, entitys);
final EntityInfo<T> info = loadEntityInfo((Class<T>) entitys[0].getClass());
if (info.autouuid) {
for (T value : entitys) {
info.createPrimaryValue(value);
}
}
if (isOnlyCache(info)) return insertCache(info, entitys);
return insertDB(info, entitys).whenComplete((rs, t) -> {
if (t != null) {
@@ -328,17 +331,24 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
}).join();
}
/**
 * Inserts every entity in the collection; a null or empty collection is a
 * no-op returning 0. Delegates to the varargs {@code insert(T...)} overload.
 *
 * @param <T>     Entity generic type
 * @param entitys entities to insert (may be null or empty)
 *
 * @return number of inserted records
 */
@Override
public final <T> int insert(final Collection<T> entitys) {
    return (entitys == null || entitys.isEmpty()) ? 0 : insert(entitys.toArray());
}
/**
 * Inserts every entity produced by the stream; a null stream is a no-op
 * returning 0. Delegates to the varargs {@code insert(T...)} overload.
 *
 * @param <T>     Entity generic type
 * @param entitys stream of entities to insert (may be null)
 *
 * @return number of inserted records
 */
@Override
public final <T> int insert(final Stream<T> entitys) {
    return entitys == null ? 0 : insert(entitys.toArray());
}
@Override
public <T> CompletableFuture<Integer> insertAsync(@RpcCall(DataCallArrayAttribute.class) T... entitys) {
if (entitys.length == 0) return CompletableFuture.completedFuture(0);
CompletableFuture future = checkEntity("insert", true, entitys);
if (future != null) return future;
final EntityInfo<T> info = loadEntityInfo((Class<T>) entitys[0].getClass());
if (info.autouuid) {
for (T value : entitys) {
info.createPrimaryValue(value);
}
}
if (isOnlyCache(info)) {
return CompletableFuture.supplyAsync(() -> insertCache(info, entitys), getExecutor());
}
@@ -358,6 +368,18 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
});
}
/**
 * Asynchronously inserts every entity in the collection; a null or empty
 * collection completes immediately with 0. Delegates to the varargs
 * {@code insertAsync(T...)} overload.
 *
 * @param <T>     Entity generic type
 * @param entitys entities to insert (may be null or empty)
 *
 * @return CompletableFuture of the number of inserted records
 */
@Override
public final <T> CompletableFuture<Integer> insertAsync(final Collection<T> entitys) {
    return (entitys == null || entitys.isEmpty())
        ? CompletableFuture.completedFuture(0)
        : insertAsync(entitys.toArray());
}
/**
 * Asynchronously inserts every entity produced by the stream; a null stream
 * completes immediately with 0. Delegates to the varargs
 * {@code insertAsync(T...)} overload.
 *
 * @param <T>     Entity generic type
 * @param entitys stream of entities to insert (may be null)
 *
 * @return CompletableFuture of the number of inserted records
 */
@Override
public final <T> CompletableFuture<Integer> insertAsync(final Stream<T> entitys) {
    return entitys == null
        ? CompletableFuture.completedFuture(0)
        : insertAsync(entitys.toArray());
}
protected <T> int insertCache(final EntityInfo<T> info, T... entitys) {
final EntityCache<T> cache = info.getCache();
if (cache == null) return 0;
@@ -473,7 +495,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
public <T> int delete(Class<T> clazz, final Flipper flipper, FilterNode node) {
final EntityInfo<T> info = loadEntityInfo(clazz);
if (isOnlyCache(info)) return deleteCache(info, -1, flipper, node);
return DataSqlSource.this.deleteCompose(info, flipper, node).whenComplete((rs, t) -> {
return this.deleteCompose(info, flipper, node).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
@@ -488,14 +510,14 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
if (isOnlyCache(info)) {
return CompletableFuture.supplyAsync(() -> deleteCache(info, -1, flipper, node), getExecutor());
}
if (isAsync()) return DataSqlSource.this.deleteCompose(info, flipper, node).whenComplete((rs, t) -> {
if (isAsync()) return this.deleteCompose(info, flipper, node).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
deleteCache(info, rs, flipper, node);
}
});
return CompletableFuture.supplyAsync(() -> DataSqlSource.this.deleteCompose(info, flipper, node).join(), getExecutor()).whenComplete((rs, t) -> {
return CompletableFuture.supplyAsync(() -> this.deleteCompose(info, flipper, node).join(), getExecutor()).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
@@ -506,13 +528,13 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
protected <T> CompletableFuture<Integer> deleteCompose(final EntityInfo<T> info, final Serializable... pks) {
if (pks.length == 1) {
String sql = "DELETE FROM " + info.getTable(pks[0]) + " WHERE " + info.getPrimarySQLColumn() + " = " + FilterNode.formatToString(info.getSQLValue(info.getPrimarySQLColumn(), pks[0]));
String sql = "DELETE FROM " + info.getTable(pks[0]) + " WHERE " + info.getPrimarySQLColumn() + " = " + info.formatSQLValue(info.getPrimarySQLColumn(), pks[0], sqlFormatter);
return deleteDB(info, null, sql);
}
String sql = "DELETE FROM " + info.getTable(pks[0]) + " WHERE " + info.getPrimarySQLColumn() + " IN (";
for (int i = 0; i < pks.length; i++) {
if (i > 0) sql += ',';
sql += FilterNode.formatToString(info.getSQLValue(info.getPrimarySQLColumn(), pks[i]));
sql += info.formatSQLValue(info.getPrimarySQLColumn(), pks[i], sqlFormatter);
}
sql += ")";
if (info.isLoggable(logger, Level.FINEST, sql)) logger.finest(info.getType().getSimpleName() + " delete sql=" + sql);
@@ -549,7 +571,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
public <T> int clearTable(Class<T> clazz, FilterNode node) {
final EntityInfo<T> info = loadEntityInfo(clazz);
if (isOnlyCache(info)) return clearTableCache(info, node);
return DataSqlSource.this.clearTableCompose(info, node).whenComplete((rs, t) -> {
return this.clearTableCompose(info, node).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
@@ -569,14 +591,14 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
if (isOnlyCache(info)) {
return CompletableFuture.supplyAsync(() -> clearTableCache(info, node), getExecutor());
}
if (isAsync()) return DataSqlSource.this.clearTableCompose(info, node).whenComplete((rs, t) -> {
if (isAsync()) return this.clearTableCompose(info, node).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
clearTableCache(info, node);
}
});
return CompletableFuture.supplyAsync(() -> DataSqlSource.this.clearTableCompose(info, node).join(), getExecutor()).whenComplete((rs, t) -> {
return CompletableFuture.supplyAsync(() -> this.clearTableCompose(info, node).join(), getExecutor()).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
@@ -586,9 +608,10 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
}
protected <T> CompletableFuture<Integer> clearTableCompose(final EntityInfo<T> info, final FilterNode node) {
String sql = "TRUNCATE TABLE " + info.getTable(node);
final String table = info.getTable(node);
String sql = "TRUNCATE TABLE " + table;
if (info.isLoggable(logger, Level.FINEST, sql)) logger.finest(info.getType().getSimpleName() + " clearTable sql=" + sql);
return clearTableDB(info, sql);
return clearTableDB(info, table, sql);
}
//----------------------------- dropTableCompose -----------------------------
@@ -601,7 +624,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
public <T> int dropTable(Class<T> clazz, FilterNode node) {
final EntityInfo<T> info = loadEntityInfo(clazz);
if (isOnlyCache(info)) return dropTableCache(info, node);
return DataSqlSource.this.dropTableCompose(info, node).whenComplete((rs, t) -> {
return this.dropTableCompose(info, node).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
@@ -621,14 +644,14 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
if (isOnlyCache(info)) {
return CompletableFuture.supplyAsync(() -> dropTableCache(info, node), getExecutor());
}
if (isAsync()) return DataSqlSource.this.dropTableCompose(info, node).whenComplete((rs, t) -> {
if (isAsync()) return this.dropTableCompose(info, node).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
dropTableCache(info, node);
}
});
return CompletableFuture.supplyAsync(() -> DataSqlSource.this.dropTableCompose(info, node).join(), getExecutor()).whenComplete((rs, t) -> {
return CompletableFuture.supplyAsync(() -> this.dropTableCompose(info, node).join(), getExecutor()).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
@@ -638,9 +661,10 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
}
protected <T> CompletableFuture<Integer> dropTableCompose(final EntityInfo<T> info, final FilterNode node) {
String sql = "DROP TABLE " + info.getTable(node);
final String table = info.getTable(node);
String sql = "DROP TABLE " + table;
if (info.isLoggable(logger, Level.FINEST, sql)) logger.finest(info.getType().getSimpleName() + " dropTable sql=" + sql);
return dropTableDB(info, sql);
return dropTableDB(info, table, sql);
}
protected <T> int clearTableCache(final EntityInfo<T> info, FilterNode node) {
@@ -795,11 +819,11 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
protected <T> CompletableFuture<Integer> updateColumnCompose(final EntityInfo<T> info, Serializable pk, String column, final Serializable colval) {
if (colval instanceof byte[]) {
String sql = "UPDATE " + info.getTable(pk) + " SET " + info.getSQLColumn(null, column) + " = " + prepareParamSign(1) + " WHERE " + info.getPrimarySQLColumn() + " = " + FilterNode.formatToString(info.getSQLValue(info.getPrimarySQLColumn(), pk));
String sql = "UPDATE " + info.getTable(pk) + " SET " + info.getSQLColumn(null, column) + " = " + prepareParamSign(1) + " WHERE " + info.getPrimarySQLColumn() + " = " + info.formatSQLValue(info.getPrimarySQLColumn(), pk, sqlFormatter);
return updateDB(info, null, sql, true, colval);
} else {
String sql = "UPDATE " + info.getTable(pk) + " SET " + info.getSQLColumn(null, column) + " = "
+ formatValueToString(info, info.getSQLValue(column, colval)) + " WHERE " + info.getPrimarySQLColumn() + " = " + FilterNode.formatToString(info.getSQLValue(info.getPrimarySQLColumn(), pk));
+ info.formatSQLValue(column, colval, sqlFormatter) + " WHERE " + info.getPrimarySQLColumn() + " = " + info.formatSQLValue(info.getPrimarySQLColumn(), pk, sqlFormatter);
return updateDB(info, null, sql, false);
}
}
@@ -819,7 +843,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
public <T> int updateColumn(Class<T> clazz, String column, Serializable colval, FilterNode node) {
final EntityInfo<T> info = loadEntityInfo(clazz);
if (isOnlyCache(info)) return updateCache(info, -1, column, colval, node);
return DataSqlSource.this.updateColumnCompose(info, column, colval, node).whenComplete((rs, t) -> {
return this.updateColumnCompose(info, column, colval, node).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
@@ -834,14 +858,14 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
if (isOnlyCache(info)) {
return CompletableFuture.supplyAsync(() -> updateCache(info, -1, column, colval, node), getExecutor());
}
if (isAsync()) return DataSqlSource.this.updateColumnCompose(info, column, colval, node).whenComplete((rs, t) -> {
if (isAsync()) return this.updateColumnCompose(info, column, colval, node).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
updateCache(info, rs, column, colval, node);
}
});
return CompletableFuture.supplyAsync(() -> DataSqlSource.this.updateColumnCompose(info, column, colval, node).join(), getExecutor()).whenComplete((rs, t) -> {
return CompletableFuture.supplyAsync(() -> this.updateColumnCompose(info, column, colval, node).join(), getExecutor()).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
@@ -871,7 +895,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
return updateDB(info, null, sql, true, colval);
} else {
String sql = "UPDATE " + info.getTable(node) + " a " + (join1 == null ? "" : (", " + join1))
+ " SET " + info.getSQLColumn(alias, column) + " = " + formatValueToString(info, colval)
+ " SET " + info.getSQLColumn(alias, column) + " = " + info.formatSQLValue(colval, sqlFormatter)
+ ((where == null || where.length() == 0) ? (join2 == null ? "" : (" WHERE " + join2))
: (" WHERE " + where + (join2 == null ? "" : (" AND " + join2))));
return updateDB(info, null, sql, false);
@@ -893,7 +917,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
if (values == null || values.length < 1) return -1;
final EntityInfo<T> info = loadEntityInfo(clazz);
if (isOnlyCache(info)) return updateCache(info, -1, pk, values);
return DataSqlSource.this.updateColumnCompose(info, pk, values).whenComplete((rs, t) -> {
return this.updateColumnCompose(info, pk, values).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
@@ -909,14 +933,14 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
if (isOnlyCache(info)) {
return CompletableFuture.supplyAsync(() -> updateCache(info, -1, pk, values), getExecutor());
}
if (isAsync()) return DataSqlSource.this.updateColumnCompose(info, pk, values).whenComplete((rs, t) -> {
if (isAsync()) return this.updateColumnCompose(info, pk, values).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
updateCache(info, rs, pk, values);
}
});
return CompletableFuture.supplyAsync(() -> DataSqlSource.this.updateColumnCompose(info, pk, values).join(), getExecutor()).whenComplete((rs, t) -> {
return CompletableFuture.supplyAsync(() -> this.updateColumnCompose(info, pk, values).join(), getExecutor()).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
@@ -934,17 +958,17 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
Attribute<T, Serializable> attr = info.getUpdateAttribute(col.getColumn());
if (attr == null) throw new RuntimeException(info.getType() + " cannot found column " + col.getColumn());
if (setsql.length() > 0) setsql.append(", ");
String c = info.getSQLColumn(null, col.getColumn());
String sqlColumn = info.getSQLColumn(null, col.getColumn());
if (col.getValue() instanceof byte[]) {
if (blobs == null) blobs = new ArrayList<>();
blobs.add((byte[]) col.getValue());
setsql.append(c).append(" = ").append(prepareParamSign(++index));
setsql.append(sqlColumn).append(" = ").append(prepareParamSign(++index));
} else {
setsql.append(c).append(" = ").append(info.formatSQLValue(c, attr, col));
setsql.append(sqlColumn).append(" = ").append(info.formatSQLValue(sqlColumn, attr, col, sqlFormatter));
}
}
if (setsql.length() < 1) return CompletableFuture.completedFuture(0);
String sql = "UPDATE " + info.getTable(pk) + " SET " + setsql + " WHERE " + info.getPrimarySQLColumn() + " = " + FilterNode.formatToString(info.getSQLValue(info.getPrimarySQLColumn(), pk));
String sql = "UPDATE " + info.getTable(pk) + " SET " + setsql + " WHERE " + info.getPrimarySQLColumn() + " = " + info.formatSQLValue(info.getPrimarySQLColumn(), pk, sqlFormatter);
if (blobs == null) return updateDB(info, null, sql, false);
return updateDB(info, null, sql, true, blobs.toArray());
}
@@ -974,7 +998,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
if (values == null || values.length < 1) return -1;
final EntityInfo<T> info = loadEntityInfo(clazz);
if (isOnlyCache(info)) return updateCache(info, -1, node, flipper, values);
return DataSqlSource.this.updateColumnCompose(info, node, flipper, values).whenComplete((rs, t) -> {
return this.updateColumnCompose(info, node, flipper, values).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
@@ -990,14 +1014,14 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
if (isOnlyCache(info)) {
return CompletableFuture.supplyAsync(() -> updateCache(info, -1, node, flipper, values), getExecutor());
}
if (isAsync()) return DataSqlSource.this.updateColumnCompose(info, node, flipper, values).whenComplete((rs, t) -> {
if (isAsync()) return this.updateColumnCompose(info, node, flipper, values).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
updateCache(info, rs, node, flipper, values);
}
});
return CompletableFuture.supplyAsync(() -> DataSqlSource.this.updateColumnCompose(info, node, flipper, values).join(), getExecutor()).whenComplete((rs, t) -> {
return CompletableFuture.supplyAsync(() -> this.updateColumnCompose(info, node, flipper, values).join(), getExecutor()).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
@@ -1016,19 +1040,19 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
Attribute<T, Serializable> attr = info.getUpdateAttribute(col.getColumn());
if (attr == null) continue;
if (setsql.length() > 0) setsql.append(", ");
String c = info.getSQLColumn(alias, col.getColumn());
String sqlColumn = info.getSQLColumn(alias, col.getColumn());
if (col.getValue() instanceof byte[]) {
if (blobs == null) blobs = new ArrayList<>();
blobs.add((byte[]) col.getValue());
setsql.append(c).append(" = ").append(prepareParamSign(++index));
setsql.append(sqlColumn).append(" = ").append(prepareParamSign(++index));
} else {
setsql.append(c).append(" = ").append(info.formatSQLValue(c, attr, col));
setsql.append(sqlColumn).append(" = ").append(info.formatSQLValue(sqlColumn, attr, col, sqlFormatter));
}
}
if (setsql.length() < 1) return CompletableFuture.completedFuture(0);
Map<Class, String> joinTabalis = node.getJoinTabalis();
CharSequence join = node.createSQLJoin(this, true, joinTabalis, new HashSet<>(), info);
CharSequence where = node.createSQLExpress(info, joinTabalis);
Map<Class, String> joinTabalis = node == null ? null : node.getJoinTabalis();
CharSequence join = node == null ? null : node.createSQLJoin(this, true, joinTabalis, new HashSet<>(), info);
CharSequence where = node == null ? null : node.createSQLExpress(info, joinTabalis);
StringBuilder join1 = null;
StringBuilder join2 = null;
if (join != null) {
@@ -1070,7 +1094,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
Class<T> clazz = (Class) entity.getClass();
final EntityInfo<T> info = loadEntityInfo(clazz);
if (isOnlyCache(info)) return updateCache(info, -1, false, entity, null, selects);
return DataSqlSource.this.updateColumnCompose(info, false, entity, null, selects).whenComplete((rs, t) -> {
return this.updateColumnCompose(info, false, entity, null, selects).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
@@ -1087,14 +1111,14 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
if (isOnlyCache(info)) {
return CompletableFuture.supplyAsync(() -> updateCache(info, -1, false, entity, null, selects), getExecutor());
}
if (isAsync()) return DataSqlSource.this.updateColumnCompose(info, false, entity, null, selects).whenComplete((rs, t) -> {
if (isAsync()) return this.updateColumnCompose(info, false, entity, null, selects).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
updateCache(info, rs, false, entity, null, selects);
}
});
return CompletableFuture.supplyAsync(() -> DataSqlSource.this.updateColumnCompose(info, false, entity, null, selects).join(), getExecutor()).whenComplete((rs, t) -> {
return CompletableFuture.supplyAsync(() -> this.updateColumnCompose(info, false, entity, null, selects).join(), getExecutor()).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
@@ -1109,7 +1133,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
Class<T> clazz = (Class) entity.getClass();
final EntityInfo<T> info = loadEntityInfo(clazz);
if (isOnlyCache(info)) return updateCache(info, -1, true, entity, node, selects);
return DataSqlSource.this.updateColumnCompose(info, true, entity, node, selects).whenComplete((rs, t) -> {
return this.updateColumnCompose(info, true, entity, node, selects).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
@@ -1126,14 +1150,14 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
if (isOnlyCache(info)) {
return CompletableFuture.supplyAsync(() -> updateCache(info, -1, true, entity, node, selects), getExecutor());
}
if (isAsync()) return DataSqlSource.this.updateColumnCompose(info, true, entity, node, selects).whenComplete((rs, t) -> {
if (isAsync()) return this.updateColumnCompose(info, true, entity, node, selects).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
updateCache(info, rs, true, entity, node, selects);
}
});
return CompletableFuture.supplyAsync(() -> DataSqlSource.this.updateColumnCompose(info, true, entity, node, selects).join(), getExecutor()).whenComplete((rs, t) -> {
return CompletableFuture.supplyAsync(() -> this.updateColumnCompose(info, true, entity, node, selects).join(), getExecutor()).whenComplete((rs, t) -> {
if (t != null) {
futureCompleteConsumer.accept(rs, t);
} else {
@@ -1157,7 +1181,9 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
blobs.add((byte[]) val);
setsql.append(" = ").append(prepareParamSign(++index));
} else {
setsql.append(" = ").append(formatValueToString(info, val));
CharSequence sqlval = info.formatSQLValue(val, sqlFormatter);
if (sqlval == null && info.isNotNullJson(attr)) sqlval = "''";
setsql.append(" = ").append(sqlval);
}
}
if (neednode) {
@@ -1177,8 +1203,8 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
if (blobs == null) return updateDB(info, null, sql, false);
return updateDB(info, null, sql, true, blobs.toArray());
} else {
final Serializable id = info.getSQLValue(info.getPrimary(), entity);
String sql = "UPDATE " + info.getTable(id) + " a SET " + setsql + " WHERE " + info.getPrimarySQLColumn() + " = " + FilterNode.formatToString(id);
final Serializable id = (Serializable) info.getSQLValue(info.getPrimary(), entity);
String sql = "UPDATE " + info.getTable(id) + " a SET " + setsql + " WHERE " + info.getPrimarySQLColumn() + " = " + info.formatSQLValue(id, sqlFormatter);
if (blobs == null) return updateDB(info, null, sql, false);
return updateDB(info, null, sql, true, blobs.toArray());
}
@@ -1284,7 +1310,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
String column = info.getPrimary().field();
int c = 0;
for (Serializable id : pks) {
Sheet<T> sheet = querySheetCompose(false, true, clazz, null, FLIPPER_ONE, FilterNode.create(column, id)).join();
Sheet<T> sheet = querySheetCompose(false, true, false, clazz, null, FLIPPER_ONE, FilterNode.create(column, id)).join();
T value = sheet.isEmpty() ? null : sheet.list().get(0);
if (value != null) c += cache.update(value);
}
@@ -1501,17 +1527,130 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
}
protected <T, K extends Serializable, N extends Number> CompletableFuture<Map<K, N>> queryColumnMapCompose(final EntityInfo<T> info, final String keyColumn, final FilterFunc func, final String funcColumn, FilterNode node) {
final String sqlkey = info.getSQLColumn(null, keyColumn);
final String keySqlColumn = info.getSQLColumn(null, keyColumn);
final Map<Class, String> joinTabalis = node == null ? null : node.getJoinTabalis();
final Set<String> haset = new HashSet<>();
final CharSequence join = node == null ? null : node.createSQLJoin(this, false, joinTabalis, haset, info);
final CharSequence where = node == null ? null : node.createSQLExpress(info, joinTabalis);
final String sql = "SELECT a." + sqlkey + ", " + func.getColumn((funcColumn == null || funcColumn.isEmpty() ? "*" : info.getSQLColumn("a", funcColumn)))
+ " FROM " + info.getTable(node) + " a" + (join == null ? "" : join) + ((where == null || where.length() == 0) ? "" : (" WHERE " + where)) + " GROUP BY a." + sqlkey;
final String funcSqlColumn = func == null ? info.getSQLColumn("a", funcColumn) : func.getColumn((funcColumn == null || funcColumn.isEmpty() ? "*" : info.getSQLColumn("a", funcColumn)));
final String sql = "SELECT a." + keySqlColumn + ", " + funcSqlColumn
+ " FROM " + info.getTable(node) + " a" + (join == null ? "" : join) + ((where == null || where.length() == 0) ? "" : (" WHERE " + where)) + " GROUP BY a." + keySqlColumn;
if (info.isLoggable(logger, Level.FINEST, sql)) logger.finest(info.getType().getSimpleName() + " querycolumnmap sql=" + sql);
return queryColumnMapDB(info, sql, keyColumn);
}
/**
 * Queries grouped aggregate values keyed by a single group-by column, with no filter.
 * Delegates to the FilterNode overload with a null filter.
 */
@Override
public <T, K extends Serializable, N extends Number> Map<K, N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn) {
    final FilterNode unfiltered = null;
    return queryColumnMap(entityClass, funcNodes, groupByColumn, unfiltered);
}
/**
 * Async variant of the single-column queryColumnMap with no filter.
 * Delegates to the FilterNode overload with a null filter.
 */
@Override
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K, N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn) {
    final FilterNode unfiltered = null;
    return queryColumnMapAsync(entityClass, funcNodes, groupByColumn, unfiltered);
}
/**
 * Queries grouped aggregate values filtered by a FilterBean.
 * Converts the bean to a FilterNode and delegates.
 */
@Override
public <T, K extends Serializable, N extends Number> Map<K, N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn, final FilterBean bean) {
    final FilterNode node = FilterNodeBean.createFilterNode(bean);
    return queryColumnMap(entityClass, funcNodes, groupByColumn, node);
}
/**
 * Async variant of the single-column queryColumnMap filtered by a FilterBean.
 * Converts the bean to a FilterNode and delegates.
 */
@Override
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K, N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn, final FilterBean bean) {
    final FilterNode node = FilterNodeBean.createFilterNode(bean);
    return queryColumnMapAsync(entityClass, funcNodes, groupByColumn, node);
}
/**
 * Queries grouped aggregate values keyed by a single column.
 * Runs the multi-column overload with a one-element group-by array, then
 * unwraps each single-element key array into a scalar key (insertion order kept).
 */
@Override
public <T, K extends Serializable, N extends Number> Map<K, N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn, final FilterNode node) {
    final Map<K[], N[]> grouped = queryColumnMap(entityClass, funcNodes, Utility.ofArray(groupByColumn), node);
    final Map<K, N[]> result = new LinkedHashMap<>();
    for (Map.Entry<K[], N[]> entry : grouped.entrySet()) {
        result.put(entry.getKey()[0], entry.getValue());
    }
    return result;
}
/**
 * Async variant: runs the multi-column overload with a one-element group-by
 * array, then unwraps each single-element key array into a scalar key.
 */
@Override
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K, N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String groupByColumn, final FilterNode node) {
    final CompletableFuture<Map<K[], N[]>> future = queryColumnMapAsync(entityClass, funcNodes, Utility.ofArray(groupByColumn), node);
    return future.thenApply(grouped -> {
        final Map<K, N[]> result = new LinkedHashMap<>();
        for (Map.Entry<K[], N[]> entry : grouped.entrySet()) {
            result.put(entry.getKey()[0], entry.getValue());
        }
        return result;
    });
}
/**
 * Queries grouped aggregate values keyed by multiple columns, with no filter.
 * Delegates to the FilterNode overload with a null filter.
 */
@Override
public <T, K extends Serializable, N extends Number> Map<K[], N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns) {
    final FilterNode unfiltered = null;
    return queryColumnMap(entityClass, funcNodes, groupByColumns, unfiltered);
}
/**
 * Async variant of the multi-column queryColumnMap with no filter.
 * Delegates to the FilterNode overload with a null filter.
 */
@Override
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K[], N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns) {
    final FilterNode unfiltered = null;
    return queryColumnMapAsync(entityClass, funcNodes, groupByColumns, unfiltered);
}
/**
 * Queries grouped aggregate values keyed by multiple columns, filtered by a FilterBean.
 * Converts the bean to a FilterNode and delegates.
 */
@Override
public <T, K extends Serializable, N extends Number> Map<K[], N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns, final FilterBean bean) {
    final FilterNode node = FilterNodeBean.createFilterNode(bean);
    return queryColumnMap(entityClass, funcNodes, groupByColumns, node);
}
/**
 * Async variant of the multi-column queryColumnMap filtered by a FilterBean.
 * Converts the bean to a FilterNode and delegates.
 */
@Override
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K[], N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns, final FilterBean bean) {
    final FilterNode node = FilterNodeBean.createFilterNode(bean);
    return queryColumnMapAsync(entityClass, funcNodes, groupByColumns, node);
}
/**
 * Queries grouped aggregate values keyed by multiple columns.
 * Serves the result from the entity cache when the entity is cache-only or the
 * cache is fully loaded AND the filter can be evaluated against the cache;
 * otherwise blocks on the SQL compose path.
 */
@Override
public <T, K extends Serializable, N extends Number> Map<K[], N[]> queryColumnMap(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns, final FilterNode node) {
    final EntityInfo info = loadEntityInfo(entityClass);
    final EntityCache cache = info.getCache();
    final boolean cacheReady = cache != null && (isOnlyCache(info) || cache.isFullLoaded());
    if (cacheReady && (node == null || node.isCacheUseable(this))) {
        return cache.queryColumnMap(funcNodes, groupByColumns, node);
    }
    return (Map) queryColumnMapCompose(info, funcNodes, groupByColumns, node).join();
}
/**
 * Async variant of the multi-column queryColumnMap.
 * Cache-hit results are wrapped in a completed future; otherwise the compose
 * path runs directly (async sources) or on the executor (blocking sources).
 */
@Override
public <T, K extends Serializable, N extends Number> CompletableFuture<Map<K[], N[]>> queryColumnMapAsync(final Class<T> entityClass, final ColumnNode[] funcNodes, final String[] groupByColumns, final FilterNode node) {
    final EntityInfo info = loadEntityInfo(entityClass);
    final EntityCache cache = info.getCache();
    final boolean cacheReady = cache != null && (isOnlyCache(info) || cache.isFullLoaded());
    if (cacheReady && (node == null || node.isCacheUseable(this))) {
        return CompletableFuture.completedFuture(cache.queryColumnMap(funcNodes, groupByColumns, node));
    }
    if (isAsync()) return queryColumnMapCompose(info, funcNodes, groupByColumns, node);
    return CompletableFuture.supplyAsync(() -> (Map) queryColumnMapCompose(info, funcNodes, groupByColumns, node).join(), getExecutor());
}
/**
 * Builds and executes the aggregate SQL for queryColumnMap:
 * SELECT [group-by columns,] aggregate-expressions FROM table [JOIN ...] [WHERE ...] [GROUP BY ...].
 *
 * @param <T>            generic type of the entity class
 * @param <K>            key type of the result map
 * @param <N>            numeric value type of the result map
 * @param info           entity metadata used to resolve table/column names
 * @param funcNodes      aggregate column nodes (ColumnFuncNode or plain ColumnNodeValue)
 * @param groupByColumns entity field names to group by; may be null/empty for a global aggregate
 * @param node           filter condition; may be null
 *
 * @return future completing with group-key arrays mapped to aggregate value arrays
 */
protected <T, K extends Serializable, N extends Number> CompletableFuture<Map<K[], N[]>> queryColumnMapCompose(final EntityInfo<T> info, final ColumnNode[] funcNodes, final String[] groupByColumns, final FilterNode node) {
    // Comma-separated SQL column list reused for both SELECT and GROUP BY (empty when no grouping requested).
    final StringBuilder groupBySqlColumns = new StringBuilder();
    if (groupByColumns != null && groupByColumns.length > 0) {
        for (int i = 0; i < groupByColumns.length; i++) {
            if (groupBySqlColumns.length() > 0) groupBySqlColumns.append(", ");
            groupBySqlColumns.append(info.getSQLColumn("a", groupByColumns[i]));
        }
    }
    // Comma-separated aggregate expressions; each node is formatted according to its concrete type.
    final StringBuilder funcSqlColumns = new StringBuilder();
    for (int i = 0; i < funcNodes.length; i++) {
        if (funcSqlColumns.length() > 0) funcSqlColumns.append(", ");
        if (funcNodes[i] instanceof ColumnFuncNode) {
            funcSqlColumns.append(info.formatSQLValue((Attribute) null, "a", (ColumnFuncNode) funcNodes[i], sqlFormatter));
        } else {
            // non-function nodes are treated as plain value nodes
            funcSqlColumns.append(info.formatSQLValue((Attribute) null, "a", (ColumnNodeValue) funcNodes[i], sqlFormatter));
        }
    }
    // Optional JOIN and WHERE fragments derived from the filter node ("a" is the main-table alias).
    final Map<Class, String> joinTabalis = node == null ? null : node.getJoinTabalis();
    final Set<String> haset = new HashSet<>();
    final CharSequence join = node == null ? null : node.createSQLJoin(this, false, joinTabalis, haset, info);
    final CharSequence where = node == null ? null : node.createSQLExpress(info, joinTabalis);
    String sql = "SELECT ";
    if (groupBySqlColumns.length() > 0) sql += groupBySqlColumns + ", ";
    sql += funcSqlColumns + " FROM " + info.getTable(node) + " a" + (join == null ? "" : join) + ((where == null || where.length() == 0) ? "" : (" WHERE " + where));
    if (groupBySqlColumns.length() > 0) sql += " GROUP BY " + groupBySqlColumns;
    if (info.isLoggable(logger, Level.FINEST, sql)) logger.finest(info.getType().getSimpleName() + " querycolumnmap sql=" + sql);
    return queryColumnMapDB(info, sql, funcNodes, groupByColumns);
}
//----------------------------- findCompose -----------------------------
/**
* 根据主键获取对象
@@ -1557,7 +1696,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
protected <T> CompletableFuture<T> findCompose(final EntityInfo<T> info, final SelectColumn selects, Serializable pk) {
String column = info.getPrimarySQLColumn();
final String sql = "SELECT " + info.getQueryColumns(null, selects) + " FROM " + info.getTable(pk) + " WHERE " + column + " = " + FilterNode.formatToString(info.getSQLValue(column, pk));
final String sql = "SELECT " + info.getQueryColumns(null, selects) + " FROM " + info.getTable(pk) + " WHERE " + column + " = " + info.formatSQLValue(column, pk, sqlFormatter);
if (info.isLoggable(logger, Level.FINEST, sql)) logger.finest(info.getType().getSimpleName() + " find sql=" + sql);
return findDB(info, sql, true, selects);
}
@@ -1607,7 +1746,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
final EntityInfo<T> info = loadEntityInfo(clazz);
final EntityCache<T> cache = info.getCache();
if (cache != null && cache.isFullLoaded() && (node == null || node.isCacheUseable(this))) return cache.find(selects, node);
return DataSqlSource.this.findCompose(info, selects, node).join();
return this.findCompose(info, selects, node).join();
}
@Override
@@ -1617,8 +1756,8 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
if (cache != null && cache.isFullLoaded() && (node == null || node.isCacheUseable(this))) {
return CompletableFuture.completedFuture(cache.find(selects, node));
}
if (isAsync()) return DataSqlSource.this.findCompose(info, selects, node);
return CompletableFuture.supplyAsync(() -> DataSqlSource.this.findCompose(info, selects, node).join(), getExecutor());
if (isAsync()) return this.findCompose(info, selects, node);
return CompletableFuture.supplyAsync(() -> this.findCompose(info, selects, node).join(), getExecutor());
}
protected <T> CompletableFuture<T> findCompose(final EntityInfo<T> info, final SelectColumn selects, final FilterNode node) {
@@ -1694,7 +1833,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
}
protected <T> CompletableFuture<Serializable> findColumnCompose(final EntityInfo<T> info, String column, final Serializable defValue, final Serializable pk) {
final String sql = "SELECT " + info.getSQLColumn(null, column) + " FROM " + info.getTable(pk) + " WHERE " + info.getPrimarySQLColumn() + " = " + FilterNode.formatToString(info.getSQLValue(info.getPrimarySQLColumn(), pk));
final String sql = "SELECT " + info.getSQLColumn(null, column) + " FROM " + info.getTable(pk) + " WHERE " + info.getPrimarySQLColumn() + " = " + info.formatSQLValue(info.getPrimarySQLColumn(), pk, sqlFormatter);
if (info.isLoggable(logger, Level.FINEST, sql)) logger.finest(info.getType().getSimpleName() + " find sql=" + sql);
return findColumnDB(info, sql, true, column, defValue);
}
@@ -1707,7 +1846,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
Serializable val = cache.findColumn(column, defValue, node);
if (cache.isFullLoaded() || val != null) return val;
}
return DataSqlSource.this.findColumnCompose(info, column, defValue, node).join();
return this.findColumnCompose(info, column, defValue, node).join();
}
@Override
@@ -1718,8 +1857,8 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
Serializable val = cache.findColumn(column, defValue, node);
if (cache.isFullLoaded() || val != null) return CompletableFuture.completedFuture(val);
}
if (isAsync()) return DataSqlSource.this.findColumnCompose(info, column, defValue, node);
return CompletableFuture.supplyAsync(() -> DataSqlSource.this.findColumnCompose(info, column, defValue, node).join(), getExecutor());
if (isAsync()) return this.findColumnCompose(info, column, defValue, node);
return CompletableFuture.supplyAsync(() -> this.findColumnCompose(info, column, defValue, node).join(), getExecutor());
}
protected <T> CompletableFuture<Serializable> findColumnCompose(final EntityInfo<T> info, String column, final Serializable defValue, final FilterNode node) {
@@ -1756,7 +1895,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
}
protected <T> CompletableFuture<Boolean> existsCompose(final EntityInfo<T> info, Serializable pk) {
final String sql = "SELECT COUNT(*) FROM " + info.getTable(pk) + " WHERE " + info.getPrimarySQLColumn() + " = " + FilterNode.formatToString(info.getSQLValue(info.getPrimarySQLColumn(), pk));
final String sql = "SELECT COUNT(*) FROM " + info.getTable(pk) + " WHERE " + info.getPrimarySQLColumn() + " = " + info.formatSQLValue(info.getPrimarySQLColumn(), pk, sqlFormatter);
if (info.isLoggable(logger, Level.FINEST, sql)) logger.finest(info.getType().getSimpleName() + " exists sql=" + sql);
return existsDB(info, sql, true);
}
@@ -1779,7 +1918,7 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
boolean rs = cache.exists(node);
if (rs || cache.isFullLoaded()) return rs;
}
return DataSqlSource.this.existsCompose(info, node).join();
return this.existsCompose(info, node).join();
}
@Override
@@ -1790,8 +1929,8 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
boolean rs = cache.exists(node);
if (rs || cache.isFullLoaded()) return CompletableFuture.completedFuture(rs);
}
if (isAsync()) return DataSqlSource.this.existsCompose(info, node);
return CompletableFuture.supplyAsync(() -> DataSqlSource.this.existsCompose(info, node).join(), getExecutor());
if (isAsync()) return this.existsCompose(info, node);
return CompletableFuture.supplyAsync(() -> this.existsCompose(info, node).join(), getExecutor());
}
protected <T> CompletableFuture<Boolean> existsCompose(final EntityInfo<T> info, FilterNode node) {
@@ -1805,33 +1944,70 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
//-----------------------list set----------------------------
@Override
public <T, V extends Serializable> HashSet<V> queryColumnSet(final String selectedColumn, Class<T> clazz, String column, Serializable colval) {
return new LinkedHashSet<>(queryColumnList(selectedColumn, clazz, null, FilterNode.create(column, colval)));
public <T, V extends Serializable> Set<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final String column, final Serializable colval) {
return queryColumnSet(selectedColumn, clazz, null, FilterNode.create(column, colval));
}
@Override
public <T, V extends Serializable> CompletableFuture<HashSet<V>> queryColumnSetAsync(final String selectedColumn, Class<T> clazz, String column, Serializable colval) {
return queryColumnListAsync(selectedColumn, clazz, null, FilterNode.create(column, colval)).thenApply((list) -> new LinkedHashSet(list));
public <T, V extends Serializable> CompletableFuture<Set<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final String column, final Serializable colval) {
return queryColumnSetAsync(selectedColumn, clazz, null, FilterNode.create(column, colval));
}
@Override
public <T, V extends Serializable> HashSet<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final FilterBean bean) {
return new LinkedHashSet<>(queryColumnList(selectedColumn, clazz, null, FilterNodeBean.createFilterNode(bean)));
public <T, V extends Serializable> Set<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final FilterBean bean) {
return queryColumnSet(selectedColumn, clazz, null, FilterNodeBean.createFilterNode(bean));
}
@Override
public <T, V extends Serializable> CompletableFuture<HashSet<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final FilterBean bean) {
return queryColumnListAsync(selectedColumn, clazz, null, FilterNodeBean.createFilterNode(bean)).thenApply((list) -> new LinkedHashSet(list));
public <T, V extends Serializable> CompletableFuture<Set<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final FilterBean bean) {
return queryColumnSetAsync(selectedColumn, clazz, null, FilterNodeBean.createFilterNode(bean));
}
@Override
public <T, V extends Serializable> HashSet<V> queryColumnSet(String selectedColumn, Class<T> clazz, FilterNode node) {
return new LinkedHashSet<>(queryColumnList(selectedColumn, clazz, null, node));
public <T, V extends Serializable> Set<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final FilterNode node) {
return queryColumnSet(selectedColumn, clazz, null, node);
}
@Override
public <T, V extends Serializable> CompletableFuture<HashSet<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final FilterNode node) {
return queryColumnListAsync(selectedColumn, clazz, null, node).thenApply((list) -> new LinkedHashSet(list));
public <T, V extends Serializable> CompletableFuture<Set<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final FilterNode node) {
return queryColumnSetAsync(selectedColumn, clazz, null, node);
}
/**
 * Queries a distinct Set of one column's values, paged/sorted by a Flipper and
 * filtered by a FilterBean. Converts the bean to a FilterNode and delegates.
 */
@Override
public <T, V extends Serializable> Set<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final Flipper flipper, final FilterBean bean) {
    final FilterNode node = FilterNodeBean.createFilterNode(bean);
    return queryColumnSet(selectedColumn, clazz, flipper, node);
}
/**
 * Async variant of the Flipper + FilterBean queryColumnSet.
 * Converts the bean to a FilterNode and delegates.
 */
@Override
public <T, V extends Serializable> CompletableFuture<Set<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final Flipper flipper, final FilterBean bean) {
    final FilterNode node = FilterNodeBean.createFilterNode(bean);
    return queryColumnSetAsync(selectedColumn, clazz, flipper, node);
}
/**
 * Queries a distinct, insertion-ordered Set of one column's values.
 * Loads entities with only the selected column populated, then projects that
 * column out of each entity via its Attribute accessor.
 */
@Override
public <T, V extends Serializable> Set<V> queryColumnSet(final String selectedColumn, final Class<T> clazz, final Flipper flipper, final FilterNode node) {
    final Set<V> values = new LinkedHashSet<>();
    final Set<T> entities = querySet(clazz, SelectColumn.includes(selectedColumn), flipper, node);
    if (entities.isEmpty()) return values;
    final Attribute<T, V> attr = (Attribute<T, V>) loadEntityInfo(clazz).getAttribute(selectedColumn);
    for (T entity : entities) {
        values.add(attr.get(entity));
    }
    return values;
}
/**
 * Async variant: loads entities with only the selected column populated, then
 * projects that column out of each entity via its Attribute accessor.
 */
@Override
public <T, V extends Serializable> CompletableFuture<Set<V>> queryColumnSetAsync(final String selectedColumn, final Class<T> clazz, final Flipper flipper, final FilterNode node) {
    return querySetAsync(clazz, SelectColumn.includes(selectedColumn), flipper, node).thenApply((Set<T> entities) -> {
        final Set<V> values = new LinkedHashSet<>();
        if (entities.isEmpty()) return values;
        final Attribute<T, V> attr = (Attribute<T, V>) loadEntityInfo(clazz).getAttribute(selectedColumn);
        for (T entity : entities) {
            values.add(attr.get(entity));
        }
        return values;
    });
}
@Override
@@ -2125,6 +2301,145 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
});
}
/**
 * Queries a Set of entities filtered by a single column value.
 *
 * @param <T>    generic type of the entity class
 * @param clazz  the entity class
 * @param column the filter column name
 * @param colval the filter column value
 *
 * @return the Set of matching entities
 */
@Override
public <T> Set<T> querySet(final Class<T> clazz, final String column, final Serializable colval) {
    return querySet(clazz, (SelectColumn) null, null, FilterNode.create(column, colval));
}

/** Async variant of the single-column-value querySet. */
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final String column, final Serializable colval) {
    return querySetAsync(clazz, (SelectColumn) null, null, FilterNode.create(column, colval));
}

/** Queries all entities of the given class as a Set (no filter, no paging). */
@Override
public <T> Set<T> querySet(final Class<T> clazz) {
    return querySet(clazz, (SelectColumn) null, null, (FilterNode) null);
}

/** Async variant of the unfiltered querySet. */
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz) {
    return querySetAsync(clazz, (SelectColumn) null, null, (FilterNode) null);
}

/**
 * Queries a Set of entities filtered by a FilterBean.
 *
 * @param <T>   generic type of the entity class
 * @param clazz the entity class
 * @param bean  the filter bean
 *
 * @return the Set of matching entities
 */
@Override
public <T> Set<T> querySet(final Class<T> clazz, final FilterBean bean) {
    return querySet(clazz, (SelectColumn) null, null, FilterNodeBean.createFilterNode(bean));
}

/** Async variant of the FilterBean querySet. */
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final FilterBean bean) {
    return querySetAsync(clazz, (SelectColumn) null, null, FilterNodeBean.createFilterNode(bean));
}

/** Queries a Set of entities filtered by a FilterNode. */
@Override
public <T> Set<T> querySet(final Class<T> clazz, final FilterNode node) {
    return querySet(clazz, (SelectColumn) null, null, node);
}

/** Async variant of the FilterNode querySet. */
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final FilterNode node) {
    return querySetAsync(clazz, (SelectColumn) null, null, node);
}

/**
 * Queries a Set of entities filtered by a FilterBean; only the fields
 * included/excluded by the SelectColumn are populated on each entity.
 *
 * @param <T>     generic type of the entity class
 * @param clazz   the entity class
 * @param selects the fields to populate
 * @param bean    the filter bean
 *
 * @return the Set of matching entities
 */
@Override
public <T> Set<T> querySet(final Class<T> clazz, final SelectColumn selects, final FilterBean bean) {
    return querySet(clazz, selects, null, FilterNodeBean.createFilterNode(bean));
}

/** Async variant of the SelectColumn + FilterBean querySet. */
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, SelectColumn selects, final FilterBean bean) {
    return querySetAsync(clazz, selects, null, FilterNodeBean.createFilterNode(bean));
}

/** Queries a Set of entities filtered by a FilterNode with a field-selection SelectColumn. */
@Override
public <T> Set<T> querySet(final Class<T> clazz, final SelectColumn selects, final FilterNode node) {
    return querySet(clazz, selects, null, node);
}

/** Async variant of the SelectColumn + FilterNode querySet. */
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, SelectColumn selects, final FilterNode node) {
    return querySetAsync(clazz, selects, null, node);
}

/** Queries a paged/sorted Set of entities filtered by a single column value. */
@Override
public <T> Set<T> querySet(final Class<T> clazz, final Flipper flipper, final String column, final Serializable colval) {
    return querySet(clazz, null, flipper, FilterNode.create(column, colval));
}

/** Async variant of the Flipper + single-column-value querySet. */
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final Flipper flipper, final String column, final Serializable colval) {
    return querySetAsync(clazz, null, flipper, FilterNode.create(column, colval));
}

/** Queries a paged/sorted Set of entities filtered by a FilterBean. */
@Override
public <T> Set<T> querySet(final Class<T> clazz, final Flipper flipper, final FilterBean bean) {
    return querySet(clazz, null, flipper, FilterNodeBean.createFilterNode(bean));
}

/** Async variant of the Flipper + FilterBean querySet. */
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final Flipper flipper, final FilterBean bean) {
    return querySetAsync(clazz, null, flipper, FilterNodeBean.createFilterNode(bean));
}

/** Queries a paged/sorted Set of entities filtered by a FilterNode. */
@Override
public <T> Set<T> querySet(final Class<T> clazz, final Flipper flipper, final FilterNode node) {
    return querySet(clazz, null, flipper, node);
}

/** Async variant of the Flipper + FilterNode querySet. */
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final Flipper flipper, final FilterNode node) {
    return querySetAsync(clazz, null, flipper, node);
}

/** Queries a paged/sorted Set with field selection, filtered by a FilterBean. */
@Override
public <T> Set<T> querySet(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterBean bean) {
    return querySet(clazz, selects, flipper, FilterNodeBean.createFilterNode(bean));
}

/** Async variant of the SelectColumn + Flipper + FilterBean querySet. */
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterBean bean) {
    return querySetAsync(clazz, selects, flipper, FilterNodeBean.createFilterNode(bean));
}

/**
 * Terminal overload: runs the distinct (third flag = true) sheet query without
 * a total count and materializes the result list into an insertion-ordered Set.
 */
@Override
public <T> Set<T> querySet(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterNode node) {
    return new LinkedHashSet<>(querySheetCompose(true, false, true, clazz, selects, flipper, node).join().list(true));
}

/** Async variant of the terminal querySet overload. */
@Override
public <T> CompletableFuture<Set<T>> querySetAsync(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterNode node) {
    return querySheetCompose(true, false, true, clazz, selects, flipper, node).thenApply((rs) -> new LinkedHashSet<>(rs.list(true)));
}
/**
* 根据指定字段值查询对象集合
*
@@ -2256,12 +2571,12 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
@Override
public <T> List<T> queryList(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterNode node) {
return querySheetCompose(true, false, clazz, selects, flipper, node).join().list(true);
return querySheetCompose(true, false, false, clazz, selects, flipper, node).join().list(true);
}
@Override
public <T> CompletableFuture<List<T>> queryListAsync(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterNode node) {
return querySheetCompose(true, false, clazz, selects, flipper, node).thenApply((rs) -> rs.list(true));
return querySheetCompose(true, false, false, clazz, selects, flipper, node).thenApply((rs) -> rs.list(true));
}
//-----------------------sheet----------------------------
@@ -2318,24 +2633,28 @@ public abstract class DataSqlSource<DBChannel> extends AbstractService implement
@Override
public <T> Sheet<T> querySheet(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterNode node) {
return querySheetCompose(true, true, clazz, selects, flipper, node).join();
return querySheetCompose(true, true, false, clazz, selects, flipper, node).join();
}
@Override
public <T> CompletableFuture<Sheet<T>> querySheetAsync(final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterNode node) {
if (isAsync()) return querySheetCompose(true, true, clazz, selects, flipper, node);
return CompletableFuture.supplyAsync(() -> querySheetCompose(true, true, clazz, selects, flipper, node).join(), getExecutor());
if (isAsync()) return querySheetCompose(true, true, false, clazz, selects, flipper, node);
return CompletableFuture.supplyAsync(() -> querySheetCompose(true, true, false, clazz, selects, flipper, node).join(), getExecutor());
}
protected <T> CompletableFuture<Sheet<T>> querySheetCompose(final boolean readcache, final boolean needtotal, final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterNode node) {
protected <T> CompletableFuture<Sheet<T>> querySheetCompose(final boolean readcache, final boolean needtotal, final boolean distinct, final Class<T> clazz, final SelectColumn selects, final Flipper flipper, final FilterNode node) {
final EntityInfo<T> info = loadEntityInfo(clazz);
final EntityCache<T> cache = info.getCache();
if (readcache && cache != null && cache.isFullLoaded()) {
if (node == null || node.isCacheUseable(this)) {
if (info.isLoggable(logger, Level.FINEST, " cache query predicate = ")) logger.finest(clazz.getSimpleName() + " cache query predicate = " + (node == null ? null : node.createPredicate(cache)));
return CompletableFuture.completedFuture(cache.querySheet(needtotal, selects, flipper, node));
return CompletableFuture.completedFuture(cache.querySheet(needtotal, distinct, selects, flipper, node));
}
}
return querySheetDB(info, readcache, needtotal, selects, flipper, node);
return querySheetDB(info, readcache, needtotal, distinct, selects, flipper, node);
}
protected static enum UpdateMode {
INSERT, DELETE, UPDATE, CLEAR, DROP, ALTER, OTHER;
}
}

View File

@@ -265,34 +265,37 @@ public final class EntityCache<T> {
if (filter != null) stream = stream.filter(filter);
Collector<T, Map, ?> collector = null;
final Class valtype = funcAttr == null ? null : funcAttr.type();
switch (func) {
case AVG:
if (valtype == float.class || valtype == Float.class || valtype == double.class || valtype == Double.class) {
collector = (Collector<T, Map, ?>) Collectors.averagingDouble((T t) -> ((Number) funcAttr.get(t)).doubleValue());
} else {
collector = (Collector<T, Map, ?>) Collectors.averagingLong((T t) -> ((Number) funcAttr.get(t)).longValue());
}
break;
case COUNT:
collector = (Collector<T, Map, ?>) Collectors.counting();
break;
case DISTINCTCOUNT:
collector = (Collector<T, Map, ?>) Collectors.mapping((t) -> funcAttr.get(t), Collectors.toSet());
break;
case MAX:
case MIN:
Comparator<T> comp = (o1, o2) -> o1 == null ? (o2 == null ? 0 : -1) : ((Comparable) funcAttr.get(o1)).compareTo(funcAttr.get(o2));
collector = (Collector<T, Map, ?>) ((func == MAX) ? Collectors.maxBy(comp) : Collectors.minBy(comp));
break;
case SUM:
if (valtype == float.class || valtype == Float.class || valtype == double.class || valtype == Double.class) {
collector = (Collector<T, Map, ?>) Collectors.summingDouble((T t) -> ((Number) funcAttr.get(t)).doubleValue());
} else {
collector = (Collector<T, Map, ?>) Collectors.summingLong((T t) -> ((Number) funcAttr.get(t)).longValue());
}
break;
if (func != null) {
switch (func) {
case AVG:
if (valtype == float.class || valtype == Float.class || valtype == double.class || valtype == Double.class) {
collector = (Collector<T, Map, ?>) Collectors.averagingDouble((T t) -> ((Number) funcAttr.get(t)).doubleValue());
} else {
collector = (Collector<T, Map, ?>) Collectors.averagingLong((T t) -> ((Number) funcAttr.get(t)).longValue());
}
break;
case COUNT:
collector = (Collector<T, Map, ?>) Collectors.counting();
break;
case DISTINCTCOUNT:
collector = (Collector<T, Map, ?>) Collectors.mapping((t) -> funcAttr.get(t), Collectors.toSet());
break;
case MAX:
case MIN:
Comparator<T> comp = (o1, o2) -> o1 == null ? (o2 == null ? 0 : -1) : ((Comparable) funcAttr.get(o1)).compareTo(funcAttr.get(o2));
collector = (Collector<T, Map, ?>) ((func == MAX) ? Collectors.maxBy(comp) : Collectors.minBy(comp));
break;
case SUM:
if (valtype == float.class || valtype == Float.class || valtype == double.class || valtype == Double.class) {
collector = (Collector<T, Map, ?>) Collectors.summingDouble((T t) -> ((Number) funcAttr.get(t)).doubleValue());
} else {
collector = (Collector<T, Map, ?>) Collectors.summingLong((T t) -> ((Number) funcAttr.get(t)).longValue());
}
break;
}
}
Map rs = stream.collect(Collectors.groupingBy(t -> keyAttr.get(t), LinkedHashMap::new, collector));
Map rs = collector == null ? stream.collect(Collectors.toMap(t -> keyAttr.get(t), t -> funcAttr.get(t), (key1, key2) -> key2))
: stream.collect(Collectors.groupingBy(t -> keyAttr.get(t), LinkedHashMap::new, collector));
if (func == MAX || func == MIN) {
Map rs2 = new LinkedHashMap();
rs.forEach((x, y) -> {
@@ -307,106 +310,201 @@ public final class EntityCache<T> {
return rs;
}
public <V> Number getNumberResult(final FilterFunc func, final Number defResult, final String column, final FilterNode node) {
final Attribute<T, Serializable> attr = column == null ? null : info.getAttribute(column);
/**
 * Groups the cached entities by the given groupBy columns and aggregates each
 * group with the supplied function nodes.
 *
 * @param funcNodes      aggregate expressions evaluated per group
 * @param groupByColumns field names to group by
 * @param node           optional filter; null means no filtering
 *
 * @return map of groupBy values to the aggregate results for that group
 */
public Map<Serializable[], Number[]> queryColumnMap(final ColumnNode[] funcNodes, final String[] groupByColumns, FilterNode node) {
    final Predicate<T> filter = node == null ? null : node.createPredicate(this);
    Stream<T> stream = this.list.stream();
    if (filter != null) stream = stream.filter(filter);
    // Resolve groupBy column names to attribute accessors once, up front.
    final Attribute<T, Serializable>[] groupAttrs = new Attribute[groupByColumns.length];
    for (int i = 0; i < groupAttrs.length; i++) {
        groupAttrs[i] = info.getAttribute(groupByColumns[i]);
    }
    // Serializable[] uses identity equals/hashCode, so the key arrays must be
    // interned: every row with the same column values maps to the SAME array.
    final Map<String, Serializable[]> interned = new HashMap<>();
    final Function<T, Serializable[]> keyMapper = entity -> {
        final Serializable[] vals = new Serializable[groupAttrs.length];
        final StringBuilder keyBuilder = new StringBuilder();
        for (int i = 0; i < groupAttrs.length; i++) {
            vals[i] = groupAttrs[i].get(entity);
            keyBuilder.append((char) 20).append(vals[i]); // (char) 20 separates column values
        }
        return interned.computeIfAbsent(keyBuilder.toString(), k -> vals);
    };
    final Map<Serializable[], List<T>> grouped = stream.collect(Collectors.groupingBy(keyMapper));
    final Map<Serializable[], Number[]> result = new HashMap<>(grouped.size());
    grouped.forEach((key, rows) -> result.put(key, queryColumnNumbers(rows, funcNodes)));
    return result;
}
/**
 * Evaluates every aggregate node against the given rows.
 * Currently disabled: the guard below always throws.
 */
private Number[] queryColumnNumbers(final List<T> list, final ColumnNode[] funcNodes) {
    if (true) throw new UnsupportedOperationException("Not supported yet.");
    // Unreachable until the guard above is removed.
    final Number[] results = new Number[funcNodes.length];
    int i = 0;
    for (ColumnNode funcNode : funcNodes) {
        results[i++] = queryColumnNumber(list, funcNode);
    }
    return results;
}
/**
 * Dispatches an aggregate node to the handler for its concrete subtype.
 * Unknown subtypes yield null.
 */
private Number queryColumnNumber(final List<T> list, final ColumnNode funcNode) {
    if (funcNode instanceof ColumnFuncNode) return queryColumnNumber(list, (ColumnFuncNode) funcNode);
    if (funcNode instanceof ColumnNodeValue) return queryColumnNumber(list, (ColumnNodeValue) funcNode);
    return null;
}
/**
 * Evaluates a function node: a String value is treated as a column name and
 * aggregated directly; nested nodes are evaluated recursively.
 */
private Number queryColumnNumber(final List<T> list, final ColumnFuncNode funcNode) {
    final Object value = funcNode.getValue();
    if (value instanceof String) {
        // Plain column name: aggregate that column with the node's function.
        final Attribute<T, Serializable> attr = info.getAttribute((String) value);
        final Function<T, Number> attrFunc = x -> (Number) attr.get(x);
        return getNumberResult(list, funcNode.getFunc(), null, attr.type(), attrFunc, (FilterNode) null);
    }
    if (value instanceof ColumnFuncNode) {
        return queryColumnNumber(list, (ColumnFuncNode) value);
    }
    if (value instanceof ColumnNodeValue) {
        return queryColumnNumber(list, (ColumnNodeValue) value);
    }
    return null;
}
// Evaluates a ColumnNodeValue aggregate over the given rows.
// TODO(review): not implemented yet — always returns null, which callers
// propagate as "no result".
private Number queryColumnNumber(final List<T> list, final ColumnNodeValue nodeValue) {
    return null;
}
private <V> Number getNumberResult(final Collection<T> entityList, final FilterFunc func, final Number defResult, final Class attrType, final Function<T, Number> attrFunc, final FilterNode node) {
final Predicate<T> filter = node == null ? null : node.createPredicate(this);
Stream<T> stream = entityList.stream();
if (filter != null) stream = stream.filter(filter);
switch (func) {
case AVG:
if (attr.type() == int.class || attr.type() == Integer.class || attr.type() == AtomicInteger.class) {
OptionalDouble rs = stream.mapToInt(x -> ((Number) attr.get(x)).intValue()).average();
if (attrType == int.class || attrType == Integer.class || attrType == AtomicInteger.class) {
OptionalDouble rs = stream.mapToInt(x -> ((Number) attrFunc.apply(x)).intValue()).average();
return rs.isPresent() ? (int) rs.getAsDouble() : defResult;
} else if (attr.type() == long.class || attr.type() == Long.class || attr.type() == AtomicLong.class) {
OptionalDouble rs = stream.mapToLong(x -> ((Number) attr.get(x)).longValue()).average();
} else if (attrType == long.class || attrType == Long.class || attrType == AtomicLong.class) {
OptionalDouble rs = stream.mapToLong(x -> ((Number) attrFunc.apply(x)).longValue()).average();
return rs.isPresent() ? (long) rs.getAsDouble() : defResult;
} else if (attr.type() == short.class || attr.type() == Short.class) {
OptionalDouble rs = stream.mapToInt(x -> ((Short) attr.get(x)).intValue()).average();
} else if (attrType == short.class || attrType == Short.class) {
OptionalDouble rs = stream.mapToInt(x -> ((Short) attrFunc.apply(x)).intValue()).average();
return rs.isPresent() ? (short) rs.getAsDouble() : defResult;
} else if (attr.type() == float.class || attr.type() == Float.class) {
OptionalDouble rs = stream.mapToDouble(x -> ((Float) attr.get(x)).doubleValue()).average();
} else if (attrType == float.class || attrType == Float.class) {
OptionalDouble rs = stream.mapToDouble(x -> ((Float) attrFunc.apply(x)).doubleValue()).average();
return rs.isPresent() ? (float) rs.getAsDouble() : defResult;
} else if (attr.type() == double.class || attr.type() == Double.class) {
OptionalDouble rs = stream.mapToDouble(x -> (Double) attr.get(x)).average();
} else if (attrType == double.class || attrType == Double.class) {
OptionalDouble rs = stream.mapToDouble(x -> (Double) attrFunc.apply(x)).average();
return rs.isPresent() ? rs.getAsDouble() : defResult;
}
throw new RuntimeException("getNumberResult error(type:" + type + ", attr.declaringClass: " + attr.declaringClass() + ", attr.field: " + attr.field() + ", attr.type: " + attr.type());
throw new RuntimeException("getNumberResult error(type:" + type + ", attr.type: " + attrType);
case COUNT:
return stream.count();
case DISTINCTCOUNT:
return stream.map(x -> attr.get(x)).distinct().count();
return stream.map(x -> attrFunc.apply(x)).distinct().count();
case MAX:
if (attr.type() == int.class || attr.type() == Integer.class || attr.type() == AtomicInteger.class) {
OptionalInt rs = stream.mapToInt(x -> ((Number) attr.get(x)).intValue()).max();
if (attrType == int.class || attrType == Integer.class || attrType == AtomicInteger.class) {
OptionalInt rs = stream.mapToInt(x -> ((Number) attrFunc.apply(x)).intValue()).max();
return rs.isPresent() ? rs.getAsInt() : defResult;
} else if (attr.type() == long.class || attr.type() == Long.class || attr.type() == AtomicLong.class) {
OptionalLong rs = stream.mapToLong(x -> ((Number) attr.get(x)).longValue()).max();
} else if (attrType == long.class || attrType == Long.class || attrType == AtomicLong.class) {
OptionalLong rs = stream.mapToLong(x -> ((Number) attrFunc.apply(x)).longValue()).max();
return rs.isPresent() ? rs.getAsLong() : defResult;
} else if (attr.type() == short.class || attr.type() == Short.class) {
OptionalInt rs = stream.mapToInt(x -> ((Short) attr.get(x)).intValue()).max();
} else if (attrType == short.class || attrType == Short.class) {
OptionalInt rs = stream.mapToInt(x -> ((Short) attrFunc.apply(x)).intValue()).max();
return rs.isPresent() ? (short) rs.getAsInt() : defResult;
} else if (attr.type() == float.class || attr.type() == Float.class) {
OptionalDouble rs = stream.mapToDouble(x -> ((Float) attr.get(x)).doubleValue()).max();
} else if (attrType == float.class || attrType == Float.class) {
OptionalDouble rs = stream.mapToDouble(x -> ((Float) attrFunc.apply(x)).doubleValue()).max();
return rs.isPresent() ? (float) rs.getAsDouble() : defResult;
} else if (attr.type() == double.class || attr.type() == Double.class) {
OptionalDouble rs = stream.mapToDouble(x -> (Double) attr.get(x)).max();
} else if (attrType == double.class || attrType == Double.class) {
OptionalDouble rs = stream.mapToDouble(x -> (Double) attrFunc.apply(x)).max();
return rs.isPresent() ? rs.getAsDouble() : defResult;
}
throw new RuntimeException("getNumberResult error(type:" + type + ", attr.declaringClass: " + attr.declaringClass() + ", attr.field: " + attr.field() + ", attr.type: " + attr.type());
throw new RuntimeException("getNumberResult error(type:" + type + ", attr.type: " + attrType);
case MIN:
if (attr.type() == int.class || attr.type() == Integer.class || attr.type() == AtomicInteger.class) {
OptionalInt rs = stream.mapToInt(x -> ((Number) attr.get(x)).intValue()).min();
if (attrType == int.class || attrType == Integer.class || attrType == AtomicInteger.class) {
OptionalInt rs = stream.mapToInt(x -> ((Number) attrFunc.apply(x)).intValue()).min();
return rs.isPresent() ? rs.getAsInt() : defResult;
} else if (attr.type() == long.class || attr.type() == Long.class || attr.type() == AtomicLong.class) {
OptionalLong rs = stream.mapToLong(x -> ((Number) attr.get(x)).longValue()).min();
} else if (attrType == long.class || attrType == Long.class || attrType == AtomicLong.class) {
OptionalLong rs = stream.mapToLong(x -> ((Number) attrFunc.apply(x)).longValue()).min();
return rs.isPresent() ? rs.getAsLong() : defResult;
} else if (attr.type() == short.class || attr.type() == Short.class) {
OptionalInt rs = stream.mapToInt(x -> ((Short) attr.get(x)).intValue()).min();
} else if (attrType == short.class || attrType == Short.class) {
OptionalInt rs = stream.mapToInt(x -> ((Short) attrFunc.apply(x)).intValue()).min();
return rs.isPresent() ? (short) rs.getAsInt() : defResult;
} else if (attr.type() == float.class || attr.type() == Float.class) {
OptionalDouble rs = stream.mapToDouble(x -> ((Float) attr.get(x)).doubleValue()).min();
} else if (attrType == float.class || attrType == Float.class) {
OptionalDouble rs = stream.mapToDouble(x -> ((Float) attrFunc.apply(x)).doubleValue()).min();
return rs.isPresent() ? (float) rs.getAsDouble() : defResult;
} else if (attr.type() == double.class || attr.type() == Double.class) {
OptionalDouble rs = stream.mapToDouble(x -> (Double) attr.get(x)).min();
} else if (attrType == double.class || attrType == Double.class) {
OptionalDouble rs = stream.mapToDouble(x -> (Double) attrFunc.apply(x)).min();
return rs.isPresent() ? rs.getAsDouble() : defResult;
}
throw new RuntimeException("getNumberResult error(type:" + type + ", attr.declaringClass: " + attr.declaringClass() + ", attr.field: " + attr.field() + ", attr.type: " + attr.type());
throw new RuntimeException("getNumberResult error(type:" + type + ", attr.type: " + attrType);
case SUM:
if (attr.type() == int.class || attr.type() == Integer.class || attr.type() == AtomicInteger.class) {
return stream.mapToInt(x -> ((Number) attr.get(x)).intValue()).sum();
} else if (attr.type() == long.class || attr.type() == Long.class || attr.type() == AtomicLong.class) {
return stream.mapToLong(x -> ((Number) attr.get(x)).longValue()).sum();
} else if (attr.type() == short.class || attr.type() == Short.class) {
return (short) stream.mapToInt(x -> ((Short) attr.get(x)).intValue()).sum();
} else if (attr.type() == float.class || attr.type() == Float.class) {
return (float) stream.mapToDouble(x -> ((Float) attr.get(x)).doubleValue()).sum();
} else if (attr.type() == double.class || attr.type() == Double.class) {
return stream.mapToDouble(x -> (Double) attr.get(x)).sum();
if (attrType == int.class || attrType == Integer.class || attrType == AtomicInteger.class) {
return stream.mapToInt(x -> ((Number) attrFunc.apply(x)).intValue()).sum();
} else if (attrType == long.class || attrType == Long.class || attrType == AtomicLong.class) {
return stream.mapToLong(x -> ((Number) attrFunc.apply(x)).longValue()).sum();
} else if (attrType == short.class || attrType == Short.class) {
return (short) stream.mapToInt(x -> ((Short) attrFunc.apply(x)).intValue()).sum();
} else if (attrType == float.class || attrType == Float.class) {
return (float) stream.mapToDouble(x -> ((Float) attrFunc.apply(x)).doubleValue()).sum();
} else if (attrType == double.class || attrType == Double.class) {
return stream.mapToDouble(x -> (Double) attrFunc.apply(x)).sum();
}
throw new RuntimeException("getNumberResult error(type:" + type + ", attr.declaringClass: " + attr.declaringClass() + ", attr.field: " + attr.field() + ", attr.type: " + attr.type());
throw new RuntimeException("getNumberResult error(type:" + type + ", attr.type: " + attrType);
}
return defResult;
}
public Sheet<T> querySheet(final SelectColumn selects, final Flipper flipper, final FilterNode node) {
return querySheet(true, selects, flipper, node);
public <V> Number getNumberResult(final FilterFunc func, final Number defResult, final String column, final FilterNode node) {
final Attribute<T, Serializable> attr = column == null ? null : info.getAttribute(column); //COUNT的column=null
final Function<T, Number> attrFunc = attr == null ? null : x -> (Number) attr.get(x);
return getNumberResult(this.list, func, defResult, attr == null ? null : attr.type(), attrFunc, node);
}
public Sheet<T> querySheet(final boolean needtotal, final SelectColumn selects, final Flipper flipper, FilterNode node) {
public Sheet<T> querySheet(final SelectColumn selects, final Flipper flipper, final FilterNode node) {
return querySheet(true, false, selects, flipper, node);
}
/**
 * Filters the stream so that only the first row for each composite key
 * (the values of {@code keyattrs}) passes through.
 *
 * Defect fixed: the composite key was built by plain concatenation, so rows
 * like ("ab","c") and ("a","bc") produced the same key and distinct rows were
 * wrongly dropped. A (char) 20 separator — the same delimiter queryColumnMap
 * uses — now keeps column values apart.
 *
 * @param <T>      entity type (shadows the class parameter, as in the original)
 * @param stream   source stream
 * @param keyattrs attributes forming the distinct key; null disables filtering
 *
 * @return the filtered stream
 */
protected <T> Stream<T> distinctStream(Stream<T> stream, final List<Attribute<T, Serializable>> keyattrs) {
    if (keyattrs == null) return stream;
    // Keys already seen; Set.add returns false for duplicates.
    final Set<String> seen = new HashSet<>();
    Predicate<T> firstOccurrence = t -> {
        StringBuilder sb = new StringBuilder();
        for (Attribute attr : keyattrs) {
            sb.append((char) 20).append(attr.get(t));
        }
        return seen.add(sb.toString());
    };
    return stream.filter(firstOccurrence);
}
public Sheet<T> querySheet(final boolean needtotal, final boolean distinct, final SelectColumn selects, final Flipper flipper, FilterNode node) {
final Predicate<T> filter = node == null ? null : node.createPredicate(this);
final Comparator<T> comparator = createComparator(flipper);
long total = 0;
List<Attribute<T, Serializable>> keyattrs = null;
if (distinct) {
final List<Attribute<T, Serializable>> attrs = new ArrayList<>();
info.forEachAttribute((k, v) -> {
if (selects == null || selects.test(k)) attrs.add(v);
});
keyattrs = attrs;
}
if (needtotal) {
Stream<T> stream = this.list.stream();
if (filter != null) stream = stream.filter(filter);
if (distinct) stream = distinctStream(stream, keyattrs);
total = stream.count();
}
if (needtotal && total == 0) return new Sheet<>();
if (needtotal && total == 0) return new Sheet<>(0, new ArrayList());
Stream<T> stream = this.list.stream();
if (filter != null) stream = stream.filter(filter);
if (distinct) stream = distinctStream(stream, keyattrs);
if (comparator != null) stream = stream.sorted(comparator);
if (flipper != null && flipper.getOffset() > 0) stream = stream.skip(flipper.getOffset());
if (flipper != null && flipper.getLimit() > 0) stream = stream.limit(flipper.getLimit());
@@ -593,45 +691,33 @@ public final class EntityCache<T> {
}
}
/**
 * Decrements the given column of the cached entity identified by pk.
 * No-op (returns null) when pk is null or no entity is cached under it.
 *
 * @param <V>      column type
 * @param pk       primary key of the cached entity
 * @param attr     column accessor
 * @param incvalue amount to subtract
 *
 * @return the updated entity, or null when not found
 */
public <V> T updateColumnDecrement(final Serializable pk, Attribute<T, V> attr, final long incvalue) {
    if (pk == null) return null;
    final T entity = this.map.get(pk);
    if (entity == null) return entity;
    // Serialize concurrent column updates on the same entity instance.
    synchronized (entity) {
        return updateColumn(attr, entity, ColumnExpress.DEC, incvalue);
    }
}
private <V> T updateColumn(Attribute<T, V> attr, final T entity, final ColumnExpress express, Serializable val) {
final Class ft = attr.type();
Number numb = null;
Serializable newval = null;
switch (express) {
case INC:
numb = (Number) attr.get(entity);
if (numb == null) {
numb = (Number) val;
} else {
numb = numb.longValue() + ((Number) val).longValue();
}
break;
case DEC:
case MUL:
numb = (Number) attr.get(entity);
if (numb == null) {
numb = 0;
} else {
numb = numb.longValue() * ((Number) val).floatValue();
}
break;
case DIV:
case MOD:
case AND:
numb = (Number) attr.get(entity);
if (numb == null) {
numb = 0;
} else {
numb = numb.longValue() & ((Number) val).longValue();
}
break;
case ORR:
numb = (Number) attr.get(entity);
if (numb == null) {
numb = 0;
} else {
numb = numb.longValue() | ((Number) val).longValue();
}
numb = getValue((Number) attr.get(entity), express, val);
break;
case MOV:
if (val instanceof ColumnNodeValue) val = updateColumnNodeValue(attr, entity, (ColumnNodeValue) val);
newval = val;
if (val instanceof Number) numb = (Number) val;
break;
}
if (numb != null) {
@@ -663,6 +749,89 @@ public final class EntityCache<T> {
return entity;
}
/**
 * Evaluates a ColumnNodeValue expression against the current entity state:
 * a CharSequence operand is resolved to the entity's current field value,
 * a nested ColumnNodeValue is evaluated recursively.
 *
 * Defect fixed: the right-operand branches tested {@code left} instead of
 * {@code right}, so the right side was never resolved when only it was a
 * field name or nested node (copy-paste bug).
 *
 * @param <V>    column type
 * @param attr   column being updated (passed through to recursion)
 * @param entity entity supplying current field values
 * @param node   expression to evaluate
 *
 * @return the computed value
 */
private <V> Serializable updateColumnNodeValue(Attribute<T, V> attr, final T entity, ColumnNodeValue node) {
    // Resolve the left operand.
    Serializable left = node.getLeft();
    if (left instanceof CharSequence) {
        left = info.getUpdateAttribute(left.toString()).get(entity);
    } else if (left instanceof ColumnNodeValue) {
        left = updateColumnNodeValue(attr, entity, (ColumnNodeValue) left);
    }
    // Resolve the right operand the same way (was testing `left` before).
    Serializable right = node.getRight();
    if (right instanceof CharSequence) {
        right = info.getUpdateAttribute(right.toString()).get(entity);
    } else if (right instanceof ColumnNodeValue) {
        right = updateColumnNodeValue(attr, entity, (ColumnNodeValue) right);
    }
    return getValue((Number) left, node.getExpress(), right);
}
/**
 * Applies a numeric column expression to the current value and returns the
 * new value. A null current value becomes {@code val} for INC/DEC and 0 for
 * the other operators.
 *
 * NOTE(review): INC/DEC widen to float/double when either operand is floating
 * point, but MUL/DIV combine {@code numb.longValue()} with a float operand
 * (the fractional part of {@code numb} is dropped and precision is limited to
 * float) — confirm whether that asymmetry is intended.
 */
private <V> Number getValue(Number numb, final ColumnExpress express, Serializable val) {
    switch (express) {
        case INC:
            if (numb == null) {
                numb = (Number) val;
            } else {
                // Widen to the floating-point type when either side is float/double.
                if (numb instanceof Float || ((Number) val) instanceof Float) {
                    numb = numb.floatValue() + ((Number) val).floatValue();
                } else if (numb instanceof Double || ((Number) val) instanceof Double) {
                    numb = numb.doubleValue() + ((Number) val).doubleValue();
                } else {
                    numb = numb.longValue() + ((Number) val).longValue();
                }
            }
            break;
        case DEC:
            if (numb == null) {
                numb = (Number) val;
            } else {
                if (numb instanceof Float || ((Number) val) instanceof Float) {
                    numb = numb.floatValue() - ((Number) val).floatValue();
                } else if (numb instanceof Double || ((Number) val) instanceof Double) {
                    numb = numb.doubleValue() - ((Number) val).doubleValue();
                } else {
                    numb = numb.longValue() - ((Number) val).longValue();
                }
            }
            break;
        case MUL:
            if (numb == null) {
                numb = 0;
            } else {
                // long * float: result is a float (see review note above).
                numb = numb.longValue() * ((Number) val).floatValue();
            }
            break;
        case DIV:
            if (numb == null) {
                numb = 0;
            } else {
                numb = numb.longValue() / ((Number) val).floatValue();
            }
            break;
        case MOD:
            if (numb == null) {
                numb = 0;
            } else {
                numb = numb.longValue() % ((Number) val).intValue();
            }
            break;
        case AND:
            if (numb == null) {
                numb = 0;
            } else {
                numb = numb.longValue() & ((Number) val).longValue();
            }
            break;
        case ORR:
            if (numb == null) {
                numb = 0;
            } else {
                numb = numb.longValue() | ((Number) val).longValue();
            }
            break;
    }
    return numb;
}
/**
 * Looks up the attribute accessor for the given field name, delegating to the
 * entity's EntityInfo.
 *
 * @param fieldname entity field name
 *
 * @return the Attribute registered for that field
 */
public Attribute<T, Serializable> getAttribute(String fieldname) {
    return info.getAttribute(fieldname);
}

View File

@@ -9,7 +9,7 @@ import java.io.Serializable;
import java.lang.reflect.*;
import java.sql.*;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.*;
import java.util.concurrent.atomic.*;
import java.util.function.*;
import java.util.logging.*;
@@ -89,16 +89,19 @@ public final class EntityInfo<T> {
final String notcontainSQL;
//用于判断表不存在的使用, 多个SQLState用;隔开
final String tablenotexistSqlstates;
private final String tablenotexistSqlstates;
//用于复制表结构使用
final String tablecopySQL;
private final String tablecopySQL;
//用于存在table_20160202类似这种分布式表
final Set<String> tables = new HashSet<>();
//用于存在database.table_20160202类似这种分布式表
private final Set<String> tables = new CopyOnWriteArraySet<>();
//不能为null的字段名
private final Set<String> notNullColumns = new CopyOnWriteArraySet<>();
//分表 策略
final DistributeTableStrategy<T> tableStrategy;
private final DistributeTableStrategy<T> tableStrategy;
//根据主键查找单个对象的SQL
private final String queryPrepareSQL;
@@ -154,12 +157,6 @@ public final class EntityInfo<T> {
//Flipper.sort转换成以ORDER BY开头SQL的缓存
private final Map<String, String> sortOrderbySqls = new ConcurrentHashMap<>();
//是否由数据库生成主键值
final boolean autoGenerated;
//是否UUID主键
final boolean autouuid;
//所属的DataSource
final DataSource source;
@@ -245,7 +242,7 @@ public final class EntityInfo<T> {
}
//---------------------------------------------
Table t = type.getAnnotation(Table.class);
if (type.getAnnotation(VirtualEntity.class) != null || "memory".equalsIgnoreCase(source.getType())) {
if (type.getAnnotation(VirtualEntity.class) != null || (source == null || "memory".equalsIgnoreCase(source.getType()))) {
this.table = null;
BiFunction<DataSource, Class, List> loader = null;
try {
@@ -287,8 +284,6 @@ public final class EntityInfo<T> {
List<Attribute<T, Serializable>> insertattrs = new ArrayList<>();
List<String> updatecols = new ArrayList<>();
List<Attribute<T, Serializable>> updateattrs = new ArrayList<>();
boolean auto = false;
boolean uuid = false;
Map<Class, Creator<CryptHandler>> cryptCreatorMap = new HashMap<>();
do {
for (Field field : cltmp.getDeclaredFields()) {
@@ -312,25 +307,14 @@ public final class EntityInfo<T> {
}
Attribute attr;
try {
attr = Attribute.create(cltmp, field, cryptHandler);
attr = Attribute.create(type, cltmp, field, cryptHandler);
} catch (RuntimeException e) {
continue;
}
if (field.getAnnotation(javax.persistence.Id.class) != null && idAttr0 == null) {
idAttr0 = attr;
GeneratedValue gv = field.getAnnotation(GeneratedValue.class);
auto = gv != null;
// if (gv != null && gv.strategy() != GenerationType.IDENTITY) {
// throw new RuntimeException(cltmp.getName() + "'s @ID primary not a GenerationType.IDENTITY");
// }
if (gv != null && field.getType() == String.class) { //UUID
uuid = true;
auto = false;
}
if (!auto) {
insertcols.add(sqlfield);
insertattrs.add(attr);
}
insertcols.add(sqlfield);
insertattrs.add(attr);
} else {
if (col == null || col.insertable()) {
insertcols.add(sqlfield);
@@ -341,6 +325,9 @@ public final class EntityInfo<T> {
updateattrs.add(attr);
updateAttributeMap.put(fieldname, attr);
}
if (col != null && !col.nullable()) {
notNullColumns.add(fieldname);
}
}
queryattrs.add(attr);
fields.add(fieldname);
@@ -456,8 +443,6 @@ public final class EntityInfo<T> {
this.deleteNamesPrepareSQL = null;
this.queryNamesPrepareSQL = null;
}
this.autoGenerated = auto;
this.autouuid = uuid;
//----------------cache--------------
Cacheable c = type.getAnnotation(Cacheable.class);
if (this.table == null || (!cacheForbidden && c != null && c.value())) {
@@ -482,15 +467,6 @@ public final class EntityInfo<T> {
return jsonConvert;
}
/**
* 创建主键值目前只支持UUID赋值
*
* @param src Entity对象
*/
public void createPrimaryValue(T src) {
if (autouuid) getPrimary().set(src, Utility.uuid());
}
/**
* 获取Entity缓存器
*
@@ -536,18 +512,26 @@ public final class EntityInfo<T> {
return table == null;
}
// Whether the database generates the primary key value.
public boolean isAutoGenerated() {
return autoGenerated;
}
// Whether the primary key is a UUID string assigned by the framework.
public boolean isAutouuid() {
return autouuid;
}
// Strategy that maps entities to distributed (sharded) table names.
public DistributeTableStrategy<T> getTableStrategy() {
return tableStrategy;
}
// The tables set doubles as the lock object guarding distributed-table creation.
public Object disTableLock() {
return tables;
}
// True if the distributed table key (e.g. "table_20160202") is already known.
public boolean containsDisTable(String tablekey) {
return tables.contains(tablekey);
}
// Records a newly created distributed table key.
public void addDisTable(String tablekey) {
tables.add(tablekey);
}
// Forgets a distributed table key; returns true if it was present.
public boolean removeDisTable(String tablekey) {
return tables.remove(tablekey);
}
// SQLState codes (separated by ';') that indicate "table does not exist".
public String getTableNotExistSqlStates2() {
return tablenotexistSqlstates;
}
@@ -888,27 +872,72 @@ public final class EntityInfo<T> {
* @return Object
*/
// Converts a field value to the value stored in the database:
// a null value on a NOT NULL json-style column is stored as "", and the value
// is encrypted when a crypt handler is registered for the field.
public Object getSQLValue(String fieldname, Serializable fieldvalue) {
    if (fieldvalue == null && fieldname != null && isNotNullable(fieldname) && isNotNullJson(getAttribute(fieldname))) {
        return "";
    }
    CryptHandler handler = this.cryptmap == null ? null : this.cryptmap.get(fieldname);
    return handler == null ? fieldvalue : handler.encrypt(fieldvalue);
}
/**
 * Converts a field value to its escaped database representation.
 *
 * @param fieldname    field name
 * @param fieldvalue   field value
 * @param sqlFormatter escaping formatter; formatToString is used when null
 *
 * @return CharSequence
 */
public CharSequence formatSQLValue(String fieldname, Serializable fieldvalue, BiFunction<EntityInfo, Object, CharSequence> sqlFormatter) {
    final Object dbval = getSQLValue(fieldname, fieldvalue);
    if (sqlFormatter == null) return formatToString(dbval);
    return sqlFormatter.apply(this, dbval);
}
/**
 * Converts an arbitrary value to its escaped database representation.
 *
 * @param value        field value
 * @param sqlFormatter escaping formatter; formatToString is used when null
 *
 * @return CharSequence
 */
public CharSequence formatSQLValue(Object value, BiFunction<EntityInfo, Object, CharSequence> sqlFormatter) {
    if (sqlFormatter == null) return formatToString(value);
    return sqlFormatter.apply(this, value);
}
/**
* 字段值转换成数据库的值
*
* @param <F> 泛型
* @param attr Attribute
* @param entity 记录对象
*
* @return Object
*/
public Serializable getSQLValue(Attribute<T, Serializable> attr, T entity) {
Serializable val = attr.get(entity);
public <F> Object getSQLValue(Attribute<T, F> attr, T entity) {
Object val = attr.get(entity);
CryptHandler cryptHandler = attr.attach();
if (cryptHandler != null) val = (Serializable) cryptHandler.encrypt(val);
if (cryptHandler != null) val = cryptHandler.encrypt(val);
return val;
}
/**
 * Converts an entity's field value to its escaped database representation.
 *
 * @param <F>          field type
 * @param attr         Attribute accessor
 * @param entity       entity instance
 * @param sqlFormatter escaping formatter; formatToString is used when null
 *
 * @return CharSequence
 */
public <F> CharSequence formatSQLValue(Attribute<T, F> attr, T entity, BiFunction<EntityInfo, Object, CharSequence> sqlFormatter) {
    final Object dbval = getSQLValue(attr, entity);
    if (sqlFormatter == null) return formatToString(dbval);
    return sqlFormatter.apply(this, dbval);
}
/**
* 数据库的值转换成数字段值
*
@@ -947,30 +976,88 @@ public final class EntityInfo<T> {
/**
* 拼接UPDATE给字段赋值的SQL片段
*
* @param col 表字段名
* @param attr Attribute
* @param cv ColumnValue
* @param sqlColumn 表字段名
* @param attr Attribute
* @param cv ColumnValue
* @param formatter 转义器
*
* @return CharSequence
*/
protected CharSequence formatSQLValue(String col, Attribute<T, Serializable> attr, final ColumnValue cv) {
protected CharSequence formatSQLValue(String sqlColumn, Attribute<T, Serializable> attr, final ColumnValue cv, BiFunction<EntityInfo, Object, CharSequence> formatter) {
if (cv == null) return null;
Object val = cv.getValue();
CryptHandler handler = attr.attach();
if (handler != null) val = handler.encrypt(val);
//ColumnNodeValue时 cv.getExpress() == ColumnExpress.MOV 只用于updateColumn
if (val instanceof ColumnNodeValue) return formatSQLValue(attr, null, (ColumnNodeValue) val, formatter);
if (val instanceof ColumnFuncNode) return formatSQLValue(attr, null, (ColumnFuncNode) val, formatter);
switch (cv.getExpress()) {
case INC:
return new StringBuilder().append(col).append(" + ").append(val);
return new StringBuilder().append(sqlColumn).append(" + ").append(val);
case DEC:
return new StringBuilder().append(sqlColumn).append(" - ").append(val);
case MUL:
return new StringBuilder().append(col).append(" * ").append(val);
return new StringBuilder().append(sqlColumn).append(" * ").append(val);
case DIV:
return new StringBuilder().append(sqlColumn).append(" / ").append(val);
case MOD:
return new StringBuilder().append(sqlColumn).append(" % ").append(val);
case AND:
return new StringBuilder().append(col).append(" & ").append(val);
return new StringBuilder().append(sqlColumn).append(" & ").append(val);
case ORR:
return new StringBuilder().append(col).append(" | ").append(val);
return new StringBuilder().append(sqlColumn).append(" | ").append(val);
case MOV:
return formatToString(val);
CryptHandler handler = attr.attach();
if (handler != null) val = handler.encrypt(val);
CharSequence rs = formatter == null ? formatToString(val) : formatter.apply(this, val);
if (rs == null && isNotNullJson(attr)) rs = "";
return rs;
}
return formatToString(val);
CryptHandler handler = attr.attach();
if (handler != null) val = handler.encrypt(val);
return formatter == null ? formatToString(val) : formatter.apply(this, val);
}
/**
 * Renders a ColumnFuncNode as SQL: the inner value (a nested expression or a
 * column name) is rendered first, then wrapped by the node's SQL function.
 */
protected CharSequence formatSQLValue(Attribute<T, Serializable> attr, String tabalis, final ColumnFuncNode node, BiFunction<EntityInfo, Object, CharSequence> formatter) {
    final Object inner = node.getValue();
    if (inner instanceof ColumnNodeValue) {
        // Nested expression: render it, then apply the function to the result.
        return node.getFunc().getColumn(formatSQLValue(attr, tabalis, (ColumnNodeValue) inner, formatter).toString());
    }
    // Otherwise the value is a column name.
    return node.getFunc().getColumn(this.getSQLColumn(tabalis, String.valueOf(inner)));
}
/**
 * Renders a ColumnNodeValue as an SQL binary expression ("left OP right"):
 * CharSequence operands become column references, nested nodes are rendered
 * recursively and parenthesized.
 *
 * Defect fixed: the right-operand else-if branches tested {@code left} instead
 * of {@code right}, so a nested node on the right side was never rendered
 * (same copy-paste bug as updateColumnNodeValue).
 *
 * @param attr      column attribute (passed through to recursion)
 * @param tabalis   table alias for the left-hand column reference
 * @param node      expression to render
 * @param formatter escaping formatter (passed through to recursion)
 *
 * @return the SQL fragment
 *
 * @throws IllegalArgumentException when the express is null or MOV
 */
protected CharSequence formatSQLValue(Attribute<T, Serializable> attr, String tabalis, final ColumnNodeValue node, BiFunction<EntityInfo, Object, CharSequence> formatter) {
    Serializable left = node.getLeft();
    if (left instanceof CharSequence) {
        left = this.getSQLColumn(tabalis, left.toString());
    } else if (left instanceof ColumnNodeValue) {
        left = "(" + formatSQLValue(attr, tabalis, (ColumnNodeValue) left, formatter) + ")";
    } else if (left instanceof ColumnFuncNode) {
        left = "(" + formatSQLValue(attr, tabalis, (ColumnFuncNode) left, formatter) + ")";
    }
    Serializable right = node.getRight();
    if (right instanceof CharSequence) {
        right = this.getSQLColumn(null, right.toString());
    } else if (right instanceof ColumnNodeValue) { // was testing `left` before
        right = "(" + formatSQLValue(attr, tabalis, (ColumnNodeValue) right, formatter) + ")";
    } else if (right instanceof ColumnFuncNode) { // was testing `left` before
        right = "(" + formatSQLValue(attr, tabalis, (ColumnFuncNode) right, formatter) + ")";
    }
    switch (node.getExpress()) {
        case INC:
            return new StringBuilder().append(left).append(" + ").append(right);
        case DEC:
            return new StringBuilder().append(left).append(" - ").append(right);
        case MUL:
            return new StringBuilder().append(left).append(" * ").append(right);
        case DIV:
            return new StringBuilder().append(left).append(" / ").append(right);
        case MOD:
            return new StringBuilder().append(left).append(" % ").append(right);
        case AND:
            return new StringBuilder().append(left).append(" & ").append(right);
        case ORR:
            return new StringBuilder().append(left).append(" | ").append(right);
    }
    throw new IllegalArgumentException(node + " express cannot be null or MOV");
}
/**
@@ -994,6 +1081,24 @@ public final class EntityInfo<T> {
return logger.isLoggable(l) && l.intValue() >= this.logLevel;
}
// True if the field was declared NOT NULL (@Column(nullable = false)).
public boolean isNotNullable(String fieldname) {
return notNullColumns.contains(fieldname);
}
// Attribute overload of isNotNullable; a null attribute is nullable.
public boolean isNotNullable(Attribute<T, Serializable> attr) {
return attr == null ? false : notNullColumns.contains(attr.field());
}
// True if the field is NOT NULL and of a type stored as JSON text
// (i.e. not a number, string, or date/time type) — such fields store ""
// instead of NULL.
public boolean isNotNullJson(Attribute<T, Serializable> attr) {
if (attr == null) return false;
return notNullColumns.contains(attr.field())
&& !Number.class.isAssignableFrom(attr.type())
&& !CharSequence.class.isAssignableFrom(attr.type())
&& java.util.Date.class != attr.type()
&& !attr.type().getName().startsWith("java.sql.")
&& !attr.type().getName().startsWith("java.time.");
}
/**
* 判断日志级别
*
@@ -1021,7 +1126,7 @@ public final class EntityInfo<T> {
*
* @return String
*/
protected String formatToString(Object value) {
private String formatToString(Object value) {
if (value == null) return null;
if (value instanceof CharSequence) {
return new StringBuilder().append('\'').append(value.toString().replace("'", "\\'")).append('\'').toString();

View File

@@ -31,6 +31,11 @@ public enum FilterExpress {
NOTLIKE("NOT LIKE"),
IGNORECASELIKE("LIKE"), //不区分大小写的 LIKE
IGNORECASENOTLIKE("NOT LIKE"), //不区分大小写的 NOT LIKE
LENGTH_EQUAL("="), //字符串值的长度
LENGTH_LESSTHAN("<"), //字符串值的长度 <
LENGTH_LESSTHANOREQUALTO("<="), //字符串值的长度 <=
LENGTH_GREATERTHAN(">"), //字符串值的长度 >
LENGTH_GREATERTHANOREQUALTO(">="), //字符串值的长度 >=
CONTAIN("CONTAIN"), //包含, 相当于反向LIKE
NOTCONTAIN("NOT CONTAIN"), //不包含, 相当于反向LIKE

View File

@@ -34,12 +34,12 @@ public @interface FilterJoinColumn {
/**
*
* 多个关联字段, 默认使用join表(b)的主键, join表与被join表(a)的字段必须一样 <br>
* 例如: SELECT a.* FROM user a INNER JOIN record b ON a.userid = b.userid AND a.usertype = b.usertype <br>
* 那么注解为: &#64;FilterJoinColumn(table = Record.class, columns = {"userid", "usertype"}) <br>
* 例如: SELECT a.* FROM user a INNER JOIN orderinfo b ON a.userid = b.userid AND a.usertype = b.usertype <br>
* 那么注解为: &#64;FilterJoinColumn(table = OrderInfo.class, columns = {"userid", "usertype"}) <br>
* <p>
* columns中的字段名如果不一致可以将两个字段名用=连接成一个字段名 <br>
* 例如: SELECT a.* FROM user a INNER JOIN record b ON a.userid = b.buyerid AND a.usertype = b.usertype <br>
* 那么注解为: &#64;FilterJoinColumn(table = Record.class, columns = {"userid=buyerid", "usertype"}) <br>
* 例如: SELECT a.* FROM user a INNER JOIN orderinfo b ON a.userid = b.buyerid AND a.usertype = b.usertype <br>
* 那么注解为: &#64;FilterJoinColumn(table = OrderInfo.class, columns = {"userid=buyerid", "usertype"}) <br>
*
* @return 关联字段
*/

View File

@@ -397,7 +397,10 @@ public class FilterNode { //FilterNode 不能实现Serializable接口 否则
if (express == NOTCONTAIN) return info.notcontainSQL.replace("${column}", info.getSQLColumn(talis, column)).replace("${keystr}", val);
if (express == IGNORECASENOTCONTAIN) return info.notcontainSQL.replace("${column}", "LOWER(" + info.getSQLColumn(talis, column) + ")").replace("${keystr}", val);
if (express == IGNORECASEEQUAL || express == IGNORECASENOTEQUAL || express == IGNORECASELIKE || express == IGNORECASENOTLIKE) {
if (express == LENGTH_EQUAL || express == LENGTH_LESSTHAN || express == LENGTH_LESSTHANOREQUALTO
|| express == LENGTH_GREATERTHAN || express == LENGTH_GREATERTHANOREQUALTO) {
sb.append("LENGTH(").append(info.getSQLColumn(talis, column)).append(')');
} else if (express == IGNORECASEEQUAL || express == IGNORECASENOTEQUAL || express == IGNORECASELIKE || express == IGNORECASENOTLIKE) {
sb.append("LOWER(").append(info.getSQLColumn(talis, column)).append(')');
if (fk) val = "LOWER(" + info.getSQLColumn(talis, ((FilterKey) val0).getColumn()) + ')';
} else {
@@ -881,7 +884,7 @@ public class FilterNode { //FilterNode 不能实现Serializable接口 否则
@Override
public boolean test(T t) {
return ((Number) attr.get(t)).longValue() > ((Number) fkattr.get(t)).longValue();
return ((Comparable) attr.get(t)).compareTo((Comparable) fkattr.get(t)) > 0;
}
@Override
@@ -892,7 +895,7 @@ public class FilterNode { //FilterNode 不能实现Serializable接口 否则
@Override
public boolean test(T t) {
return ((Number) attr.get(t)).longValue() > ((Number) val).longValue();
return ((Comparable) attr.get(t)).compareTo(((Comparable) val)) > 0;
}
@Override
@@ -905,7 +908,7 @@ public class FilterNode { //FilterNode 不能实现Serializable接口 否则
@Override
public boolean test(T t) {
return ((Number) attr.get(t)).longValue() < ((Number) fkattr.get(t)).longValue();
return ((Comparable) attr.get(t)).compareTo((Comparable) fkattr.get(t)) < 0;
}
@Override
@@ -916,7 +919,7 @@ public class FilterNode { //FilterNode 不能实现Serializable接口 否则
@Override
public boolean test(T t) {
return ((Number) attr.get(t)).longValue() < ((Number) val).longValue();
return ((Comparable) attr.get(t)).compareTo(((Comparable) val)) < 0;
}
@Override
@@ -929,7 +932,7 @@ public class FilterNode { //FilterNode 不能实现Serializable接口 否则
@Override
public boolean test(T t) {
return ((Number) attr.get(t)).longValue() >= ((Number) fkattr.get(t)).longValue();
return ((Comparable) attr.get(t)).compareTo((Comparable) fkattr.get(t)) >= 0;
}
@Override
@@ -940,7 +943,7 @@ public class FilterNode { //FilterNode 不能实现Serializable接口 否则
@Override
public boolean test(T t) {
return ((Number) attr.get(t)).longValue() >= ((Number) val).longValue();
return ((Comparable) attr.get(t)).compareTo(((Comparable) val)) >= 0;
}
@Override
@@ -953,7 +956,7 @@ public class FilterNode { //FilterNode 不能实现Serializable接口 否则
@Override
public boolean test(T t) {
return ((Number) attr.get(t)).longValue() <= ((Number) fkattr.get(t)).longValue();
return ((Comparable) attr.get(t)).compareTo((Comparable) fkattr.get(t)) <= 0;
}
@Override
@@ -964,7 +967,7 @@ public class FilterNode { //FilterNode 不能实现Serializable接口 否则
@Override
public boolean test(T t) {
return ((Number) attr.get(t)).longValue() <= ((Number) val).longValue();
return ((Comparable) attr.get(t)).compareTo(((Comparable) val)) <= 0;
}
@Override
@@ -1406,6 +1409,81 @@ public class FilterNode { //FilterNode 不能实现Serializable接口 否则
return "LOWER(" + field + ") " + express.value() + ' ' + formatToString(valstr2);
}
};
case LENGTH_EQUAL:
final int intval = ((Number) val).intValue();
return new Predicate<T>() {
@Override
public boolean test(T t) {
Object rs = attr.get(t);
return (rs == null && 0 == intval) || (rs != null && rs.toString().length() == intval);
}
@Override
public String toString() {
return "LENGTH(" + field + ") " + express.value() + ' ' + intval;
}
};
case LENGTH_LESSTHAN:
final int intval2 = ((Number) val).intValue();
return new Predicate<T>() {
@Override
public boolean test(T t) {
Object rs = attr.get(t);
return (rs == null && 0 < intval2) || (rs != null && rs.toString().length() < intval2);
}
@Override
public String toString() {
return "LENGTH(" + field + ") " + express.value() + ' ' + intval2;
}
};
case LENGTH_LESSTHANOREQUALTO:
final int intval3 = ((Number) val).intValue();
return new Predicate<T>() {
@Override
public boolean test(T t) {
Object rs = attr.get(t);
return (rs == null && 0 <= intval3) || (rs != null && rs.toString().length() <= intval3);
}
@Override
public String toString() {
return "LENGTH(" + field + ") " + express.value() + ' ' + intval3;
}
};
case LENGTH_GREATERTHAN:
final int intval4 = ((Number) val).intValue();
return new Predicate<T>() {
@Override
public boolean test(T t) {
Object rs = attr.get(t);
return (rs == null && 0 > intval4) || (rs != null && rs.toString().length() > intval4);
}
@Override
public String toString() {
return "LENGTH(" + field + ") " + express.value() + ' ' + intval4;
}
};
case LENGTH_GREATERTHANOREQUALTO:
final int intval5 = ((Number) val).intValue();
return new Predicate<T>() {
@Override
public boolean test(T t) {
Object rs = attr.get(t);
return (rs == null && 0 >= intval5) || (rs != null && rs.toString().length() >= intval5);
}
@Override
public String toString() {
return "LENGTH(" + field + ") " + express.value() + ' ' + intval5;
}
};
case CONTAIN:
return fk ? new Predicate<T>() {
@@ -1811,7 +1889,7 @@ public class FilterNode { //FilterNode 不能实现Serializable接口 否则
return sb;
}
protected static CharSequence formatToString(Object value) {
private static CharSequence formatToString(Object value) {
CharSequence sb = formatToString(null, value);
return sb == null ? null : sb.toString();
}

View File

@@ -363,6 +363,9 @@ public final class FilterNodeBean<T extends FilterBean> implements Comparable<Fi
sb.append(col).append(' ').append(express.value());
} else if (express == ISEMPTY || express == ISNOTEMPTY) {
sb.append(col).append(' ').append(express.value()).append(" ''");
} else if (express == LENGTH_EQUAL || express == LENGTH_LESSTHAN || express == LENGTH_LESSTHANOREQUALTO
|| express == LENGTH_GREATERTHAN || express == LENGTH_GREATERTHANOREQUALTO) {
sb.append("LENGTH(").append(col).append(") ").append(express.value()).append(" ?");
} else {
boolean lower = (express == IGNORECASEEQUAL || express == IGNORECASENOTEQUAL || express == IGNORECASELIKE
|| express == IGNORECASENOTLIKE || express == IGNORECASECONTAIN || express == IGNORECASENOTCONTAIN);

View File

@@ -74,9 +74,9 @@ public abstract class PoolSource<DBChannel> {
this.username = prop.getProperty(JDBC_USER, "");
this.password = prop.getProperty(JDBC_PWD, "");
this.encoding = prop.getProperty(JDBC_ENCODING, "");
this.connectTimeoutSeconds = Integer.decode(prop.getProperty(JDBC_CONNECTTIMEOUT_SECONDS, "3"));
this.readTimeoutSeconds = Integer.decode(prop.getProperty(JDBC_READTIMEOUT_SECONDS, "3"));
this.writeTimeoutSeconds = Integer.decode(prop.getProperty(JDBC_WRITETIMEOUT_SECONDS, "3"));
this.connectTimeoutSeconds = Integer.decode(prop.getProperty(JDBC_CONNECTTIMEOUT_SECONDS, "6"));
this.readTimeoutSeconds = Integer.decode(prop.getProperty(JDBC_READTIMEOUT_SECONDS, "6"));
this.writeTimeoutSeconds = Integer.decode(prop.getProperty(JDBC_WRITETIMEOUT_SECONDS, "6"));
this.maxconns = Math.max(8, Integer.decode(prop.getProperty(JDBC_CONNECTIONS_LIMIT, "" + Runtime.getRuntime().availableProcessors() * 100)));
this.semaphore = semaphore == null ? new Semaphore(this.maxconns) : semaphore;
String dbtype0 = "";

View File

@@ -38,6 +38,8 @@ public abstract class PoolTcpSource extends PoolSource<AsyncConnection> {
//TCP Channel组
protected AsynchronousChannelGroup group;
protected ScheduledThreadPoolExecutor scheduler;
protected final ArrayBlockingQueue<AsyncConnection> connQueue;
public PoolTcpSource(String rwtype, ArrayBlockingQueue queue, Semaphore semaphore, Properties prop, Logger logger, ObjectPool<ByteBuffer> bufferPool, ThreadPoolExecutor executor) {
@@ -50,6 +52,42 @@ public abstract class PoolTcpSource extends PoolSource<AsyncConnection> {
throw new RuntimeException(e);
}
this.connQueue = queue == null ? new ArrayBlockingQueue<>(this.maxconns) : queue;
this.scheduler = new ScheduledThreadPoolExecutor(1, (Runnable r) -> {
final Thread t = new Thread(r, "PoolSource-Scheduled-Thread");
t.setDaemon(true);
return t;
});
this.scheduler.scheduleAtFixedRate(() -> {
runPingTask();
}, 60, 30, TimeUnit.SECONDS);
}
private void runPingTask() {
try {
if (connQueue.isEmpty()) return;
long time = System.currentTimeMillis() - 30 * 1000;
AsyncConnection first = connQueue.peek();
if (first == null || first.getLastReadTime() >= time || first.getLastWriteTime() >= time) return;
pollAsync().whenComplete((conn, e) -> {
if (e != null) return;
if (conn.getLastReadTime() >= time || conn.getLastWriteTime() >= time) {//半分钟内已经用过
offerConnection(conn);
return;
}
CompletableFuture<AsyncConnection> future = sendPingCommand(conn);
if (future == null) { //不支持ping
offerConnection(conn);
return;
}
future.whenComplete((conn2, e2) -> {
if (e2 != null) return;
offerConnection(conn2);
runPingTask();
});
});
} catch (Exception e) {
logger.log(Level.FINEST, "PoolSource task ping failed", e);
}
}
@Override
@@ -216,6 +254,7 @@ public abstract class PoolTcpSource extends PoolSource<AsyncConnection> {
@Override
public void close() {
this.scheduler.shutdownNow();
connQueue.stream().forEach(x -> {
CompletableFuture<AsyncConnection> future = null;
try {
@@ -232,5 +271,7 @@ public abstract class PoolTcpSource extends PoolSource<AsyncConnection> {
});
}
protected abstract CompletableFuture<AsyncConnection> sendPingCommand(final AsyncConnection conn);
protected abstract CompletableFuture<AsyncConnection> sendCloseCommand(final AsyncConnection conn);
}

View File

@@ -86,7 +86,7 @@ public interface Range<E extends Comparable> extends java.io.Serializable, Predi
@Override
public boolean test(Byte t) {
if (max < min) return t >= min;
if (max < min && max <= 0) return t >= min;
return t >= min && t <= max;
}
@@ -131,7 +131,7 @@ public interface Range<E extends Comparable> extends java.io.Serializable, Predi
@Override
public boolean test(Short t) {
if (max < min) return t >= min;
if (max < min && max <= 0) return t >= min;
return t >= min && t <= max;
}
@@ -175,7 +175,7 @@ public interface Range<E extends Comparable> extends java.io.Serializable, Predi
@Override
public boolean test(Integer t) {
if (max < min) return t >= min;
if (max < min && max <= 0) return t >= min;
return t >= min && t <= max;
}
@@ -229,7 +229,7 @@ public interface Range<E extends Comparable> extends java.io.Serializable, Predi
@Override
public boolean test(Long t) {
if (max < min) return t >= min;
if (max < min && max <= 0) return t >= min;
return t >= min && t <= max;
}
@@ -273,7 +273,7 @@ public interface Range<E extends Comparable> extends java.io.Serializable, Predi
@Override
public boolean test(Float t) {
if (max < min) return t >= min;
if (max < min && max <= 0) return t >= min;
return t >= min && t <= max;
}
@@ -317,7 +317,7 @@ public interface Range<E extends Comparable> extends java.io.Serializable, Predi
@Override
public boolean test(Double t) {
if (max < min) return t >= min;
if (max < min && max <= 0) return t >= min;
return t >= min && t <= max;
}

View File

@@ -37,6 +37,8 @@ import static org.redkale.asm.Opcodes.*;
*
* private java.lang.reflect.Type _gtype = String.class;
*
* private java.lang.Object _attach;
*
* &#64;Override
* public String field() {
* return "name";
@@ -63,6 +65,11 @@ import static org.redkale.asm.Opcodes.*;
* }
*
* &#64;Override
* public Object attach() {
* return _attach;
* }
*
* &#64;Override
* public Class declaringClass() {
* return Record.class;
* }
@@ -253,6 +260,21 @@ public interface Attribute<T, F> {
return create(clazz, field.getName(), (Class) null, field, (java.lang.reflect.Method) null, (java.lang.reflect.Method) null, null);
}
/**
* 根据一个Class和Field生成 Attribute 对象。
*
* @param <T> 依附类的类型
* @param <F> 字段类型
* @param subclass 指定依附的子类
* @param clazz 指定依附的类
* @param field 字段,如果该字段不存在则抛异常
*
* @return Attribute对象
*/
public static <T, F> Attribute<T, F> create(Class<T> subclass, Class<T> clazz, final java.lang.reflect.Field field) {
return create(subclass, clazz, field.getName(), (Class) null, field, (java.lang.reflect.Method) null, (java.lang.reflect.Method) null, null);
}
/**
* 根据一个Class和Field生成 Attribute 对象。
*
@@ -268,6 +290,22 @@ public interface Attribute<T, F> {
return create(clazz, field.getName(), (Class) null, field, (java.lang.reflect.Method) null, (java.lang.reflect.Method) null, attach);
}
/**
* 根据一个Class和Field生成 Attribute 对象。
*
* @param <T> 依附类的类型
* @param <F> 字段类型
* @param subclass 指定依附的子类
* @param clazz 指定依附的类
* @param field 字段,如果该字段不存在则抛异常
* @param attach 附加对象
*
* @return Attribute对象
*/
public static <T, F> Attribute<T, F> create(Class<T> subclass, Class<T> clazz, final java.lang.reflect.Field field, Object attach) {
return create(subclass, clazz, field.getName(), (Class) null, field, (java.lang.reflect.Method) null, (java.lang.reflect.Method) null, attach);
}
/**
* 根据一个Class、field别名和Field生成 Attribute 对象。
*
@@ -578,6 +616,28 @@ public interface Attribute<T, F> {
*/
@SuppressWarnings("unchecked")
public static <T, F> Attribute<T, F> create(final Class<T> clazz, String fieldalias, final Class<F> fieldtype, final java.lang.reflect.Field field, java.lang.reflect.Method getter, java.lang.reflect.Method setter, Object attach) {
return create(null, clazz, fieldalias, fieldtype, field, getter, setter, attach);
}
/**
* 根据Class、字段别名、字段类型、Field、getter和setter方法生成 Attribute 对象。 fieldalias/fieldtype、Field、tgetter、setter不能同时为null.
*
* @param <T> 依附类的类型
* @param <F> 字段类型
* @param subclass 指定依附的子类
* @param clazz 指定依附的类
* @param fieldalias 字段别名
* @param fieldtype 字段类型
* @param field 字段
* @param getter getter方法
* @param setter setter方法
* @param attach 附加对象
*
* @return Attribute对象
*/
@SuppressWarnings("unchecked")
public static <T, F> Attribute<T, F> create(Class<T> subclass, final Class<T> clazz, String fieldalias, final Class<F> fieldtype, final java.lang.reflect.Field field, java.lang.reflect.Method getter, java.lang.reflect.Method setter, Object attach) {
if (subclass == null) subclass = clazz;
if (fieldalias != null && fieldalias.isEmpty()) fieldalias = null;
int mod = field == null ? java.lang.reflect.Modifier.STATIC : field.getModifiers();
if (field != null && !java.lang.reflect.Modifier.isStatic(mod) && !java.lang.reflect.Modifier.isPublic(mod)) {
@@ -627,6 +687,7 @@ public interface Attribute<T, F> {
final String fieldname = fieldalias;
Class column = fieldtype;
java.lang.reflect.Type generictype = fieldtype;
if (tfield != null) { // public tfield
column = tfield.getType();
generictype = tfield.getGenericType();
@@ -638,20 +699,28 @@ public interface Attribute<T, F> {
generictype = tsetter.getGenericParameterTypes()[0];
} else if (fieldtype == null) {
throw new RuntimeException("[" + clazz + "]have no public field or setter or getter");
} else if (column == null) {
throw new RuntimeException("[" + clazz + "]have no field type");
}
boolean checkCast = false;
if (generictype instanceof java.lang.reflect.TypeVariable) {
checkCast = true;
generictype = TypeToken.getGenericType(generictype, subclass);
if (generictype instanceof Class) column = (Class) generictype;
}
final Class pcolumn = column;
if (column.isPrimitive()) column = java.lang.reflect.Array.get(java.lang.reflect.Array.newInstance(column, 1), 0).getClass();
final String supDynName = Attribute.class.getName().replace('.', '/');
final String interName = clazz.getName().replace('.', '/');
final String interName = subclass.getName().replace('.', '/');
final String columnName = column.getName().replace('.', '/');
final String interDesc = Type.getDescriptor(clazz);
final String interDesc = Type.getDescriptor(subclass);
final String columnDesc = Type.getDescriptor(column);
ClassLoader loader = Thread.currentThread().getContextClassLoader();
String newDynName = supDynName + "_Dyn_" + clazz.getSimpleName() + "_"
String newDynName = supDynName + "_Dyn_" + subclass.getSimpleName() + "_"
+ fieldname.substring(fieldname.indexOf('.') + 1) + "_" + pcolumn.getSimpleName().replace("[]", "Array");
if (String.class.getClassLoader() != clazz.getClassLoader()) {
loader = clazz.getClassLoader();
if (String.class.getClassLoader() != subclass.getClassLoader()) {
loader = subclass.getClassLoader();
newDynName = interName + "_Dyn" + Attribute.class.getSimpleName() + "_"
+ fieldname.substring(fieldname.indexOf('.') + 1) + "_" + pcolumn.getSimpleName().replace("[]", "Array");
}
@@ -735,7 +804,7 @@ public interface Attribute<T, F> {
}
{ //declaringClass 方法
mv = cw.visitMethod(ACC_PUBLIC, "declaringClass", "()Ljava/lang/Class;", null, null);
mv.visitLdcInsn(Type.getType(clazz));
mv.visitLdcInsn(Type.getType(subclass));
mv.visitInsn(ARETURN);
mv.visitMaxs(1, 1);
mv.visitEnd();
@@ -752,6 +821,8 @@ public interface Attribute<T, F> {
if (pcolumn != column) {
mv.visitMethodInsn(INVOKESTATIC, columnName, "valueOf", "(" + Type.getDescriptor(pcolumn) + ")" + columnDesc, false);
m = 2;
} else {
if (checkCast) mv.visitTypeInsn(CHECKCAST, columnName);
}
}
} else {
@@ -760,6 +831,8 @@ public interface Attribute<T, F> {
if (pcolumn != column) {
mv.visitMethodInsn(INVOKESTATIC, columnName, "valueOf", "(" + Type.getDescriptor(pcolumn) + ")" + columnDesc, false);
m = 2;
} else {
if (checkCast) mv.visitTypeInsn(CHECKCAST, columnName);
}
}
mv.visitInsn(ARETURN);

View File

@@ -5,9 +5,9 @@
*/
package org.redkale.util;
import java.nio.*;
import java.nio.ByteBuffer;
import java.nio.charset.*;
import java.util.*;
import java.util.Arrays;
/**
* 简单的byte[]操作类。
@@ -142,6 +142,21 @@ public final class ByteArray {
return Arrays.copyOf(content, count);
}
/**
* 获取byte[]
*
* @param offset 偏移位
* @param length 长度
*
* @return byte[]
*/
public byte[] getBytes(int offset, int length) {
if (length < 1) return new byte[0];
byte[] bs = new byte[length];
System.arraycopy(this.content, offset, bs, 0, length);
return bs;
}
/**
* 获取byte[]并清空
*
@@ -323,10 +338,21 @@ public final class ByteArray {
* @return 字符串
*/
public String toString(final int offset, int len, final Charset charset) {
if (charset == null) return new String(Utility.decodeUTF8(content, offset, len));
if (charset == null) return new String(content, offset, len, StandardCharsets.UTF_8);
return new String(content, offset, len, charset);
}
/**
* 将指定的起始位置和长度按指定字符集并转义后转成字符串
*
* @param charset 字符集
*
* @return 字符串
*/
public String toDecodeString(final Charset charset) {
return toDecodeString(0, count, charset);
}
/**
* 将指定的起始位置和长度按指定字符集并转义后转成字符串
*
@@ -367,7 +393,7 @@ public final class ByteArray {
start = 0;
len = index;
}
if (charset == null) return new String(Utility.decodeUTF8(bs, start, len));
if (charset == null) return new String(bs, start, len, StandardCharsets.UTF_8);
return new String(bs, start, len, charset);
}

View File

@@ -51,6 +51,14 @@ public class ByteBufferReader {
this.bigEndian = this.currBuffer.order() == ByteOrder.BIG_ENDIAN;
}
public ByteBufferReader append(ByteBuffer... buffs) {
for (ByteBuffer buf : buffs) {
Objects.requireNonNull(buf);
}
this.buffers = Utility.append(this.buffers, buffs);
return this;
}
public static ByteBufferReader create(ByteBuffer buffer) {
return new ByteBufferReader(buffer);
}
@@ -80,7 +88,30 @@ public class ByteBufferReader {
}
public boolean hasRemaining() {
return this.currBuffer.hasRemaining();
boolean v = this.currBuffer.hasRemaining();
if (v) return v;
if (this.currIndex == this.buffers.length - 1) return false;
for (int i = this.currIndex + 1; i < this.buffers.length; i++) {
if (this.buffers[i].hasRemaining()) return true;
}
return false;
}
public int remaining() {
int v = this.currBuffer.remaining();
for (int i = this.currIndex + 1; i < this.buffers.length; i++) {
v += this.buffers[i].remaining();
}
return v;
}
//提前预读一个字节
public byte preget() {
ByteBuffer buf = this.currBuffer;
if (!buf.hasRemaining()) {
buf = this.buffers[this.currIndex + 1];
}
return buf.get(buf.position());
}
public byte get() {

View File

@@ -85,6 +85,13 @@ public interface Creator<T> {
creatorCacheMap.put(Stream.class, (params) -> new ArrayList<>().stream());
creatorCacheMap.put(ConcurrentHashMap.class, (params) -> new ConcurrentHashMap<>());
creatorCacheMap.put(CompletableFuture.class, (params) -> new CompletableFuture<>());
creatorCacheMap.put(Map.Entry.class, new Creator<Map.Entry>() {
@Override
@ConstructorParameters({"key", "value"})
public Map.Entry create(Object... params) {
return new AbstractMap.SimpleEntry(params[0], params[1]);
}
});
creatorCacheMap.put(AbstractMap.SimpleEntry.class, new Creator<AbstractMap.SimpleEntry>() {
@Override
@ConstructorParameters({"key", "value"})
@@ -232,6 +239,8 @@ public interface Creator<T> {
clazz = (Class<T>) ConcurrentHashMap.class;
} else if (Collection.class.isAssignableFrom(clazz) && clazz.isAssignableFrom(ArrayList.class)) {
clazz = (Class<T>) ArrayList.class;
} else if (Map.Entry.class.isAssignableFrom(clazz) && (Modifier.isInterface(clazz.getModifiers()) || Modifier.isAbstract(clazz.getModifiers()) || !Modifier.isPublic(clazz.getModifiers()))) {
clazz = (Class<T>) AbstractMap.SimpleEntry.class;
}
Creator creator = CreatorInner.creatorCacheMap.get(clazz);
if (creator != null) return creator;

Some files were not shown because too many files have changed in this diff Show More