This commit is contained in:
redkale
2023-12-30 08:53:49 +08:00
parent fe6a5dd91c
commit e6f22a1f18
11 changed files with 84 additions and 35 deletions

View File

@@ -14,13 +14,13 @@
## 开发文档
* [快速入门](docs/quick-start.md)
* [Service组件](docs/service.md)
* [数据源组件](docs/source.md)
* [对象序列化](docs/convert.md)
* [Json序列化](docs/convert-json.md)
* [DB数据源组件](docs/datasource.md)
* [Cache数据源组件](docs/cachesource.md)
* [WebSocket](docs/websocket.md)
* [native-image](docs/native-image.md)
* [方法缓存```Cached```](docs/cached.md)
* [定时任务```Scheduled```](docs/scheduled.md)
* [方法缓存Cached](docs/cached.md)
* [定时任务Scheduled](docs/scheduled.md)
* [基本配置](docs/config.md)
* [FAQ](docs/faq.md)

View File

@@ -1,4 +1,32 @@
# DB数据源
   DataSource是数据层操作的抽象接口
## 注解说明
|注解类名 | 功能描述|
| --- | --- |
|@Column |标记字段,与JPA用法一致 |
|@Entity |标记实体类,与JPA用法一致 |
|@Id |标记主键字段,与JPA用法一致 |
|@Table |标记表的别名,与JPA用法一致 |
|@Transient |标记不映射到数据表列的字段,与JPA用法一致 |
|@VirtualEntity |用于非数据库表对应的Entity类且仅用于开启缓存模式的DataSource |
|@DistributeTable |标记表进行分表分库存储, 与DistributeTableStrategy接口结合使用 |
|@FilterColumn |用于FilterBean过滤类的字段设置 |
|@FilterJoinColumn |用于FilterBean过滤类的关联表字段设置 |
|@FilterGroup | 用于FilterBean过滤类的过滤条件分组设置 |
## 操作方法
|系列方法 | 功能描述|
| --- | --- |
|insert |插入实体 |
|delete |删除实体 |
|update |更新实体 |
|updateColumn |更新数据的部分字段 |
|find |查找单个对象 |
|queryList |查询对象的List集合 |
|querySheet |查询对象的Sheet页式集合 |
|getNumberXXX |统计查询,用于查询字段的总和、最大值、平均值等数据 |
|queryColumnXXX |单个字段数据查询和字段的统计查询 |
|nativeXXX |直接运行SQL语句,用于复杂的关联查询与更新(仅限DataSqlSource) |
## 配置数据源
```properties
@@ -37,6 +65,10 @@ public class Account {
  新增实体对象:
```java
@Resource(name = "platf")
private DataSource source;
public void insertTest() {
//新增单个
Account account = new Account();
account.setAccountid("account1");
@@ -54,6 +86,7 @@ public class Account {
a2.setAccountName("Hello2");
a2.setCreateTime(System.currentTimeMillis());
source.insertAsync(a1, a2);
}
```
  修改实体对象:

View File

@@ -189,7 +189,7 @@ public interface DataSqlSource extends DataSource {
});
}
default Map<String, String> nativeQueryStrStrMap(String sql) {
default Map<String, String> nativeQueryToStrStrMap(String sql) {
return nativeQueryMap(String.class, String.class, sql);
}
@@ -197,11 +197,11 @@ public interface DataSqlSource extends DataSource {
return nativeQueryMapAsync(String.class, String.class, sql);
}
default Map<Integer, String> nativeQueryIntStrMap(String sql) {
default Map<Integer, String> nativeQueryToIntStrMap(String sql) {
return nativeQueryMap(Integer.class, String.class, sql);
}
default CompletableFuture<Map<Integer, String>> nativeQueryIntStrMapAsync(String sql) {
default CompletableFuture<Map<Integer, String>> nativeQueryToIntStrMapAsync(String sql) {
return nativeQueryMapAsync(Integer.class, String.class, sql);
}
@@ -292,19 +292,19 @@ public interface DataSqlSource extends DataSource {
}, params);
}
default Map<String, String> nativeQueryStrStrMap(String sql, Map<String, Object> params) {
default Map<String, String> nativeQueryToStrStrMap(String sql, Map<String, Object> params) {
return nativeQueryMap(String.class, String.class, sql, params);
}
default CompletableFuture<Map<String, String>> nativeQueryStrStrMapAsync(String sql, Map<String, Object> params) {
default CompletableFuture<Map<String, String>> nativeQueryToStrStrMapAsync(String sql, Map<String, Object> params) {
return nativeQueryMapAsync(String.class, String.class, sql, params);
}
default Map<Integer, String> nativeQueryIntStrMap(String sql, Map<String, Object> params) {
default Map<Integer, String> nativeQueryToIntStrMap(String sql, Map<String, Object> params) {
return nativeQueryMap(Integer.class, String.class, sql, params);
}
default CompletableFuture<Map<Integer, String>> nativeQueryIntStrMapAsync(String sql, Map<String, Object> params) {
default CompletableFuture<Map<Integer, String>> nativeQueryToIntStrMapAsync(String sql, Map<String, Object> params) {
return nativeQueryMapAsync(Integer.class, String.class, sql, params);
}
@@ -349,19 +349,19 @@ public interface DataSqlSource extends DataSource {
return nativeQueryMapAsync(keyType, valType, sql, (Map<String, Object>) Copier.copyToMap(bean, Copier.OPTION_SKIP_NULL_VALUE));
}
default Map<String, String> nativeQueryStrStrMap(String sql, Serializable bean) {
default Map<String, String> nativeQueryToStrStrMap(String sql, Serializable bean) {
return nativeQueryMap(String.class, String.class, sql, (Map<String, Object>) Copier.copyToMap(bean, Copier.OPTION_SKIP_NULL_VALUE));
}
default CompletableFuture<Map<String, String>> nativeQueryStrStrMapAsync(String sql, Serializable bean) {
default CompletableFuture<Map<String, String>> nativeQueryToStrStrMapAsync(String sql, Serializable bean) {
return nativeQueryMapAsync(String.class, String.class, sql, (Map<String, Object>) Copier.copyToMap(bean, Copier.OPTION_SKIP_NULL_VALUE));
}
default Map<Integer, String> nativeQueryIntStrMap(String sql, Serializable bean) {
default Map<Integer, String> nativeQueryToIntStrMap(String sql, Serializable bean) {
return nativeQueryMap(Integer.class, String.class, sql, (Map<String, Object>) Copier.copyToMap(bean, Copier.OPTION_SKIP_NULL_VALUE));
}
default CompletableFuture<Map<Integer, String>> nativeQueryIntStrMapAsync(String sql, Serializable bean) {
default CompletableFuture<Map<Integer, String>> nativeQueryToIntStrMapAsync(String sql, Serializable bean) {
return nativeQueryMapAsync(Integer.class, String.class, sql, (Map<String, Object>) Copier.copyToMap(bean, Copier.OPTION_SKIP_NULL_VALUE));
}

View File

@@ -5,9 +5,9 @@
*/
package org.redkale.source;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.*;
import static java.lang.annotation.ElementType.*;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
* Entity分库分表的注解需要结合DistributeTableStrategy使用 <br>
@@ -16,6 +16,8 @@ import static java.lang.annotation.ElementType.*;
* <p>
* 详情见: https://redkale.org
*
* @see org.redkale.source.DistributeTableStrategy
*
* @author zhangjx
*/
@Target({TYPE})

View File

@@ -15,6 +15,8 @@ import java.io.Serializable;
* <p>
* 详情见: https://redkale.org
*
* @see org.redkale.source.DistributeTable
*
* @author zhangjx
* @param <T> Entity类型
*/

View File

@@ -15,6 +15,10 @@ import org.redkale.annotation.Bean;
* <p>
* 详情见: https://redkale.org
*
* @see org.redkale.source.FilterColumn
* @see org.redkale.source.FilterJoinColumn
* @see org.redkale.source.FilterGroup
*
* @author zhangjx
*/
@Bean

View File

@@ -15,6 +15,8 @@ import static java.lang.annotation.RetentionPolicy.RUNTIME;
* <p>
* 详情见: https://redkale.org
*
* @see org.redkale.source.FilterBean
*
* @author zhangjx
*/
@Inherited

View File

@@ -5,9 +5,9 @@
*/
package org.redkale.source;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import static java.lang.annotation.ElementType.FIELD;
import java.lang.annotation.*;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
* 默认情况下FilterBean下的过滤字段之间是AND关系。 <br>
@@ -57,6 +57,9 @@ import java.lang.annotation.*;
* <p>
* 详情见: https://redkale.org
*
* @see org.redkale.source.FilterBean
* @see org.redkale.source.FilterNode
*
* @author zhangjx
*/
@Inherited

View File

@@ -16,6 +16,9 @@ import static java.lang.annotation.RetentionPolicy.RUNTIME;
* <p>
* 详情见: https://redkale.org
*
* @see org.redkale.source.FilterBean
* @see org.redkale.source.FilterNode
*
* @author zhangjx
*/
@Inherited

View File

@@ -20,7 +20,7 @@ import org.redkale.util.LambdaSupplier;
public final class FilterNodes {
private FilterNodes() {
//do nothind
//do nothing
}
public static FilterNode create(String column, Serializable value) {