diff --git a/backend/db/core_dataset_group.sql b/backend/db/core_dataset_group.sql new file mode 100644 index 0000000..700a607 --- /dev/null +++ b/backend/db/core_dataset_group.sql @@ -0,0 +1,224 @@ +/* + Navicat Premium Dump SQL + + Source Server : 华为云-mysql数据库 + Source Server Type : MySQL + Source Server Version : 80403 (8.4.3) + Source Host : 121.37.111.42:3306 + Source Schema : dataease + + Target Server Type : MySQL + Target Server Version : 80403 (8.4.3) + File Encoding : 65001 + + Date: 30/05/2025 14:22:13 +*/ + +SET NAMES utf8mb4; +SET FOREIGN_KEY_CHECKS = 0; + +-- ---------------------------- +-- Table structure for core_dataset_group +-- ---------------------------- +DROP TABLE IF EXISTS `core_dataset_group`; +CREATE TABLE `core_dataset_group` ( + `id` bigint NOT NULL COMMENT 'ID', + `app_id` varchar(40) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '应用ID 关联应用系统', + `name` varchar(128) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '名称', + `pid` bigint NULL DEFAULT NULL COMMENT '父级ID', + `level` int NULL DEFAULT 0 COMMENT '当前分组处于第几级', + `node_type` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NOT NULL COMMENT 'node类型:folder or dataset', + `type` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT 'sql,union', + `mode` int NULL DEFAULT 0 COMMENT '连接模式:0-直连,1-同步(包括excel、api等数据存在de中的表)', + `info` longtext CHARACTER SET utf16 COLLATE utf16_general_ci NULL COMMENT '关联关系树', + `create_by` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '创建人ID', + `create_time` bigint NULL DEFAULT NULL COMMENT '创建时间', + `qrtz_instance` varchar(1024) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT 'Quartz 实例 ID', + `sync_status` varchar(45) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '同步状态', + `update_by` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '更新人ID', + `last_update_time` bigint NULL DEFAULT 0 COMMENT '最后同步时间', + `union_sql` longtext CHARACTER SET utf16 COLLATE utf16_general_ci NULL COMMENT '关联sql', + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf16 COLLATE = utf16_general_ci COMMENT = '数据集分组表' ROW_FORMAT = Dynamic; + +-- ---------------------------- +-- Table structure for core_dataset_table +-- ---------------------------- +DROP TABLE IF EXISTS `core_dataset_table`; +CREATE TABLE `core_dataset_table` ( + `id` bigint NOT NULL COMMENT 'ID', + `app_id` varchar(40) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '应用ID 关联应用系统', + `name` varchar(128) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '名称', + `table_name` varchar(128) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '物理表名', + `datasource_id` bigint NULL DEFAULT NULL COMMENT '数据源ID', + `dataset_group_id` bigint NOT NULL COMMENT '数据集ID', + `type` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT 'db,sql,union,excel,api', + `info` longtext CHARACTER SET utf16 COLLATE utf16_general_ci NULL COMMENT '表原始信息,表名,sql等', + `sql_variable_details` longtext CHARACTER SET utf16 COLLATE utf16_general_ci NULL COMMENT 'SQL参数', + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf16 COLLATE = utf16_general_ci COMMENT = 'table数据集' ROW_FORMAT = Dynamic; + +-- ---------------------------- +-- Table structure for core_dataset_table_field +-- ---------------------------- +DROP TABLE IF EXISTS 
`core_dataset_table_field`; +CREATE TABLE `core_dataset_table_field` ( + `id` bigint NOT NULL COMMENT 'ID', + `datasource_id` bigint NULL DEFAULT NULL COMMENT '数据源ID', + `dataset_table_id` bigint NULL DEFAULT NULL COMMENT '数据表ID', + `dataset_group_id` bigint NULL DEFAULT NULL COMMENT '数据集ID', + `chart_id` bigint NULL DEFAULT NULL COMMENT '图表ID', + `origin_name` longtext CHARACTER SET utf16 COLLATE utf16_general_ci NOT NULL COMMENT '原始字段名', + `name` longtext CHARACTER SET utf16 COLLATE utf16_general_ci NULL COMMENT '字段名用于展示', + `description` longtext CHARACTER SET utf16 COLLATE utf16_general_ci NULL COMMENT '描述', + `dataease_name` varchar(255) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT 'de字段名用作唯一标识', + `field_short_name` varchar(255) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT 'de字段别名', + `group_list` longtext CHARACTER SET utf16 COLLATE utf16_general_ci NULL COMMENT '分组设置', + `other_group` longtext CHARACTER SET utf16 COLLATE utf16_general_ci NULL COMMENT '未分组的值', + `group_type` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '维度/指标标识 d:维度,q:指标', + `type` varchar(255) CHARACTER SET utf16 COLLATE utf16_general_ci NOT NULL COMMENT '原始字段类型', + `size` int NULL DEFAULT NULL COMMENT '字段长度(允许为空,默认0)', + `de_type` int NOT NULL COMMENT 'dataease字段类型:0-文本,1-时间,2-整型数值,3-浮点数值,4-布尔,5-地理位置,6-二进制,7-URL', + `de_extract_type` int NOT NULL COMMENT 'de记录的原始类型', + `ext_field` int NULL DEFAULT NULL COMMENT '是否扩展字段 0原始 1复制 2计算字段...', + `checked` tinyint(1) NULL DEFAULT 1 COMMENT '是否选中', + `column_index` int NULL DEFAULT NULL COMMENT '列位置', + `last_sync_time` bigint NULL DEFAULT NULL COMMENT '同步时间', + `accuracy` int NULL DEFAULT 0 COMMENT '精度', + `date_format` varchar(255) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '时间字段类型', + `date_format_type` varchar(255) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '时间格式类型', + `params` text CHARACTER SET utf16 COLLATE utf16_general_ci NULL COMMENT '计算字段参数', + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf16 COLLATE = utf16_general_ci COMMENT = 'table数据集表字段' ROW_FORMAT = Dynamic; + +-- ---------------------------- +-- Table structure for core_dataset_table_sql_log +-- ---------------------------- +DROP TABLE IF EXISTS `core_dataset_table_sql_log`; +CREATE TABLE `core_dataset_table_sql_log` ( + `id` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NOT NULL DEFAULT '' COMMENT 'ID', + `table_id` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NOT NULL DEFAULT '' COMMENT '数据集SQL节点ID', + `start_time` bigint NULL DEFAULT NULL COMMENT '开始时间', + `end_time` bigint NULL DEFAULT NULL COMMENT '结束时间', + `spend` bigint NULL DEFAULT NULL COMMENT '耗时(毫秒)', + `sql` longtext CHARACTER SET utf16 COLLATE utf16_general_ci NOT NULL COMMENT '详细信息', + `status` varchar(45) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '状态', + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf16 COLLATE = utf16_general_ci COMMENT = 'table数据集查询sql日志' ROW_FORMAT = Dynamic; + +-- ---------------------------- +-- Table structure for core_datasource +-- ---------------------------- +DROP TABLE IF EXISTS `core_datasource`; +CREATE TABLE `core_datasource` ( + `id` bigint UNSIGNED NOT NULL AUTO_INCREMENT COMMENT '主键', + `app_id` varchar(40) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '应用ID 关联应用系统', + `name` varchar(255) CHARACTER SET utf16 COLLATE utf16_general_ci NOT 
NULL COMMENT '名称', + `description` varchar(255) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '描述', + `type` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NOT NULL COMMENT '类型', + `pid` bigint NULL DEFAULT NULL COMMENT '父级ID', + `edit_type` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '更新方式:0:替换;1:追加', + `configuration` longtext CHARACTER SET utf16 COLLATE utf16_general_ci NOT NULL COMMENT '详细信息', + `create_time` bigint NOT NULL COMMENT '创建时间', + `update_time` bigint NOT NULL COMMENT '更新时间', + `update_by` bigint NULL DEFAULT NULL COMMENT '变更人', + `create_by` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '创建人ID', + `status` longtext CHARACTER SET utf16 COLLATE utf16_general_ci NULL COMMENT '状态', + `qrtz_instance` longtext CHARACTER SET utf16 COLLATE utf16_general_ci NULL COMMENT '状态', + `task_status` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '任务状态', + `enable_data_fill` tinyint NULL DEFAULT 0 COMMENT '启用数据填报功能', + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB AUTO_INCREMENT = 1915350839984336899 CHARACTER SET = utf16 COLLATE = utf16_general_ci COMMENT = '数据源表' ROW_FORMAT = Dynamic; + +-- ---------------------------- +-- Table structure for core_datasource_task +-- ---------------------------- +DROP TABLE IF EXISTS `core_datasource_task`; +CREATE TABLE `core_datasource_task` ( + `id` bigint UNSIGNED NOT NULL AUTO_INCREMENT COMMENT '主键', + `ds_id` bigint NOT NULL COMMENT '数据源ID', + `name` varchar(255) CHARACTER SET utf16 COLLATE utf16_general_ci NOT NULL COMMENT '任务名称', + `update_type` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NOT NULL COMMENT '更新方式', + `start_time` bigint NULL DEFAULT NULL COMMENT '开始时间', + `sync_rate` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NOT NULL COMMENT '执行频率:0 一次性 1 cron', + `cron` varchar(255) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT 'cron表达式', + `simple_cron_value` bigint NULL DEFAULT NULL COMMENT '简单重复间隔', + `simple_cron_type` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '简单重复类型:分、时、天', + `end_limit` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '结束限制 0 无限制 1 设定结束时间', + `end_time` bigint NULL DEFAULT NULL COMMENT '结束时间', + `create_time` bigint NULL DEFAULT NULL COMMENT '创建时间', + `last_exec_time` bigint NULL DEFAULT NULL COMMENT '上次执行时间', + `last_exec_status` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '上次执行结果', + `extra_data` longtext CHARACTER SET utf16 COLLATE utf16_general_ci NULL COMMENT '额外数据', + `task_status` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '任务状态', + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf16 COLLATE = utf16_general_ci COMMENT = '数据源定时同步任务' ROW_FORMAT = Dynamic; + +-- ---------------------------- +-- Table structure for core_datasource_task_log +-- ---------------------------- +DROP TABLE IF EXISTS `core_datasource_task_log`; +CREATE TABLE `core_datasource_task_log` ( + `id` bigint UNSIGNED NOT NULL AUTO_INCREMENT COMMENT '主键', + `ds_id` bigint NOT NULL COMMENT '数据源ID', + `task_id` bigint NULL DEFAULT NULL COMMENT '任务ID', + `start_time` bigint NULL DEFAULT NULL COMMENT '开始时间', + `end_time` bigint NULL DEFAULT NULL COMMENT '结束时间', + `task_status` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NOT NULL COMMENT '执行状态', + 
`table_name` varchar(255) CHARACTER SET utf16 COLLATE utf16_general_ci NOT NULL COMMENT '表名', + `info` longtext CHARACTER SET utf16 COLLATE utf16_general_ci NULL COMMENT '错误信息', + `create_time` bigint NULL DEFAULT NULL COMMENT '创建时间', + `trigger_type` varchar(45) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '更新频率类型', + PRIMARY KEY (`id`) USING BTREE, + INDEX `idx_dataset_table_task_log_ds_id`(`ds_id` ASC) USING BTREE, + INDEX `idx_dataset_table_task_log_task_id`(`task_id` ASC) USING BTREE, + INDEX `idx_dataset_table_task_log_A`(`ds_id` ASC, `table_name` ASC, `start_time` ASC) USING BTREE +) ENGINE = InnoDB AUTO_INCREMENT = 1125460897473630209 CHARACTER SET = utf16 COLLATE = utf16_general_ci COMMENT = '数据源定时同步任务执行日志' ROW_FORMAT = Dynamic; + +-- ---------------------------- +-- Table structure for core_de_engine +-- ---------------------------- +DROP TABLE IF EXISTS `core_de_engine`; +CREATE TABLE `core_de_engine` ( + `id` bigint UNSIGNED NOT NULL AUTO_INCREMENT COMMENT '主键', + `name` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '名称', + `description` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '描述', + `type` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NOT NULL COMMENT '类型', + `configuration` longtext CHARACTER SET utf16 COLLATE utf16_general_ci NOT NULL COMMENT '详细信息', + `create_time` bigint NULL DEFAULT NULL COMMENT 'Create timestamp', + `update_time` bigint NULL DEFAULT NULL COMMENT 'Update timestamp', + `create_by` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '创建人ID', + `status` varchar(45) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '状态', + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf16 COLLATE = utf16_general_ci COMMENT = '数据引擎' ROW_FORMAT = Dynamic; + +-- ---------------------------- +-- Table structure for core_driver +-- ---------------------------- +DROP TABLE IF EXISTS `core_driver`; +CREATE TABLE `core_driver` ( + `id` bigint UNSIGNED NOT NULL AUTO_INCREMENT COMMENT '主键', + `name` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NOT NULL COMMENT '名称', + `create_time` bigint NOT NULL COMMENT '创建时间', + `type` varchar(255) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '数据源类型', + `driver_class` varchar(255) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '驱动类', + `description` varchar(255) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '描述', + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf16 COLLATE = utf16_general_ci COMMENT = '驱动' ROW_FORMAT = Dynamic; + +-- ---------------------------- +-- Table structure for core_driver_jar +-- ---------------------------- +DROP TABLE IF EXISTS `core_driver_jar`; +CREATE TABLE `core_driver_jar` ( + `id` bigint UNSIGNED NOT NULL AUTO_INCREMENT COMMENT '主键', + `de_driver_id` varchar(50) CHARACTER SET utf16 COLLATE utf16_general_ci NOT NULL COMMENT '驱动主键', + `file_name` varchar(255) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '名称', + `version` varchar(255) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '版本', + `driver_class` longtext CHARACTER SET utf16 COLLATE utf16_general_ci NULL COMMENT '驱动类', + `trans_name` varchar(255) CHARACTER SET utf16 COLLATE utf16_general_ci NULL DEFAULT NULL COMMENT '替换后的 jar 包名称', + `is_trans_name` tinyint(1) NULL DEFAULT NULL 
COMMENT '是否将上传 jar 包替换了名称(1-是,0-否)', + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf16 COLLATE = utf16_general_ci COMMENT = '驱动详情' ROW_FORMAT = Dynamic; + +SET FOREIGN_KEY_CHECKS = 1; diff --git a/backend/pom.xml b/backend/pom.xml index 4303a60..aeb0487 100644 --- a/backend/pom.xml +++ b/backend/pom.xml @@ -5,7 +5,7 @@ org.springframework.boot spring-boot-starter-parent - 3.0.13 + 3.3.0 com.stdproject @@ -14,6 +14,9 @@ stdproject Standard Project Backend + 3.3.0 + 21 + 21 21 3.5.3 0.11.5 @@ -23,6 +26,7 @@ org.springframework.boot spring-boot-starter-web + ${spring-boot.version} org.springframework.boot @@ -45,6 +49,13 @@ cache-api + + io.gisbi + sdk-bundle + 2.0.0 + system + ${project.basedir}/libs/sdk-bundle-2.0.jar + com.baomidou diff --git a/backend/src/main/java/com/stdproject/ProjectApplication.java b/backend/src/main/java/com/stdproject/ProjectApplication.java index 1ecc101..f936d25 100644 --- a/backend/src/main/java/com/stdproject/ProjectApplication.java +++ b/backend/src/main/java/com/stdproject/ProjectApplication.java @@ -13,7 +13,7 @@ import org.springframework.transaction.annotation.EnableTransactionManagement; * @author StdProject * @since 2023-12-07 */ -@SpringBootApplication +@SpringBootApplication(scanBasePackages = {"com.stdproject", "io.gisbi.extensions.datasource.utils", "io.gisbi.i18n"}) @MapperScan("com.stdproject.mapper") @EnableCaching @EnableAsync diff --git a/backend/src/main/java/com/stdproject/config/ThreadPoolConfig.java b/backend/src/main/java/com/stdproject/config/ThreadPoolConfig.java new file mode 100644 index 0000000..9b84caf --- /dev/null +++ b/backend/src/main/java/com/stdproject/config/ThreadPoolConfig.java @@ -0,0 +1,22 @@ +package com.stdproject.config; + +import io.gisbi.utils.CommonThreadPool; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * 线程池配置类 + * + * @author StdProject + */ +@Configuration +public class ThreadPoolConfig { + + /** + * 配置CommonThreadPool Bean + */ + @Bean + public CommonThreadPool commonThreadPool() { + return new CommonThreadPool(); + } +} \ No newline at end of file diff --git a/backend/src/main/java/com/stdproject/controller/DynamicDataController.java b/backend/src/main/java/com/stdproject/controller/DynamicDataController.java new file mode 100644 index 0000000..d21f0d0 --- /dev/null +++ b/backend/src/main/java/com/stdproject/controller/DynamicDataController.java @@ -0,0 +1,86 @@ +package com.stdproject.controller; + +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.stdproject.common.OperationLog; +import com.stdproject.service.IDynamicDataService; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.tags.Tag; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import java.util.Map; + +/** + *

+ * <p>
+ * 动态数据管理 前端控制器
+ * </p>
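+ * <p>
+ * Illustrative call sketch: the JSON below follows the tableData format documented on
+ * {@code IDynamicDataService}; the datasource id and the {@code user} table are placeholder values.
+ * </p>
+ * <pre>
+ * POST /api/dynamicdata/addTableData
+ *   datasourceId = 1001
+ *   tableData = {
+ *     "tableName": "user",
+ *     "data": [
+ *       {"fieldName": "id",   "fieldType": "varchar", "IsPrimaryKey": true, "fieldValue": "0001"},
+ *       {"fieldName": "name", "fieldType": "varchar", "fieldValue": "张三"}
+ *     ]
+ *   }
+ * </pre>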

+ * + * @author StdProject + * @since 2024-01-01 + */ +@Tag(name = "动态数据管理", description = "提供对数据源表数据的增删改查操作") +@RestController +@RequestMapping("/api/dynamicdata") +public class DynamicDataController { + + @Autowired + private IDynamicDataService dynamicDataService; + + @Operation(summary = "向指定数据源的表中添加数据") + @OperationLog(type = "01", module = "动态数据管理", description = "添加表数据") + @PostMapping("addTableData") + public boolean addTableData(Long datasourceId, @RequestParam("tableData") String tableData) throws Exception { + boolean result = dynamicDataService.addTableData( + datasourceId, + tableData + ); + return result; + } + @Operation(summary = "根据主键查询表数据") + @OperationLog(type = "06", module = "动态数据管理", description = "根据主键查询表数据") + @PostMapping("getTableDataByPk") + public Map getTableDataByPk(Long datasourceId,@RequestParam("whereJson") String whereJson) throws Exception { + Map result=dynamicDataService.getTableDataByPk( + datasourceId, + whereJson + ); + return result; + } + + @Operation(summary = "更新表数据") + @OperationLog(type = "02", module = "动态数据管理", description = "更新表数据") + @PostMapping("updateTableData") + public boolean updateTableData(Long datasourceId, @RequestParam("tableData") String tableData) throws Exception { + boolean result = dynamicDataService.updateTableData( + datasourceId, + tableData + ); + return result; + } + + @Operation(summary = "删除表数据") + @OperationLog(type = "03", module = "动态数据管理", description = "删除表数据") + @PostMapping("deleteTableData") + public boolean deleteTableData(Long datasourceId, @RequestParam("whereJson") String whereJson) throws Exception { + boolean result = dynamicDataService.deleteTableData( + datasourceId, + whereJson + ); + return result; + } + + @Operation(summary = "分页查询表数据") + @OperationLog(type = "06", module = "动态数据管理", description = "分页查询表数据") + @PostMapping("queryTableDataPaged") + public Page> queryTableDataPaged(Long datasourceId, @RequestParam("queryJson") String queryJson) throws Exception { + Page> result = dynamicDataService.queryTableDataPaged( + datasourceId, + queryJson + ); + return result; + } + +} \ No newline at end of file diff --git a/backend/src/main/java/com/stdproject/controller/EngineRequest.java b/backend/src/main/java/com/stdproject/controller/EngineRequest.java new file mode 100644 index 0000000..257a713 --- /dev/null +++ b/backend/src/main/java/com/stdproject/controller/EngineRequest.java @@ -0,0 +1,62 @@ +package com.stdproject.controller; + + +import com.stdproject.entity.CoreDeEngine; +import lombok.Data; +import org.springframework.util.StringUtils; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +@Data +public class EngineRequest { + private final String REG_WITH_SQL_FRAGMENT = "((?i)WITH[\\s\\S]+(?i)AS?\\s*\\([\\s\\S]+\\))\\s*(?i)SELECT"; + private Pattern WITH_SQL_FRAGMENT = Pattern.compile("((?i)WITH[\\s\\S]+(?i)AS?\\s*\\([\\s\\S]+\\))\\s*(?i)SELECT"); + protected String query; + protected String table; + protected CoreDeEngine engine; + private Integer pageSize; + private Integer page; + private Integer realSize; + private Integer fetchSize = 10000; + private boolean pageable = false; + private boolean previewData = false; + private boolean totalPageFlag; + + public EngineRequest() { + } + + public String getQuery() { + return this.rebuildSqlWithFragment(this.query); + } + + public void setQuery(String query) { + this.query = query; + } + + private String rebuildSqlWithFragment(String sql) { + if (!sql.toLowerCase().startsWith("with")) { + Matcher matcher = 
this.WITH_SQL_FRAGMENT.matcher(sql); + if (matcher.find()) { + String withFragment = matcher.group(); + if (!StringUtils.isEmpty(withFragment)) { + if (withFragment.length() > 6) { + int lastSelectIndex = withFragment.length() - 6; + sql = sql.replace(withFragment, withFragment.substring(lastSelectIndex)); + withFragment = withFragment.substring(0, lastSelectIndex); + } + + sql = withFragment + " " + sql; + sql = sql.replaceAll(" {2,}", " "); + } + } + } + + return sql; + } + + public String getREG_WITH_SQL_FRAGMENT() { + this.getClass(); + return "((?i)WITH[\\s\\S]+(?i)AS?\\s*\\([\\s\\S]+\\))\\s*(?i)SELECT"; + } +} diff --git a/backend/src/main/java/com/stdproject/entity/CoreDatasource.java b/backend/src/main/java/com/stdproject/entity/CoreDatasource.java new file mode 100644 index 0000000..6178e4e --- /dev/null +++ b/backend/src/main/java/com/stdproject/entity/CoreDatasource.java @@ -0,0 +1,250 @@ +package com.stdproject.entity; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; + +import java.io.Serializable; + +/** + *

+ * <p>
+ * 数据源表
+ * </p>
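+ * <p>
+ * Example {@code configuration} value for a MySQL datasource (illustrative sketch: the key set
+ * mirrors what {@code EngineManage.initLocalDataSource} assembles; all values are placeholders):
+ * </p>
+ * <pre>
+ * {"host":"127.0.0.1","port":3306,"dataBase":"demo","username":"root","password":"***","extraParams":""}
+ * </pre>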

+ * + * @Author bi-coder + * @since 2024-07-09 + */ +@TableName("core_datasource") +public class CoreDatasource implements Serializable { + + private static final long serialVersionUID = 1L; + + /** + * 主键 + */ + @TableId(value = "id", type = IdType.AUTO) + private Long id; + /** + * 应用ID + */ + private String appId; + /** + * 名称 + */ + private String name; + + /** + * 描述 + */ + private String description; + + /** + * 类型 + */ + private String type; + + /** + * 父级ID + */ + private Long pid; + + /** + * 更新方式:0:替换;1:追加 + */ + private String editType; + + /** + * 详细信息 + */ + private String configuration; + + /** + * 创建时间 + */ + private Long createTime; + + /** + * 更新时间 + */ + private Long updateTime; + + /** + * 变更人 + */ + private Long updateBy; + + /** + * 创建人ID + */ + private String createBy; + + /** + * 状态 + */ + private String status; + + /** + * 状态 + */ + private String qrtzInstance; + + /** + * 任务状态 + */ + private String taskStatus; + + /** + * 开启数据填报 + */ + private Boolean enableDataFill; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public Long getPid() { + return pid; + } + + public void setPid(Long pid) { + this.pid = pid; + } + + public String getEditType() { + return editType; + } + + public void setEditType(String editType) { + this.editType = editType; + } + + public String getConfiguration() { + return configuration; + } + + public void setConfiguration(String configuration) { + this.configuration = configuration; + } + + public Long getCreateTime() { + return createTime; + } + + public void setCreateTime(Long createTime) { + this.createTime = createTime; + } + + public Long getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Long updateTime) { + this.updateTime = updateTime; + } + + public Long getUpdateBy() { + return updateBy; + } + + public void setUpdateBy(Long updateBy) { + this.updateBy = updateBy; + } + + public String getCreateBy() { + return createBy; + } + + public void setCreateBy(String createBy) { + this.createBy = createBy; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } + + public String getQrtzInstance() { + return qrtzInstance; + } + + public void setQrtzInstance(String qrtzInstance) { + this.qrtzInstance = qrtzInstance; + } + + public String getTaskStatus() { + return taskStatus; + } + + public void setTaskStatus(String taskStatus) { + this.taskStatus = taskStatus; + } + + public Boolean getEnableDataFill() { + return enableDataFill; + } + + public void setEnableDataFill(Boolean enableDataFill) { + this.enableDataFill = enableDataFill; + } + + @Override + public String toString() { + return "CoreDatasource{" + + "id = " + id + + ", appId = " + appId + + ", name = " + name + + ", description = " + description + + ", type = " + type + + ", pid = " + pid + + ", editType = " + editType + + ", configuration = " + configuration + + ", createTime = " + createTime + + ", updateTime = " + updateTime + + ", updateBy = " + updateBy + + ", createBy = " + createBy + + ", status = " + status + + ", qrtzInstance = " + qrtzInstance + + ", taskStatus = " 
+ taskStatus + + ", enableDataFill = " + enableDataFill + + "}"; + } + + public String getAppId() { + return appId; + } + + public void setAppId(String appId) { + this.appId = appId; + } +} diff --git a/backend/src/main/java/com/stdproject/entity/CoreDeEngine.java b/backend/src/main/java/com/stdproject/entity/CoreDeEngine.java new file mode 100644 index 0000000..8b66b48 --- /dev/null +++ b/backend/src/main/java/com/stdproject/entity/CoreDeEngine.java @@ -0,0 +1,154 @@ +package com.stdproject.entity; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; + +import java.io.Serializable; + +/** + *

+ * + *
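+ * 数据引擎(对应 core_de_engine 表)
+ * <p>
+ * Example {@code configuration} for an H2 engine (illustrative sketch: field names follow the
+ * {@code H2} setters used in {@code EngineManage.initSimpleEngine}; the JDBC URL and credentials
+ * are placeholders):
+ * </p>
+ * <pre>
+ * {"jdbc":"jdbc:h2:/opt/gisbi/desktop_data;AUTO_SERVER=TRUE;MODE=MySQL","dataBase":"PUBLIC","username":"sa","password":"***"}
+ * </pre>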

+ * + * @Author bi-coder + * @since 2023-04-18 + */ +@TableName("core_de_engine") +public class CoreDeEngine implements Serializable { + + private static final long serialVersionUID = 1L; + + /** + * 主键 + */ + @TableId(value = "id", type = IdType.AUTO) + private Long id; + + /** + * 名称 + */ + private String name; + + /** + * 描述 + */ + private String description; + + /** + * 类型 + */ + private String type; + + /** + * 详细信息 + */ + private String configuration; + + /** + * Create timestamp + */ + private Long createTime; + + /** + * Update timestamp + */ + private Long updateTime; + + /** + * 创建人ID + */ + private String createBy; + + /** + * 状态 + */ + private String status; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getConfiguration() { + return configuration; + } + + public void setConfiguration(String configuration) { + this.configuration = configuration; + } + + public Long getCreateTime() { + return createTime; + } + + public void setCreateTime(Long createTime) { + this.createTime = createTime; + } + + public Long getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Long updateTime) { + this.updateTime = updateTime; + } + + public String getCreateBy() { + return createBy; + } + + public void setCreateBy(String createBy) { + this.createBy = createBy; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } + + @Override + public String toString() { + return "CoreDeEngine{" + + "id = " + id + + ", name = " + name + + ", description = " + description + + ", type = " + type + + ", configuration = " + configuration + + ", createTime = " + createTime + + ", updateTime = " + updateTime + + ", createBy = " + createBy + + ", status = " + status + + "}"; + } +} diff --git a/backend/src/main/java/com/stdproject/entity/CoreDriver.java b/backend/src/main/java/com/stdproject/entity/CoreDriver.java new file mode 100644 index 0000000..d3706a0 --- /dev/null +++ b/backend/src/main/java/com/stdproject/entity/CoreDriver.java @@ -0,0 +1,112 @@ +package com.stdproject.entity; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; + +import java.io.Serializable; + +/** + *

+ * <p>
+ * 驱动
+ * </p>
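+ * <p>
+ * Example row (illustrative only; column meanings come from the {@code core_driver} DDL, the
+ * concrete values are placeholders): {@code name = "MySQL 8 驱动"}, {@code type = "mysql"},
+ * {@code driverClass = "com.mysql.cj.jdbc.Driver"}.
+ * </p>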

+ * + * @Author bi-coder + * @since 2023-04-18 + */ +@TableName("core_driver") +public class CoreDriver implements Serializable { + + private static final long serialVersionUID = 1L; + + /** + * 主键 + */ + @TableId(value = "id", type = IdType.AUTO) + private Long id; + + /** + * 名称 + */ + private String name; + + /** + * 创健时间 + */ + private Long createTime; + + /** + * 数据源类型 + */ + private String type; + + /** + * 驱动类 + */ + private String driverClass; + + /** + * 描述 + */ + private String description; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Long getCreateTime() { + return createTime; + } + + public void setCreateTime(Long createTime) { + this.createTime = createTime; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getDriverClass() { + return driverClass; + } + + public void setDriverClass(String driverClass) { + this.driverClass = driverClass; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + @Override + public String toString() { + return "CoreDriver{" + + "id = " + id + + ", name = " + name + + ", createTime = " + createTime + + ", type = " + type + + ", driverClass = " + driverClass + + ", description = " + description + + "}"; + } +} diff --git a/backend/src/main/java/com/stdproject/entity/CoreDriverJar.java b/backend/src/main/java/com/stdproject/entity/CoreDriverJar.java new file mode 100644 index 0000000..86fa1c1 --- /dev/null +++ b/backend/src/main/java/com/stdproject/entity/CoreDriverJar.java @@ -0,0 +1,120 @@ +package com.stdproject.entity; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; + +import java.io.Serializable; + +/** + *

+ * <p>
+ * 驱动详情
+ * </p>

+ * + * @Author bi-coder + * @since 2023-04-17 + */ +@TableName("core_driver_jar") +public class CoreDriverJar implements Serializable { + + private static final long serialVersionUID = 1L; + + /** + * 主键 + */ + @TableId(value = "id", type = IdType.AUTO) + private Long id; + + /** + * 驱动主键 + */ + private String deDriverId; + + /** + * 名称 + */ + private String fileName; + + /** + * 版本 + */ + private String version; + + /** + * 驱动类 + */ + private String driverClass; + + private String transName; + + private Boolean isTransName; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getDeDriverId() { + return deDriverId; + } + + public void setDeDriverId(String deDriverId) { + this.deDriverId = deDriverId; + } + + public String getFileName() { + return fileName; + } + + public void setFileName(String fileName) { + this.fileName = fileName; + } + + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } + + public String getDriverClass() { + return driverClass; + } + + public void setDriverClass(String driverClass) { + this.driverClass = driverClass; + } + + public String getTransName() { + return transName; + } + + public void setTransName(String transName) { + this.transName = transName; + } + + public Boolean getIsTransName() { + return isTransName; + } + + public void setIsTransName(Boolean isTransName) { + this.isTransName = isTransName; + } + + @Override + public String toString() { + return "CoreDriverJar{" + + "id = " + id + + ", deDriverId = " + deDriverId + + ", fileName = " + fileName + + ", version = " + version + + ", driverClass = " + driverClass + + ", transName = " + transName + + ", isTransName = " + isTransName + + "}"; + } +} diff --git a/backend/src/main/java/com/stdproject/mapper/CoreDatasourceMapper.java b/backend/src/main/java/com/stdproject/mapper/CoreDatasourceMapper.java new file mode 100644 index 0000000..2e72b2e --- /dev/null +++ b/backend/src/main/java/com/stdproject/mapper/CoreDatasourceMapper.java @@ -0,0 +1,18 @@ +package com.stdproject.mapper; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.stdproject.entity.CoreDatasource; +import org.apache.ibatis.annotations.Mapper; + +/** + *

+ * <p>
+ * 数据源表 Mapper 接口
+ * </p>
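+ * <p>
+ * Typical lookup, as done in {@code DynamicDataServiceImpl} (sketch; {@code datasourceId} is a
+ * caller-supplied id):
+ * </p>
+ * <pre>
+ * CoreDatasource ds = coreDatasourceMapper.selectById(datasourceId);
+ * if (ds == null) {
+ *     BusinessException.throwException("数据源不存在");
+ * }
+ * </pre>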

+ * + * @Author bi-coder + * @since 2024-07-09 + */ +@Mapper +public interface CoreDatasourceMapper extends BaseMapper { + +} diff --git a/backend/src/main/java/com/stdproject/mapper/CoreDeEngineMapper.java b/backend/src/main/java/com/stdproject/mapper/CoreDeEngineMapper.java new file mode 100644 index 0000000..bd5fe0b --- /dev/null +++ b/backend/src/main/java/com/stdproject/mapper/CoreDeEngineMapper.java @@ -0,0 +1,19 @@ +package com.stdproject.mapper; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; + +import com.stdproject.entity.CoreDeEngine; +import org.apache.ibatis.annotations.Mapper; + +/** + *

+ * <p>
+ * Mapper 接口
+ * </p>

+ * + * @Author bi-coder + * @since 2023-04-18 + */ +@Mapper +public interface CoreDeEngineMapper extends BaseMapper { + +} diff --git a/backend/src/main/java/com/stdproject/service/IDynamicDataService.java b/backend/src/main/java/com/stdproject/service/IDynamicDataService.java new file mode 100644 index 0000000..4e47e27 --- /dev/null +++ b/backend/src/main/java/com/stdproject/service/IDynamicDataService.java @@ -0,0 +1,129 @@ +package com.stdproject.service; + +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + +import java.util.Map; + +/** + * 动态数据服务接口 + * 提供对数据源表数据的增删改查操作 + */ +public interface IDynamicDataService { + + /** + * 向指定数据源的表中添加数据 + * + * @param datasourceId 数据源ID + * @param tableData 表数据JSON字符串,格式: + * { + * "tableName": "user", + * "data": [ + * { + * "fieldName": "id", + * "fieldType": "varchar", + * "IsPrimaryKey": true, + * "fieldValue": "0001" + * }, + * { + * "fieldName": "name", + * "fieldType": "varchar", + * "fieldValue": "张三" + * } + * ] + * } + * @return 是否添加成功 + * @throws Exception 操作异常 + */ + boolean addTableData(Long datasourceId, String tableData) throws Exception; + + /** + * 根据主键查询表数据 + * + * @param datasourceId 数据源ID + * @param condition 查询条件JSON字符串,格式: + * { + * "tableName": "user", + * "key": [ + * { + * "fieldName": "id", + * "fieldValue": "0001" + * } + * ] + * } + * @return 查询到的数据Map,如果没有找到则返回null + * @throws Exception 操作异常 + */ + Map getTableDataByPk(Long datasourceId, String condition) throws Exception; + + /** + * 更新表数据 + * + * @param datasourceId 数据源ID + * @param tableData 更新数据JSON字符串,格式: + * { + * "tableName": "user", + * "key": [ + * { + * "fieldName": "id", + * "fieldValue": "0001" + * } + * ], + * "data": [ + * { + * "fieldName": "name", + * "fieldType": "varchar", + * "fieldValue": "李四" + * } + * ] + * } + * @return 是否更新成功 + * @throws Exception 操作异常 + */ + boolean updateTableData(Long datasourceId, String tableData) throws Exception; + + /** + * 删除表数据 + * + * @param datasourceId 数据源ID + * @param condition 删除条件JSON字符串,格式: + * { + * "tableName": "user", + * "key": [ + * { + * "fieldName": "id", + * "fieldValue": "0001" + * } + * ] + * } + * @return 是否删除成功 + * @throws Exception 操作异常 + */ + boolean deleteTableData(Long datasourceId, String condition) throws Exception; + + /** + * 分页查询表数据 + * + * @param datasourceId 数据源ID + * @param condition 查询条件JSON字符串,格式: + * { + * "tableName": "user", + * "pageNum": 1, + * "pageSize": 10, + * "conditions": [ + * { + * "field": "name", + * "operator": "like", + * "value": "张" + * }, + * { + * "field": "age", + * "operator": ">", + * "value": 18 + * } + * ] + * } + * @return 分页查询结果 + * @throws Exception 操作异常 + */ + Page> queryTableDataPaged(Long datasourceId, String condition) throws Exception; +} \ No newline at end of file diff --git a/backend/src/main/java/com/stdproject/service/impl/AppUserServiceImpl.java b/backend/src/main/java/com/stdproject/service/impl/AppUserServiceImpl.java index 6860a8b..800536f 100644 --- a/backend/src/main/java/com/stdproject/service/impl/AppUserServiceImpl.java +++ b/backend/src/main/java/com/stdproject/service/impl/AppUserServiceImpl.java @@ -22,6 +22,7 @@ import java.time.LocalDateTime; * @author StdProject * @since 2023-12-07 */ + @Service public class AppUserServiceImpl extends ServiceImpl implements IAppUserService { diff --git a/backend/src/main/java/com/stdproject/service/impl/DynamicDataServiceImpl.java b/backend/src/main/java/com/stdproject/service/impl/DynamicDataServiceImpl.java new file mode 100644 index 0000000..3374450 --- /dev/null +++ 
b/backend/src/main/java/com/stdproject/service/impl/DynamicDataServiceImpl.java @@ -0,0 +1,427 @@ +package com.stdproject.service.impl; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.stdproject.common.BusinessException; +import com.stdproject.entity.CoreDatasource; +import com.stdproject.mapper.CoreDatasourceMapper; +import com.stdproject.service.IDynamicDataService; +import io.gisbi.extensions.datasource.dto.DatasourceRequest; +import io.gisbi.extensions.datasource.dto.DatasourceSchemaDTO; +import io.gisbi.extensions.datasource.dto.TableField; +import io.gisbi.extensions.datasource.factory.ProviderFactory; +import io.gisbi.extensions.datasource.provider.Provider; +import io.gisbi.utils.BeanUtils; +import io.gisbi.utils.IDUtils; +import io.gisbi.utils.JsonUtil; +import jakarta.annotation.Resource; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; +import org.springframework.stereotype.Service; +import org.springframework.util.CollectionUtils; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +@Slf4j +@Service +public class DynamicDataServiceImpl implements IDynamicDataService { + @Resource + private CoreDatasourceMapper coreDatasourceMapper; + @Override + public boolean addTableData(Long datasourceId, String tableData) throws Exception { + // 根据数据源 id 查询数据源信息,调用通用数据源执行器 + CoreDatasource coreDatasource = coreDatasourceMapper.selectById(datasourceId); + if (coreDatasource == null) { + BusinessException.throwException("数据源不存在"); + } + // 解析 tableData JSON 字符串 "{ \"tableName\": \"user\", \"data\": [ { \"fieldName\": \"id\", \"fieldType\": \"varchar\", \"IsPrimaryKey\": true, \"fieldValue\": \"0001\" }, { \"fieldName\": \"name\", \"fieldType\": \"varchar\", \"fieldValue\": \"张三\" } ] }"; + Map dataMap = JsonUtil.parseObject(tableData, Map.class); + String tableName = (String) dataMap.get("tableName"); + List> fieldList = (List>) dataMap.get("data"); + + if (fieldList == null || fieldList.isEmpty()) { + BusinessException.throwException("没有可插入的数据字段"); + } + + // 构建插入语句 + StringBuilder columns = new StringBuilder(); + StringBuilder values = new StringBuilder(); + + for (int i = 0; i < fieldList.size(); i++) { + Map field = fieldList.get(i); + String fieldName = (String) field.get("fieldName"); + Object fieldValue = field.get("fieldValue"); + boolean isPrimaryKey = field.get("IsPrimaryKey") != null && (boolean) field.get("IsPrimaryKey"); + if (isPrimaryKey) { + if (fieldValue == null) {fieldValue= IDUtils.snowID();} + } + if (i > 0) { + columns.append(", "); + values.append(", "); + } + columns.append(fieldName); + if (fieldValue instanceof String) { + values.append("'").append(fieldValue).append("'"); + } else { + values.append(fieldValue); + } + } + + String sql = String.format("INSERT INTO %s (%s) VALUES (%s)", tableName, columns.toString(), values.toString()); + + // 调用执行器,向数据表中插入 tableData 数据 + DatasourceSchemaDTO datasourceSchemaDTO = new DatasourceSchemaDTO(); + BeanUtils.copyBean(datasourceSchemaDTO, coreDatasource); + + Provider provider = ProviderFactory.getProvider(coreDatasource.getType()); + DatasourceRequest datasourceRequest = new DatasourceRequest(); + datasourceRequest.setQuery(sql); + datasourceRequest.setDsList(Map.of(datasourceSchemaDTO.getId(), datasourceSchemaDTO)); + + log.debug("执行插入数据的SQL: {}", sql); + + // 执行插入操作 + int result= provider.executeUpdate(datasourceRequest); + if (result==1) { + return true; + // 
process result set + } else { + return false; + } + } + @Override + public Map getTableDataByPk(Long datasourceId, String condtion) throws Exception { + // 获取数据源信息 + CoreDatasource coreDatasource = coreDatasourceMapper.selectById(datasourceId); + if (coreDatasource == null) { + BusinessException.throwException("数据源不存在"); + } + // 解析 JSON 数据 + //condtion={ \"tableName\": \"user\", key:[{ \"fieldName\": \"id\",\"fieldValue\": \"0001\"] + Map dataMap = JsonUtil.parseObject(condtion, Map.class); + String tableName = (String) dataMap.get("tableName"); + // 参数校验 + if (StringUtils.isBlank(tableName)) { + BusinessException.throwException("表名不能为空"); + } + List> keyFields = (List>) dataMap.get("key"); + if (CollectionUtils.isEmpty(keyFields)) { + BusinessException.throwException("主键字段或值不能为空"); + } + + String whereClause = buildWhereCondition(keyFields); + String sql = String.format("SELECT * FROM %s WHERE %s", tableName, whereClause); + // 执行查询操作 + DatasourceSchemaDTO datasourceSchemaDTO = new DatasourceSchemaDTO(); + BeanUtils.copyBean(datasourceSchemaDTO, coreDatasource); + + Provider provider = ProviderFactory.getProvider(coreDatasource.getType()); + DatasourceRequest datasourceRequest = new DatasourceRequest(); + datasourceRequest.setQuery(sql); + datasourceRequest.setDsList(Map.of(datasourceSchemaDTO.getId(), datasourceSchemaDTO)); + + log.debug("执行查询数据的SQL: {}", sql); + + // 获取查询结果 + Map data = provider.fetchResultField(datasourceRequest); + + // 处理查询结果 + List dataList = (List) data.get("data"); + List fields = (List) data.get("fields"); + + if (CollectionUtils.isEmpty(dataList) || dataList.size() == 0) { + return null; + } + + // 将结果转换为 Map 格式 + String[] row = dataList.get(0); + Map resultMap = new LinkedHashMap<>(); + for (int i = 0; i < fields.size(); i++) { + TableField field = fields.get(i); + String fieldName = field.getOriginName(); + resultMap.put(fieldName, row[i]); + } + return resultMap; + } + /** + * 构建 WHERE 条件字符串,用于多字段主键查询/更新/删除操作 + * + * @param keyFields 主键字段列表,格式如:{ "fieldName": "id", "fieldValue": "0001" } + * @return SQL WHERE 子句字符串 + */ + private String buildWhereCondition(List> keyFields) { + StringBuilder whereClause = new StringBuilder(); + + for (int i = 0; i < keyFields.size(); i++) { + Map keyField = keyFields.get(i); + String fieldName = (String) keyField.get("fieldName"); + Object fieldValue = keyField.get("fieldValue"); + + if (i > 0) { + whereClause.append(" AND "); + } + + if (fieldValue == null) { + whereClause.append(fieldName).append(" IS NULL"); + } else if (fieldValue instanceof String) { + whereClause.append(String.format("%s = '%s'", fieldName, fieldValue)); + } else { + whereClause.append(String.format("%s = %s", fieldName, fieldValue)); + } + } + + return whereClause.toString(); + } + @Override + public boolean updateTableData(Long datasourceId, String tableData) throws Exception { + // 获取数据源信息 + CoreDatasource coreDatasource = coreDatasourceMapper.selectById(datasourceId); + if (coreDatasource == null) { + BusinessException.throwException("数据源不存在"); + } + //String tableDataJson = "{ \"tableName\": \"user\", key:[{ \"fieldName\": \"id\",\"fieldValue\": \"0001\"], \"data\": [ { \"fieldName\": \"name\", \"fieldType\": \"varchar\", \"fieldValue\": \"李四\" } ] }"; + // 解析 JSON 数据 + Map dataMap = JsonUtil.parseObject(tableData, Map.class); + String tableName = (String) dataMap.get("tableName"); + if (StringUtils.isBlank(tableName)) { + BusinessException.throwException("表名不能为空"); + } + List> keyFields = (List>) dataMap.get("key"); + if 
(CollectionUtils.isEmpty(keyFields)) { + BusinessException.throwException("主键字段或值不能为空"); + } + List> fieldList = (List>) dataMap.get("data"); + if (fieldList == null || fieldList.isEmpty()) { + BusinessException.throwException("没有可更新的数据字段"); + } + // 构建 UPDATE 语句 + StringBuilder setClause = new StringBuilder(); + for (int i = 0; i < fieldList.size(); i++) { + Map field = fieldList.get(i); + String fieldName = (String) field.get("fieldName"); + Object fieldValue = field.get("fieldValue"); + + if (i > 0) { + setClause.append(", "); + } + if (fieldValue instanceof String) { + setClause.append(String.format("%s = '%s'", fieldName, fieldValue)); + } else { + setClause.append(String.format("%s = %s", fieldName, fieldValue)); + } + } + + String whereClause = buildWhereCondition(keyFields); + String sql = String.format("UPDATE %s SET %s WHERE %s", tableName, setClause, whereClause); + // 调用执行器执行 SQL + DatasourceSchemaDTO datasourceSchemaDTO = new DatasourceSchemaDTO(); + BeanUtils.copyBean(datasourceSchemaDTO, coreDatasource); + + Provider provider = ProviderFactory.getProvider(coreDatasource.getType()); + DatasourceRequest datasourceRequest = new DatasourceRequest(); + datasourceRequest.setQuery(sql); + datasourceRequest.setDsList(Map.of(datasourceSchemaDTO.getId(), datasourceSchemaDTO)); + + log.debug("执行更新数据的SQL: {}", sql); + + // 执行更新操作 + int result= provider.executeUpdate(datasourceRequest); + if (result==1) { + return true; + // process result set + } else { + return false; + } + } + @Override + public boolean deleteTableData(Long datasourceId, String condtion) throws Exception { + // 获取数据源信息 + CoreDatasource coreDatasource = coreDatasourceMapper.selectById(datasourceId); + if (coreDatasource == null) { + BusinessException.throwException("数据源不存在"); } + + // 解析 JSON 数据 + //String tableDataJson = "{ \"tableName\": \"user\", key:[{ \"fieldName\": \"id\",\"fieldValue\": \"0001\"] }"; + Map dataMap = JsonUtil.parseObject(condtion, Map.class); + String tableName = (String) dataMap.get("tableName"); + if (StringUtils.isBlank(tableName)) { + BusinessException.throwException("表名不能为空"); + } + List> keyFields = (List>) dataMap.get("key"); + if (CollectionUtils.isEmpty(keyFields)) { + BusinessException.throwException("主键字段或值不能为空"); + } + String whereClause = buildWhereCondition(keyFields); + String sql = String.format("DELETE FROM %s WHERE %s", tableName, whereClause); + // 调用执行器执行 SQL + DatasourceSchemaDTO datasourceSchemaDTO = new DatasourceSchemaDTO(); + BeanUtils.copyBean(datasourceSchemaDTO, coreDatasource); + + Provider provider = ProviderFactory.getProvider(coreDatasource.getType()); + DatasourceRequest datasourceRequest = new DatasourceRequest(); + datasourceRequest.setQuery(sql); + datasourceRequest.setDsList(Map.of(datasourceSchemaDTO.getId(), datasourceSchemaDTO)); + + log.debug("执行删除数据的SQL: {}", sql); + + // 执行删除操作 + int result= provider.executeUpdate(datasourceRequest); + if (result==1) { + return true; + // process result set + } else { + return false; + } + } + @Override + public Page> queryTableDataPaged(Long datasourceId, String condition) throws Exception { + // 获取数据源信息 + CoreDatasource coreDatasource = coreDatasourceMapper.selectById(datasourceId); + if (coreDatasource == null) { + BusinessException.throwException("数据源不存在"); + } + + // 解析 condition JSON 数据 + Map dataMap = JsonUtil.parseObject(condition, Map.class); + String tableName = (String) dataMap.get("tableName"); + List> conditionList = (List>) dataMap.get("conditions"); + + Integer pageNum = (Integer) 
dataMap.getOrDefault("pageNum", 1); + Integer pageSize = (Integer) dataMap.getOrDefault("pageSize", 10); + + if (StringUtils.isBlank(tableName)) { + BusinessException.throwException("表名不能为空"); + } + + // 构建 WHERE 条件子句 + StringBuilder whereClause = new StringBuilder(); + if (conditionList != null && !conditionList.isEmpty()) { + whereClause.append(" WHERE "); + for (int i = 0; i < conditionList.size(); i++) { + Map cond = conditionList.get(i); + String field = (String) cond.get("field"); + String operator = ((String) cond.get("operator")).toLowerCase(); + Object value = cond.get("value"); + + if (i > 0) { + whereClause.append(" AND "); + } + + switch (operator) { + case "like": + whereClause.append(String.format("%s LIKE '%%%s%%'", field, value)); + break; + case "=": + appendValue(whereClause, field, value, "="); + break; + case "<": + appendValue(whereClause, field, value, "<"); + break; + case ">": + appendValue(whereClause, field, value, ">"); + break; + case "<=": + appendValue(whereClause, field, value, "<="); + break; + case ">=": + appendValue(whereClause, field, value, ">="); + break; + case "!=": + case "<>": + appendValue(whereClause, field, value, "<>"); + break; + case "in": + if (!(value instanceof List)) { + BusinessException.throwException("IN 操作符要求值为列表类型"); + } + List values = (List) value; + String inValues = values.stream() + .map(v -> v instanceof String ? "'" + v + "'" : v.toString()) + .collect(Collectors.joining(", ")); + whereClause.append(String.format("%s IN (%s)", field, inValues)); + break; + default: + BusinessException.throwException("不支持的操作符: " + operator); + } + } + } + + // 构建基础 SQL + String baseSql = String.format("SELECT * FROM %s%s", tableName, whereClause); + String dbType = coreDatasource.getType().toLowerCase(); + String pagedSql = buildPagedSQL(baseSql, dbType, pageNum, pageSize); + String countSql = String.format("SELECT COUNT(*) FROM %s%s", tableName, whereClause); + + DatasourceSchemaDTO schemaDTO = new DatasourceSchemaDTO(); + BeanUtils.copyBean(schemaDTO, coreDatasource); + Provider provider = ProviderFactory.getProvider(coreDatasource.getType()); + DatasourceRequest request = new DatasourceRequest(); + request.setDsList(Map.of(schemaDTO.getId(), schemaDTO)); + + // 查询分页数据 + request.setQuery(pagedSql); + Map result = provider.fetchResultField(request); + List dataList = (List) result.get("data"); + List fields = (List) result.get("fields"); + + // 查询总记录数 + request.setQuery(countSql); + Map countResult = provider.fetchResultField(request); + long total = Long.parseLong(((List) countResult.get("data")).get(0)[0]); + + // 将数据封装为 Map 形式 + List> records = new ArrayList<>(); + if (!CollectionUtils.isEmpty(dataList)) { + for (String[] row : dataList) { + Map rowMap = new LinkedHashMap<>(); + for (int i = 0; i < fields.size() && i < row.length; i++) { + String fieldName = fields.get(i).getOriginName(); + String fieldValue = row[i]; + rowMap.put(fieldName, fieldValue); + } + records.add(rowMap); + } + } + + // 返回分页结果 + Page> page = new Page<>(); + page.setCurrent(pageNum); + page.setSize(pageSize); + page.setTotal(total); + page.setRecords(records); + + return page; + } + + + private void appendValue(StringBuilder sb, String field, Object value, String op) { + if (value instanceof String) { + sb.append(String.format("%s %s '%s'", field, op, value)); + } else { + sb.append(String.format("%s %s %s", field, op, value)); + } + } + + private String buildPagedSQL(String baseSql, String dbType, int pageNum, int pageSize) { + int offset = (pageNum - 1) * 
pageSize; + + switch (dbType) { + case "mysql": + case "mariadb": + return String.format("%s LIMIT %d OFFSET %d", baseSql, pageSize, offset); + case "postgresql": + return String.format("%s LIMIT %d OFFSET %d", baseSql, pageSize, offset); + case "oracle": + int start = offset + 1; + int end = offset + pageSize; + return String.format("SELECT * FROM (SELECT ROWNUM rn, t.* FROM (%s) t WHERE ROWNUM <= %d) WHERE rn >= %d", baseSql, end, start); + case "sqlserver": + return String.format("%s OFFSET %d ROWS FETCH NEXT %d ROWS ONLY", baseSql, offset, pageSize); + default: + // 默认使用 MySQL 方式 + return String.format("%s LIMIT %d OFFSET %d", baseSql, pageSize, offset); + } + } +} diff --git a/backend/src/main/java/com/stdproject/service/manage/EngineManage.java b/backend/src/main/java/com/stdproject/service/manage/EngineManage.java new file mode 100644 index 0000000..f05885b --- /dev/null +++ b/backend/src/main/java/com/stdproject/service/manage/EngineManage.java @@ -0,0 +1,197 @@ +package com.stdproject.service.manage; + +import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; +import com.stdproject.entity.CoreDatasource; +import com.stdproject.entity.CoreDeEngine; +import com.stdproject.mapper.CoreDatasourceMapper; +import com.stdproject.mapper.CoreDeEngineMapper; +import com.stdproject.service.type.H2; +import com.stdproject.service.type.Mysql; +import io.gisbi.exception.DEException; +import io.gisbi.extensions.datasource.dto.DatasourceDTO; +import io.gisbi.extensions.datasource.dto.DatasourceRequest; +import io.gisbi.extensions.datasource.factory.ProviderFactory; +import io.gisbi.result.ResultMessage; +import io.gisbi.utils.BeanUtils; +import io.gisbi.utils.JsonUtil; +import io.gisbi.utils.ModelUtils; +import jakarta.annotation.Resource; +import org.apache.commons.lang3.StringUtils; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.core.env.Environment; +import org.springframework.stereotype.Component; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.util.CollectionUtils; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +@Component +@Transactional(rollbackFor = Exception.class) +public class EngineManage { + @Resource + private Environment env; + @Resource + private CoreDeEngineMapper deEngineMapper; + + @Resource + private CoreDatasourceMapper datasourceMapper; + + @Value("${gisbi.path.engine:jdbc:h2:/opt/gisbi/desktop_data;AUTO_SERVER=TRUE;AUTO_RECONNECT=TRUE;MODE=MySQL;CASE_INSENSITIVE_IDENTIFIERS=TRUE;DATABASE_TO_UPPER=FALSE}") + private String engineUrl; + + + public CoreDeEngine info() throws DEException { + List deEngines = deEngineMapper.selectList(null); + if (CollectionUtils.isEmpty(deEngines)) { + DEException.throwException("未完整设置数据引擎"); + } + return deEngines.get(0); + } + + public CoreDatasource getDeEngine() { + List deEngines = deEngineMapper.selectList(null); + if (CollectionUtils.isEmpty(deEngines)) { + DEException.throwException("未完整设置数据引擎"); + } + CoreDatasource coreDatasource = new CoreDatasource(); + BeanUtils.copyBean(coreDatasource, deEngines.get(0)); + return coreDatasource; + } + + + public CoreDatasource deEngine() { + List deEngines = deEngineMapper.selectList(null); + CoreDatasource coreDatasource = new CoreDatasource(); + if (CollectionUtils.isEmpty(deEngines)) { + return null; + } + BeanUtils.copyBean(coreDatasource, deEngines.get(0)); + return coreDatasource; + } + + public void 
validate(CoreDeEngine engine) throws Exception { + if (StringUtils.isEmpty(engine.getType()) || StringUtils.isEmpty(engine.getConfiguration())) { + throw new Exception("未完整设置数据引擎"); + } + try { + + DatasourceRequest datasourceRequest = new DatasourceRequest(); + DatasourceDTO datasource = new DatasourceDTO(); + BeanUtils.copyBean(datasource, engine); + datasourceRequest.setDatasource(datasource); + ProviderFactory.getProvider(engine.getType()).checkStatus(datasourceRequest); + } catch (Exception e) { + DEException.throwException("校验失败:" + e.getMessage()); + } + } + + public ResultMessage save(CoreDeEngine engine) throws Exception { + if (engine.getId() == null) { + deEngineMapper.insert(engine); + } else { + deEngineMapper.updateById(engine); + } + return ResultMessage.success(engine); + } + + public void initSimpleEngine() throws Exception { + initLocalDataSource(); + QueryWrapper queryWrapper = new QueryWrapper<>(); + if (ModelUtils.isDesktop()) { + queryWrapper.eq("type", engineType.h2.name()); + } else { + queryWrapper.eq("type", engineType.mysql.name()); + } + List deEngines = deEngineMapper.selectList(queryWrapper); + if (!CollectionUtils.isEmpty(deEngines)) { + return; + } + + CoreDeEngine engine = new CoreDeEngine(); + if (ModelUtils.isDesktop()) { + engine.setType(engineType.h2.name()); + H2 h2 = new H2(); + h2.setJdbc(engineUrl); + h2.setDataBase("PUBLIC"); + h2.setUsername(env.getProperty("spring.datasource.username")); + h2.setPassword(env.getProperty("spring.datasource.password")); + engine.setConfiguration(JsonUtil.toJSONString(h2).toString()); + } else { + engine.setType(engineType.mysql.name()); + Mysql mysqlConfiguration = new Mysql(); + Pattern WITH_SQL_FRAGMENT = Pattern.compile("jdbc:mysql://(.*):(\\d+)/(.*)"); + Matcher matcher = WITH_SQL_FRAGMENT.matcher(env.getProperty("spring.datasource.url")); + if (!matcher.find()) { + return; + } + mysqlConfiguration.setHost(matcher.group(1)); + mysqlConfiguration.setPort(Integer.valueOf(matcher.group(2))); + String[] databasePrams = matcher.group(3).split("\\?"); + mysqlConfiguration.setDataBase(databasePrams[0]); + if (databasePrams.length == 2) { + mysqlConfiguration.setExtraParams(databasePrams[1]); + } + mysqlConfiguration.setUsername(env.getProperty("spring.datasource.username")); + mysqlConfiguration.setPassword(env.getProperty("spring.datasource.password")); + engine.setConfiguration(JsonUtil.toJSONString(mysqlConfiguration).toString()); + } + engine.setName("默认引擎"); + engine.setDescription("默认引擎"); + deEngineMapper.insert(engine); + } + + + public enum engineType { + mysql("Mysql"), + h2("h2"); + private String alias; + + private engineType(String alias) { + this.alias = alias; + } + + public String getAlias() { + return alias; + } + } + + public void initLocalDataSource() { + QueryWrapper queryWrapper = new QueryWrapper<>(); + queryWrapper.eq("id", 985188400292302848L); + queryWrapper.ne("create_time", 1715053684176L); + if (!datasourceMapper.exists(queryWrapper) && !ModelUtils.isDesktop()) { + Pattern WITH_SQL_FRAGMENT = Pattern.compile("jdbc:mysql://(.*):(\\d+)/(.*)\\?(.*)"); + Matcher matcher = WITH_SQL_FRAGMENT.matcher(env.getProperty("spring.datasource.url")); + if (!matcher.find()) { + return; + } + Map configuration = new HashMap<>(); + configuration.put("dataBase", matcher.group(3)); + configuration.put("username", env.getProperty("spring.datasource.username")); + configuration.put("password", env.getProperty("spring.datasource.password")); + configuration.put("host", matcher.group(1)); + 
configuration.put("port", Integer.valueOf(matcher.group(2))); + configuration.put("extraParams", ""); + + CoreDatasource initDatasource = new CoreDatasource(); + initDatasource.setId(985188400292302848L); + initDatasource.setName("Demo"); + initDatasource.setType("mysql"); + initDatasource.setPid(0L); + initDatasource.setConfiguration(JsonUtil.toJSONString(configuration).toString()); + initDatasource.setCreateTime(System.currentTimeMillis()); + initDatasource.setUpdateTime(System.currentTimeMillis()); + initDatasource.setCreateBy("1"); + initDatasource.setUpdateBy(1L); + initDatasource.setStatus("success"); + initDatasource.setTaskStatus("WaitingForExecution"); + datasourceMapper.deleteById(985188400292302848L); + datasourceMapper.insert(initDatasource); + } + + } +} diff --git a/backend/src/main/java/com/stdproject/service/provider/ApiUtils.java b/backend/src/main/java/com/stdproject/service/provider/ApiUtils.java new file mode 100644 index 0000000..1edd65a --- /dev/null +++ b/backend/src/main/java/com/stdproject/service/provider/ApiUtils.java @@ -0,0 +1,877 @@ +package com.stdproject.service.provider; + + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.jayway.jsonpath.JsonPath; +import io.gisbi.extensions.datasource.dto.ApiDefinition; +import io.gisbi.extensions.datasource.dto.ApiDefinitionRequest; +import io.gisbi.exception.DEException; +import io.gisbi.extensions.datasource.dto.DatasetTableDTO; +import io.gisbi.extensions.datasource.dto.DatasourceRequest; +import io.gisbi.extensions.datasource.dto.TableField; +import io.gisbi.utils.*; +import org.apache.commons.lang3.StringUtils; +import org.json.simple.JSONArray; +import org.springframework.util.CollectionUtils; +import org.springframework.util.ObjectUtils; + +import java.net.URLEncoder; +import java.text.SimpleDateFormat; +import java.util.*; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +public class ApiUtils { + + private static String path = "['%s']"; + public static ObjectMapper objectMapper = CommonBeanFactory.getBean(ObjectMapper.class); + + private static TypeReference> listTypeReference = new TypeReference>() { + }; + private static TypeReference>> listForMapTypeReference = new TypeReference>>() { + }; + + /** + * 从数据源配置中解析并获取API数据表信息列表 + * + * @param datasourceRequest 数据源请求对象,包含数据源配置信息 + * @return 数据集表DTO列表,包含表名称、显示名称及所属数据源ID + * @throws DEException 当数据处理过程中出现异常时抛出 + */ + public static List getApiTables(DatasourceRequest datasourceRequest) throws DEException { + List tableDescs = new ArrayList<>(); + // 定义API配置的反序列化类型引用 + TypeReference> listTypeReference = new TypeReference>() { + }; + + // 从数据源配置中解析API定义列表 + List apiDefinitionList = JsonUtil.parseList(datasourceRequest.getDatasource().getConfiguration(), listTypeReference); + + for (ApiDefinition apiDefinition : apiDefinitionList) { + // 过滤空对象和参数类型配置 + if (apiDefinition == null) { + continue; + } + if (StringUtils.isNotEmpty(apiDefinition.getType()) && apiDefinition.getType().equalsIgnoreCase("params")) { + continue; + } + + // 构建数据集表信息对象 + DatasetTableDTO datasetTableDTO = new DatasetTableDTO(); + datasetTableDTO.setTableName(apiDefinition.getDeTableName()); + datasetTableDTO.setName(apiDefinition.getName()); + datasetTableDTO.setDatasourceId(datasourceRequest.getDatasource().getId()); + tableDescs.add(datasetTableDTO); + } + return 
tableDescs; + } + + + /** + * 将JSON配置字符串解析为表名映射关系 + * + * @param configration 输入的JSON配置字符串,应包含包含"name"和"deTableName"字段的数组对象 + * @return 返回键值对映射,key为配置中的name字段值,value为对应的deTableName字段值 + * @throws DEException 当JSON解析失败时抛出异常 + */ + public static Map getTableNamesMap(String configration) throws DEException { + Map result = new HashMap<>(); + try { + // 解析JSON根节点并遍历数组元素 + JsonNode rootNode = objectMapper.readTree(configration); + for (int i = 0; i < rootNode.size(); i++) { + // 提取每个元素的name和deTableName字段构建映射关系 + result.put(rootNode.get(i).get("name").asText(), rootNode.get(i).get("deTableName").asText()); + } + } catch (Exception e) { + // 统一将底层异常转换为领域异常抛出 + DEException.throwException(e); + } + + return result; + } + + + + public static Map fetchApiResultField(DatasourceRequest datasourceRequest) throws DEException { + Map result = new HashMap<>(); + List dataList = new ArrayList<>(); + List fieldList = new ArrayList<>(); + ApiDefinition apiDefinition = getApiDefinition(datasourceRequest); + if (apiDefinition == null) { + DEException.throwException("未找到"); + } + if (apiDefinition.getRequest().getPage() != null && apiDefinition.getRequest().getPage().getPageType() != null && !apiDefinition.getRequest().getPage().getPageType().equalsIgnoreCase("empty")) { + String response = execHttpRequest(false, apiDefinition, apiDefinition.getApiQueryTimeout() == null || apiDefinition.getApiQueryTimeout() <= 0 ? 10 : apiDefinition.getApiQueryTimeout(), params(datasourceRequest)); + fieldList = getTableFields(apiDefinition); + result.put("fieldList", fieldList); + if (apiDefinition.getRequest().getPage().getPageType().equalsIgnoreCase("pageNumber")) { + int pageCount = Integer.valueOf(JsonPath.read(response, apiDefinition.getRequest().getPage().getResponseData().get(0).getResolutionPath()).toString()); + int beginPage = Integer.valueOf(apiDefinition.getRequest().getPage().getRequestData().get(0).getParameterDefaultValue()); + if (apiDefinition.getRequest().getPage().getResponseData().get(0).getResolutionPathType().equalsIgnoreCase("totalNumber")) { + pageCount = pageCount / Integer.valueOf(apiDefinition.getRequest().getPage().getRequestData().get(1).getParameterDefaultValue()) + 1; + } + for (int i = beginPage; i <= pageCount; i++) { + apiDefinition.getRequest().getPage().getRequestData().get(0).setParameterDefaultValue(String.valueOf(i)); + response = execHttpRequest(false, apiDefinition, apiDefinition.getApiQueryTimeout() == null || apiDefinition.getApiQueryTimeout() <= 0 ? 10 : apiDefinition.getApiQueryTimeout(), params(datasourceRequest)); + dataList.addAll(fetchResult(response, apiDefinition)); + } + } + if (apiDefinition.getRequest().getPage().getPageType().equalsIgnoreCase("cursor")) { + dataList.addAll(fetchResult(response, apiDefinition)); + String cursor = null; + try { + cursor = JsonPath.read(response, apiDefinition.getRequest().getPage().getResponseData().get(0).getResolutionPath()).toString(); + } catch (Exception e) { + } + while (cursor != null) { + apiDefinition.getRequest().getPage().getRequestData().get(0).setParameterDefaultValue(cursor); + response = execHttpRequest(false, apiDefinition, apiDefinition.getApiQueryTimeout() == null || apiDefinition.getApiQueryTimeout() <= 0 ? 
10 : apiDefinition.getApiQueryTimeout(), params(datasourceRequest)); + dataList.addAll(fetchResult(response, apiDefinition)); + try { + if (cursor.equalsIgnoreCase(JsonPath.read(response, apiDefinition.getRequest().getPage().getResponseData().get(0).getResolutionPath()).toString())) { + cursor = null; + } else { + cursor = JsonPath.read(response, apiDefinition.getRequest().getPage().getResponseData().get(0).getResolutionPath()).toString(); + } + } catch (Exception e) { + cursor = null; + } + } + } + result.put("dataList", dataList); + return result; + } else { + String response = execHttpRequest(false, apiDefinition, apiDefinition.getApiQueryTimeout() == null || apiDefinition.getApiQueryTimeout() <= 0 ? 10 : apiDefinition.getApiQueryTimeout(), params(datasourceRequest)); + fieldList = getTableFields(apiDefinition); + result.put("fieldList", fieldList); + dataList = fetchResult(response, apiDefinition); + result.put("dataList", dataList); + return result; + } + } + + + private static List getTableFields(ApiDefinition apiDefinition) throws DEException { + return apiDefinition.getFields(); + } + + public static List getTableFields(DatasourceRequest datasourceRequest) throws DEException { + TypeReference> listTypeReference = new TypeReference>() { + }; + + List tableFields = new ArrayList<>(); + try { + List lists = JsonUtil.parseList(datasourceRequest.getDatasource().getConfiguration(), listTypeReference); + for (ApiDefinition apiDefinition : lists) { + if (datasourceRequest.getTable().equalsIgnoreCase(apiDefinition.getDeTableName())) { + tableFields = getTableFields(apiDefinition); + } + } + } catch (Exception e) { + + } + return tableFields; + } + + public static String checkAPIStatus(DatasourceRequest datasourceRequest) throws Exception { + TypeReference> listTypeReference = new TypeReference>() { + }; + List apiDefinitionList = JsonUtil.parseList(datasourceRequest.getDatasource().getConfiguration(), listTypeReference); + List status = new ArrayList(); + for (ApiDefinition apiDefinition : apiDefinitionList) { + if (apiDefinition == null || (apiDefinition.getType() != null && apiDefinition.getType().equalsIgnoreCase("params"))) { + continue; + } + datasourceRequest.setTable(apiDefinition.getName()); + ObjectNode apiItemStatuses = objectMapper.createObjectNode(); + try { + getData(datasourceRequest); + apiItemStatuses.put("name", apiDefinition.getName()); + apiItemStatuses.put("status", "Success"); + } catch (Exception e) { + LogUtil.error("API status Error: " + datasourceRequest.getDatasource().getName() + "-" + apiDefinition.getName(), e); + apiItemStatuses.put("name", apiDefinition.getName()); + apiItemStatuses.put("status", "Error"); + } + status.add(apiItemStatuses); + } + return JsonUtil.toJSONString(status).toString(); + } + + private static List getData(DatasourceRequest datasourceRequest) throws Exception { + ApiDefinition apiDefinition = getApiDefinition(datasourceRequest); + if (apiDefinition == null) { + DEException.throwException("未找到"); + } + String response = execHttpRequest(true, apiDefinition, apiDefinition.getApiQueryTimeout() == null || apiDefinition.getApiQueryTimeout() <= 0 ? 
10 : apiDefinition.getApiQueryTimeout(), params(datasourceRequest)); + return fetchResult(response, apiDefinition); + } + + public static String execHttpRequest(boolean preview, ApiDefinition api, int socketTimeout, List paramsList) { + ApiDefinition apiDefinition = new ApiDefinition(); + BeanUtils.copyBean(apiDefinition, api); + + if (apiDefinition.getRequest().getPage() != null && apiDefinition.getRequest().getPage().getPageType() != null && apiDefinition.getRequest().getPage().getPageType().equalsIgnoreCase("pageNumber")) { + apiDefinition.setUrl(apiDefinition.getUrl().replace(apiDefinition.getRequest().getPage().getRequestData().get(0).getBuiltInParameterName(), apiDefinition.getRequest().getPage().getRequestData().get(0).getParameterDefaultValue()).replace(apiDefinition.getRequest().getPage().getRequestData().get(1).getBuiltInParameterName(), apiDefinition.getRequest().getPage().getRequestData().get(1).getParameterDefaultValue())); + apiDefinition.setRequest(JsonUtil.parseObject(JsonUtil.toJSONString(apiDefinition.getRequest()).toString().replace(apiDefinition.getRequest().getPage().getRequestData().get(0).getBuiltInParameterName(), apiDefinition.getRequest().getPage().getRequestData().get(0).getParameterDefaultValue()).replace(apiDefinition.getRequest().getPage().getRequestData().get(1).getBuiltInParameterName(), apiDefinition.getRequest().getPage().getRequestData().get(1).getParameterDefaultValue()), ApiDefinitionRequest.class)); + } + + if (apiDefinition.getRequest().getPage() != null && apiDefinition.getRequest().getPage().getPageType() != null && apiDefinition.getRequest().getPage().getPageType().equalsIgnoreCase("cursor")) { + apiDefinition.setUrl(apiDefinition.getUrl().replace(apiDefinition.getRequest().getPage().getRequestData().get(0).getBuiltInParameterName(), apiDefinition.getRequest().getPage().getRequestData().get(0).getParameterDefaultValue()).replace(apiDefinition.getRequest().getPage().getRequestData().get(1).getBuiltInParameterName(), apiDefinition.getRequest().getPage().getRequestData().get(1).getParameterDefaultValue())); + String defaultCursor = apiDefinition.getRequest().getPage().getRequestData().get(0).getParameterDefaultValue(); + apiDefinition.setRequest(JsonUtil.parseObject(JsonUtil.toJSONString(apiDefinition.getRequest()).toString().replace(apiDefinition.getRequest().getPage().getRequestData().get(0).getBuiltInParameterName(), StringUtils.isEmpty(defaultCursor) ? 
"" : defaultCursor).replace(apiDefinition.getRequest().getPage().getRequestData().get(1).getBuiltInParameterName(), apiDefinition.getRequest().getPage().getRequestData().get(1).getParameterDefaultValue()), ApiDefinitionRequest.class)); + } + + + String response = ""; + HttpClientConfig httpClientConfig = new HttpClientConfig(); + httpClientConfig.setSocketTimeout(socketTimeout * 1000); + ApiDefinitionRequest apiDefinitionRequest = apiDefinition.getRequest(); + for (Map header : apiDefinitionRequest.getHeaders()) { + if (header.get("name") != null && StringUtils.isNotEmpty(header.get("name").toString()) && header.get("value") != null && StringUtils.isNotEmpty(header.get("value").toString())) { + if (header.get("nameType") != null && header.get("nameType").toString().equalsIgnoreCase("params")) { + String param = header.get("value").toString(); + for (ApiDefinition definition : paramsList) { + for (int i = 0; i < definition.getFields().size(); i++) { + TableField field = definition.getFields().get(i); + if (field.getName().equalsIgnoreCase(param)) { + String resultStr = execHttpRequest(true, definition, definition.getApiQueryTimeout() == null || apiDefinition.getApiQueryTimeout() <= 0 ? 10 : apiDefinition.getApiQueryTimeout(), null); + List dataList = fetchResult(resultStr, definition); + if (dataList.size() > 0) { + httpClientConfig.addHeader(header.get("name").toString(), dataList.get(0)[i]); + } + } + } + } + } else if (header.get("nameType") != null && header.get("nameType").toString().equalsIgnoreCase("custom")) { + List params = new ArrayList<>(); + String regex = "\\$\\{(.*?)\\}"; + Pattern pattern = Pattern.compile(regex); + Matcher matcher = pattern.matcher(header.get("value").toString()); + while (matcher.find()) { + params.add(matcher.group(1)); + } + String result = header.get("value").toString(); + for (String param : params) { + for (ApiDefinition definition : paramsList) { + for (int i = 0; i < definition.getFields().size(); i++) { + TableField field = definition.getFields().get(i); + if (field.getName().equalsIgnoreCase(param)) { + String resultStr = execHttpRequest(true, definition, definition.getApiQueryTimeout() == null || apiDefinition.getApiQueryTimeout() <= 0 ? 
10 : apiDefinition.getApiQueryTimeout(), null); + List dataList = fetchResult(resultStr, definition); + if (dataList.size() > 0) { + result = result.replace("${" + param + "}", dataList.get(0)[i]); + } + } + } + } + } + httpClientConfig.addHeader(header.get("name").toString(), result); + } else if (header.get("nameType") != null && header.get("nameType").toString().equalsIgnoreCase("timeFun")) { + String timeFormat = header.get("value").toString(); + Calendar calendar = Calendar.getInstance(); + Date date = calendar.getTime(); + if (StringUtils.isNotEmpty(timeFormat) && timeFormat.split(" ")[0].equalsIgnoreCase("currentDay")) { + SimpleDateFormat simpleDateFormat = new SimpleDateFormat(timeFormat.split(" ")[1]); + httpClientConfig.addHeader(header.get("name").toString(), simpleDateFormat.format(date)); + } + } else { + httpClientConfig.addHeader(header.get("name").toString(), header.get("value").toString()); + } + + } + } + if (apiDefinitionRequest.getAuthManager() != null + && StringUtils.isNotBlank(apiDefinitionRequest.getAuthManager().getUsername()) + && StringUtils.isNotBlank(apiDefinitionRequest.getAuthManager().getPassword()) + && apiDefinitionRequest.getAuthManager().getVerification().equals("Basic Auth")) { + String authValue = "Basic " + Base64.getUrlEncoder().encodeToString((apiDefinitionRequest.getAuthManager().getUsername() + + ":" + apiDefinitionRequest.getAuthManager().getPassword()).getBytes()); + httpClientConfig.addHeader("Authorization", authValue); + } + + List params = new ArrayList<>(); + for (Map argument : apiDefinition.getRequest().getArguments()) { + if (StringUtils.isNotEmpty(argument.get("name")) && StringUtils.isNotEmpty(argument.get("value"))) { + if (argument.get("nameType") != null && argument.get("nameType").toString().equalsIgnoreCase("params")) { + String param = argument.get("value").toString(); + for (ApiDefinition definition : paramsList) { + for (int i = 0; i < definition.getFields().size(); i++) { + TableField field = definition.getFields().get(i); + if (field.getOriginName().equalsIgnoreCase(param)) { + String resultStr = execHttpRequest(true, definition, definition.getApiQueryTimeout() == null || apiDefinition.getApiQueryTimeout() <= 0 ? 10 : apiDefinition.getApiQueryTimeout(), null); + List dataList = fetchResult(resultStr, definition); + if (dataList.size() > 0) { + params.add(argument.get("name") + "=" + dataList.get(0)[i]); + } + } + } + } + } else if (argument.get("nameType") != null && argument.get("nameType").toString().equalsIgnoreCase("custom")) { + List arrayList = new ArrayList<>(); + String regex = "\\$\\{(.*?)\\}"; + Pattern pattern = Pattern.compile(regex); + Matcher matcher = pattern.matcher(argument.get("value").toString()); + while (matcher.find()) { + arrayList.add(matcher.group(1)); + } + String result = argument.get("value").toString(); + for (String param : arrayList) { + for (ApiDefinition definition : paramsList) { + for (int i = 0; i < definition.getFields().size(); i++) { + TableField field = definition.getFields().get(i); + if (field.getName().equalsIgnoreCase(param)) { + String resultStr = execHttpRequest(true, definition, definition.getApiQueryTimeout() == null || apiDefinition.getApiQueryTimeout() <= 0 ? 
10 : apiDefinition.getApiQueryTimeout(), null); + List dataList = fetchResult(resultStr, definition); + if (dataList.size() > 0) { + result = result.replace("${" + param + "}", dataList.get(0)[i]); + } + } + } + } + } + params.add(argument.get("name") + "=" + result); + } else if (argument.get("nameType") != null && argument.get("nameType").toString().equalsIgnoreCase("timeFun")) { + String timeFormat = argument.get("value").toString(); + Calendar calendar = Calendar.getInstance(); + Date date = calendar.getTime(); + if (StringUtils.isNotEmpty(timeFormat) && timeFormat.split(" ")[0].equalsIgnoreCase("currentDay")) { + SimpleDateFormat simpleDateFormat = new SimpleDateFormat(timeFormat.split(" ")[1]); + params.add(argument.get("name") + "=" + simpleDateFormat.format(date)); + } + } else { + params.add(argument.get("name") + "=" + URLEncoder.encode(argument.get("value"))); + } + } + } + if (org.apache.commons.collections4.CollectionUtils.isNotEmpty(params)) { + apiDefinition.setUrl(apiDefinition.getUrl() + "?" + StringUtils.join(params, "&")); + } + + switch (apiDefinition.getMethod()) { + case "GET": + response = HttpClientUtil.get(apiDefinition.getUrl().trim(), httpClientConfig); + break; + case "POST": + if (!apiDefinitionRequest.getBody().keySet().contains("type")) { + DEException.throwException("请求类型不能为空"); + } + String type = apiDefinitionRequest.getBody().get("type").toString(); + if (StringUtils.equalsAny(type, "JSON", "XML", "Raw")) { + String raw = null; + if (apiDefinitionRequest.getBody().get("raw") != null) { + raw = apiDefinitionRequest.getBody().get("raw").toString(); + + List bodYparams = new ArrayList<>(); + String regex = "\\$\\{(.*?)\\}"; + Pattern pattern = Pattern.compile(regex); + Matcher matcher = pattern.matcher(raw); + while (matcher.find()) { + bodYparams.add(matcher.group(1)); + } + for (String param : bodYparams) { + for (ApiDefinition definition : paramsList) { + for (int i = 0; i < definition.getFields().size(); i++) { + TableField field = definition.getFields().get(i); + if (field.getOriginName().equalsIgnoreCase(param)) { + String resultStr = execHttpRequest(false, definition, definition.getApiQueryTimeout() == null || apiDefinition.getApiQueryTimeout() <= 0 ? 
10 : apiDefinition.getApiQueryTimeout(), null); + List dataList = fetchResult(resultStr, definition); + if (dataList.size() > 0) { + raw = raw.replace("${" + param + "}", dataList.get(0)[i]); + } + } + } + } + } + response = HttpClientUtil.post(apiDefinition.getUrl(), raw, httpClientConfig); + } + } + if (StringUtils.equalsAny(type, "Form_Data", "WWW_FORM")) { + if (apiDefinitionRequest.getBody().get("kvs") != null) { + Map body = new HashMap<>(); + TypeReference> listTypeReference = new TypeReference>() { + }; + List rootNode = null; + try { + rootNode = objectMapper.readValue(JsonUtil.toJSONString(apiDefinition.getRequest().getBody().get("kvs")).toString(), listTypeReference); + } catch (Exception e) { + e.printStackTrace(); + DEException.throwException(e); + } + for (JsonNode jsonNode : rootNode) { + if (jsonNode.has("name") && jsonNode.has("value")) { + if (jsonNode.get("value") != null && StringUtils.isNotEmpty(jsonNode.get("value").asText())) { + if (jsonNode.get("nameType") != null && jsonNode.get("nameType").asText().equalsIgnoreCase("params")) { + String param = jsonNode.get("value").asText(); + for (ApiDefinition definition : paramsList) { + for (int i = 0; i < definition.getFields().size(); i++) { + TableField field = definition.getFields().get(i); + if (field.getOriginName().equalsIgnoreCase(param)) { + String resultStr = execHttpRequest(false, definition, definition.getApiQueryTimeout() == null || apiDefinition.getApiQueryTimeout() <= 0 ? 10 : apiDefinition.getApiQueryTimeout(), null); + List dataList = fetchResult(resultStr, definition); + if (dataList.size() > 0) { + body.put(jsonNode.get("name").asText(), dataList.get(0)[i]); + } + } + } + } + } else if (jsonNode.get("nameType") != null && jsonNode.get("nameType").asText().equalsIgnoreCase("custom")) { + List bodYparams = new ArrayList<>(); + String regex = "\\$\\{(.*?)\\}"; + Pattern pattern = Pattern.compile(regex); + Matcher matcher = pattern.matcher(jsonNode.get("value").asText()); + while (matcher.find()) { + bodYparams.add(matcher.group(1)); + } + String result = jsonNode.get("value").asText(); + for (String param : bodYparams) { + for (ApiDefinition definition : paramsList) { + for (int i = 0; i < definition.getFields().size(); i++) { + TableField field = definition.getFields().get(i); + if (field.getOriginName().equalsIgnoreCase(param)) { + String resultStr = execHttpRequest(false, definition, definition.getApiQueryTimeout() == null || apiDefinition.getApiQueryTimeout() <= 0 ? 
10 : apiDefinition.getApiQueryTimeout(), null); + List dataList = fetchResult(resultStr, definition); + if (dataList.size() > 0) { + result = result.replace("${" + param + "}", dataList.get(0)[i]); + } + } + } + } + } + body.put(jsonNode.get("name").asText(), result); + } else if (jsonNode.get("nameType") != null && jsonNode.get("nameType").asText().equalsIgnoreCase("timeFun")) { + String timeFormat = jsonNode.get("value").asText(); + Calendar calendar = Calendar.getInstance(); + Date date = calendar.getTime(); + if (StringUtils.isNotEmpty(timeFormat) && timeFormat.split(" ")[0].equalsIgnoreCase("currentDay")) { + SimpleDateFormat simpleDateFormat = new SimpleDateFormat(timeFormat.split(" ")[1]); + body.put(jsonNode.get("name").toString(), simpleDateFormat.format(date)); + } + } else { + body.put(jsonNode.get("name").asText(), jsonNode.get("value").asText()); + } + } + } + } + response = HttpClientUtil.post(apiDefinition.getUrl(), body, httpClientConfig); + } + } + break; + default: + break; + } + return response; + } + + private static void previewNum(List> field) { + for (Map stringObjectMap : field) { + JSONArray newArray = new JSONArray(); + if (stringObjectMap.get("value") != null) { + try { + TypeReference listTypeReference = new TypeReference() { + }; + JSONArray array = objectMapper.readValue(stringObjectMap.get("value").toString(), listTypeReference); + if (array.size() > 100) { + for (int i = 0; i < Math.min(100, array.size()); i++) { + newArray.add(array.get(i)); + } + stringObjectMap.put("value", newArray); + } + } catch (Exception e) { + + } + } + } + } + + public static ApiDefinition checkApiDefinition(DatasourceRequest datasourceRequest) throws DEException { + ApiDefinition apiDefinition = new ApiDefinition(); + TypeReference> listTypeReference = new TypeReference>() { + }; + List apiDefinitionList = JsonUtil.parseList(datasourceRequest.getDatasource().getConfiguration(), listTypeReference); + if (!CollectionUtils.isEmpty(apiDefinitionList)) { + for (ApiDefinition definition : apiDefinitionList) { + if (definition != null && (definition.getType() == null || !definition.getType().equalsIgnoreCase("params"))) { + apiDefinition = definition; + } + } + } + String response = execHttpRequest(true, apiDefinition, apiDefinition.getApiQueryTimeout() == null || apiDefinition.getApiQueryTimeout() <= 0 ? 
10 : apiDefinition.getApiQueryTimeout(), params(datasourceRequest)); + return checkApiDefinition(apiDefinition, response); + } + + private static ApiDefinition checkApiDefinition(ApiDefinition apiDefinition, String response) throws DEException { + if (StringUtils.isEmpty(response)) { + DEException.throwException("该请求返回数据为空"); + } + List> fields = new ArrayList<>(); + if (apiDefinition.isShowApiStructure() || !apiDefinition.isUseJsonPath()) { + String rootPath; + if (response.startsWith("[")) { + rootPath = "$[*]"; + JsonNode jsonArray = null; + try { + jsonArray = objectMapper.readTree(response); + } catch (Exception e) { + DEException.throwException(e); + } + for (Object o : jsonArray) { + handleStr(apiDefinition, o.toString(), fields, rootPath); + } + } else { + rootPath = "$"; + handleStr(apiDefinition, response, fields, rootPath); + } + previewNum(fields); + apiDefinition.setJsonFields(fields); + return apiDefinition; + } else { + List currentData = new ArrayList<>(); + try { + Object object = JsonPath.read(response, apiDefinition.getJsonPath()); + if (object instanceof List) { + currentData = (List) object; + } else { + currentData.add((LinkedHashMap) object); + } + } catch (Exception e) { + DEException.throwException(e); + } + int i = 0; + try { + LinkedHashMap data = currentData.get(0); + } catch (Exception e) { + DEException.throwException("数据不符合规范, " + e.getMessage()); + } + for (LinkedHashMap data : currentData) { + if (i >= apiDefinition.getPreviewNum()) { + break; + } + if (i == 0) { + for (Object o : data.keySet()) { + Map field = new HashMap<>(); + field.put("originName", o.toString()); + field.put("name", o.toString()); + field.put("type", "STRING"); + field.put("checked", true); + field.put("size", 65535); + field.put("deExtractType", 0); + field.put("deType", 0); + field.put("extField", 0); + fields.add(field); + } + } + for (Map field : fields) { + JSONArray array = new JSONArray(); + if (field.get("value") != null) { + try { + TypeReference listTypeReference = new TypeReference() { + }; + array = objectMapper.readValue(field.get("value").toString(), listTypeReference); + } catch (Exception e) { + e.printStackTrace(); + DEException.throwException(e); + } + array.add(Optional.ofNullable(data.get(field.get("originName"))).orElse("").toString().replaceAll("\n", " ").replaceAll("\r", " ")); + } else { + array.add(Optional.ofNullable(data.get(field.get("originName"))).orElse("").toString().replaceAll("\n", " ").replaceAll("\r", " ")); + } + field.put("value", array); + } + i++; + } + apiDefinition.setJsonFields(fields); + return apiDefinition; + } + } + + + private static void handleStr(ApiDefinition apiDefinition, String jsonStr, List> fields, String rootPath) throws DEException { + if (jsonStr.startsWith("[")) { + TypeReference> listTypeReference = new TypeReference>() { + }; + List jsonArray = null; + + try { + jsonArray = objectMapper.readValue(jsonStr, listTypeReference); + } catch (Exception e) { + DEException.throwException(e); + } + for (Object o : jsonArray) { + handleStr(apiDefinition, o.toString(), fields, rootPath); + } + } else { + JsonNode jsonNode = null; + try { + jsonNode = objectMapper.readTree(jsonStr); + } catch (Exception e) { + DEException.throwException(e); + } + Iterator fieldNames = jsonNode.fieldNames(); + while (fieldNames.hasNext()) { + String fieldName = fieldNames.next(); + String value = jsonNode.get(fieldName).toString(); + if (StringUtils.isNotEmpty(value) && !value.startsWith("[") && !value.startsWith("{")) { + value = 
jsonNode.get(fieldName).asText(); + } + if (StringUtils.isNotEmpty(value) && value.startsWith("[")) { + Map o = new HashMap<>(); + try { + JsonNode jsonArray = objectMapper.readTree(value); + List> childrenField = new ArrayList<>(); + for (JsonNode node : jsonArray) { + if (StringUtils.isNotEmpty(node.toString()) && !node.toString().startsWith("[") && !node.toString().startsWith("{")) { + throw new Exception(node + "is not json type"); + } + } + for (JsonNode node : jsonArray) { + handleStr(apiDefinition, node.toString(), childrenField, rootPath + "." + String.format(path, fieldName) + "[*]"); + } + o.put("children", childrenField); + o.put("childrenDataType", "LIST"); + } catch (Exception e) { + JSONArray array = new JSONArray(); + array.add(StringUtils.isNotEmpty(jsonNode.get(fieldName).toString()) ? jsonNode.get(fieldName).toString() : ""); + o.put("value", array); + } + o.put("jsonPath", rootPath + "." + String.format(path, fieldName)); + setProperty(apiDefinition, o, fieldName); + if (!hasItem(apiDefinition, fields, o)) { + fields.add(o); + } + } else if (StringUtils.isNotEmpty(value) && value.startsWith("{")) { + try { + JsonNode jsonNode1 = objectMapper.readTree(value); + List> children = new ArrayList<>(); + handleStr(apiDefinition, value, children, rootPath + "." + String.format(path, fieldName)); + Map o = new HashMap<>(); + o.put("children", children); + o.put("childrenDataType", "OBJECT"); + o.put("jsonPath", rootPath + "." + fieldName); + setProperty(apiDefinition, o, fieldName); + if (!hasItem(apiDefinition, fields, o)) { + fields.add(o); + } + } catch (Exception e) { + Map o = new HashMap<>(); + o.put("jsonPath", rootPath + "." + String.format(path, fieldName)); + setProperty(apiDefinition, o, fieldName); + JSONArray array = new JSONArray(); + array.add(StringUtils.isNotEmpty(value) ? value : ""); + o.put("value", array); + if (!hasItem(apiDefinition, fields, o)) { + fields.add(o); + } + } + } else { + Map o = new HashMap<>(); + o.put("jsonPath", rootPath + "." + String.format(path, fieldName)); + setProperty(apiDefinition, o, fieldName); + JSONArray array = new JSONArray(); + array.add(StringUtils.isNotEmpty(value) ? 
value : ""); + o.put("value", array); + if (!hasItem(apiDefinition, fields, o)) { + fields.add(o); + } + } + + } + } + } + + private static void setProperty(ApiDefinition apiDefinition, Map o, String s) { + o.put("originName", s); + o.put("name", s); + o.put("type", "STRING"); + o.put("size", 65535); + o.put("deExtractType", 0); + o.put("deType", 0); + o.put("checked", false); + if (!apiDefinition.isUseJsonPath()) { + for (TableField field : apiDefinition.getFields()) { + if (!ObjectUtils.isEmpty(o.get("jsonPath")) && StringUtils.isNotEmpty(field.getJsonPath()) && field.getJsonPath().equals(o.get("jsonPath").toString())) { + o.put("checked", true); + o.put("name", field.getName()); + o.put("primaryKey", field.isPrimaryKey()); + o.put("length", field.getLength()); + o.put("deExtractType", field.getDeExtractType()); + } + } + } + } + + private static boolean hasItem(ApiDefinition apiDefinition, List> fields, Map item) throws DEException { + boolean has = false; + for (Map field : fields) { + if (field.get("jsonPath").equals(item.get("jsonPath"))) { + has = true; + mergeField(field, item); + mergeValue(field, apiDefinition, item); + break; + } + } + + return has; + } + + + private static void mergeField(Map field, Map item) throws DEException { + if (item.get("children") != null) { + List> fieldChildren = null; + List> itemChildren = null; + try { + fieldChildren = objectMapper.readValue(JsonUtil.toJSONString(field.get("children")).toString(), listForMapTypeReference); + itemChildren = objectMapper.readValue(JsonUtil.toJSONString(item.get("children")).toString(), listForMapTypeReference); + } catch (Exception e) { + DEException.throwException(e); + } + if (fieldChildren == null) { + fieldChildren = new ArrayList<>(); + } + for (Map itemChild : itemChildren) { + boolean hasKey = false; + for (Map fieldChild : fieldChildren) { + if (itemChild.get("jsonPath").toString().equals(fieldChild.get("jsonPath").toString())) { + mergeField(fieldChild, itemChild); + hasKey = true; + } + } + if (!hasKey) { + fieldChildren.add(itemChild); + } + } + } + } + + private static void mergeValue(Map field, ApiDefinition apiDefinition, Map item) throws DEException { + TypeReference listTypeReference = new TypeReference() { + }; + try { + if (!ObjectUtils.isEmpty(field.get("value")) && !ObjectUtils.isEmpty(item.get("value"))) { + JSONArray array = objectMapper.readValue(JsonUtil.toJSONString(field.get("value")).toString(), listTypeReference); + array.add(objectMapper.readValue(JsonUtil.toJSONString(item.get("value")).toString(), listTypeReference).get(0)); + field.put("value", array); + } + if (!ObjectUtils.isEmpty(field.get("children")) && !ObjectUtils.isEmpty(item.get("children"))) { + List> fieldChildren = objectMapper.readValue(JsonUtil.toJSONString(field.get("children")).toString(), listForMapTypeReference); + List> itemChildren = objectMapper.readValue(JsonUtil.toJSONString(item.get("children")).toString(), listForMapTypeReference); + List> fieldArrayChildren = new ArrayList<>(); + for (Map fieldChild : fieldChildren) { + Map find = null; + for (Map itemChild : itemChildren) { + if (fieldChild.get("jsonPath").toString().equals(itemChild.get("jsonPath").toString())) { + find = itemChild; + } + } + if (find != null) { + mergeValue(fieldChild, apiDefinition, find); + } + fieldArrayChildren.add(fieldChild); + } + field.put("children", fieldArrayChildren); + } + } catch (Exception e) { + e.printStackTrace(); + DEException.throwException(e); + } + + } + + private static List fetchResult(String result, ApiDefinition 
apiDefinition) { + List dataList = new LinkedList<>(); + if (apiDefinition.isUseJsonPath()) { + List currentData = new ArrayList<>(); + Object object = JsonPath.read(result, apiDefinition.getJsonPath()); + if (object instanceof List) { + currentData = (List) object; + } else { + currentData.add((LinkedHashMap) object); + } + for (LinkedHashMap data : currentData) { + String[] row = new String[apiDefinition.getFields().size()]; + int i = 0; + for (TableField field : apiDefinition.getFields()) { + row[i] = Optional.ofNullable(data.get(field.getOriginName())).orElse("").toString().replaceAll("\n", " ").replaceAll("\r", " "); + i++; + } + dataList.add(row); + } + } else { + List jsonPaths = apiDefinition.getFields().stream().map(TableField::getJsonPath).collect(Collectors.toList()); + Long maxLength = 0l; + List> columnDataList = new ArrayList<>(); + for (int i = 0; i < jsonPaths.size(); i++) { + List data = new ArrayList<>(); + Object object = JsonPath.read(result, jsonPaths.get(i)); + if (object instanceof List && jsonPaths.get(i).contains("[*]")) { + data = (List) object; + } else { + if (object != null) { + data.add(object.toString()); + } + } + maxLength = maxLength > data.size() ? maxLength : data.size(); + columnDataList.add(data); + } + for (int i = 0; i < maxLength; i++) { + String[] row = new String[apiDefinition.getFields().size()]; + dataList.add(row); + } + for (int i = 0; i < columnDataList.size(); i++) { + for (int j = 0; j < columnDataList.get(i).size(); j++) { + dataList.get(j)[i] = Optional.ofNullable(String.valueOf(columnDataList.get(i).get(j))).orElse("").replaceAll("\n", " ").replaceAll("\r", " "); + } + } + } + return dataList; + } + + + private static List params(DatasourceRequest datasourceRequest) { + TypeReference> listTypeReference = new TypeReference>() { + }; + List apiDefinitionListTemp = JsonUtil.parseList(datasourceRequest.getDatasource().getConfiguration(), listTypeReference); + return apiDefinitionListTemp.stream().filter(apiDefinition -> apiDefinition != null && apiDefinition.getType() != null && apiDefinition.getType().equalsIgnoreCase("params")).collect(Collectors.toList()); + } + + private static ApiDefinition getApiDefinition(DatasourceRequest datasourceRequest) throws DEException { + List apiDefinitionList = new ArrayList<>(); + TypeReference> listTypeReference = new TypeReference>() { + }; + List apiDefinitionListTemp = JsonUtil.parseList(datasourceRequest.getDatasource().getConfiguration(), listTypeReference); + + if (!CollectionUtils.isEmpty(apiDefinitionListTemp)) { + for (ApiDefinition apiDefinition : apiDefinitionListTemp) { + if (apiDefinition == null || apiDefinition.getType() == null || apiDefinition.getType().equalsIgnoreCase("params")) { + continue; + } + if (apiDefinition.getDeTableName().equalsIgnoreCase(datasourceRequest.getTable()) || apiDefinition.getName().equalsIgnoreCase(datasourceRequest.getTable())) { + apiDefinitionList.add(apiDefinition); + } + + } + } + if (CollectionUtils.isEmpty(apiDefinitionList)) { + DEException.throwException("未找到API数据表"); + } + if (apiDefinitionList.size() > 1) { + DEException.throwException("存在重名的API数据表"); + } + ApiDefinition find = null; + for (ApiDefinition apiDefinition : apiDefinitionList) { + if (apiDefinition == null) { + continue; + } + if (apiDefinition.getName().equalsIgnoreCase(datasourceRequest.getTable()) || apiDefinition.getDeTableName().equalsIgnoreCase(datasourceRequest.getTable())) { + find = apiDefinition; + } + } + return find; + } + +} diff --git 
a/backend/src/main/java/com/stdproject/service/provider/CalciteProvider.java b/backend/src/main/java/com/stdproject/service/provider/CalciteProvider.java new file mode 100644 index 0000000..dcefeb8 --- /dev/null +++ b/backend/src/main/java/com/stdproject/service/provider/CalciteProvider.java @@ -0,0 +1,1460 @@ +package com.stdproject.service.provider; + +import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; +import com.jcraft.jsch.Session; +import com.stdproject.controller.EngineRequest; +import com.stdproject.entity.CoreDatasource; +import com.stdproject.entity.CoreDriver; +import com.stdproject.mapper.CoreDatasourceMapper; +import com.stdproject.service.manage.EngineManage; +import com.stdproject.service.type.*; +import com.stdproject.utils.FieldUtils; +import io.gisbi.constant.SQLConstants; +import io.gisbi.exception.DEException; +import io.gisbi.extensions.datasource.dto.*; +import io.gisbi.extensions.datasource.provider.DriverShim; +import io.gisbi.extensions.datasource.provider.ExtendedJdbcClassLoader; +import io.gisbi.extensions.datasource.provider.Provider; +import io.gisbi.extensions.datasource.vo.DatasourceConfiguration; +import io.gisbi.i18n.Translator; +import io.gisbi.utils.*; +import jakarta.annotation.PostConstruct; +import jakarta.annotation.Resource; +import org.apache.calcite.adapter.jdbc.JdbcSchema; +import org.apache.calcite.config.CalciteConnectionProperty; +import org.apache.calcite.config.Lex; +import org.apache.calcite.config.NullCollation; +import org.apache.calcite.jdbc.CalciteConnection; +import org.apache.calcite.schema.Schema; +import org.apache.calcite.schema.SchemaPlus; +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.parser.SqlParser; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.dbcp2.BasicDataSource; +import org.apache.commons.lang3.StringUtils; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +import java.io.File; +import java.io.IOException; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.*; +import java.util.*; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + + +@Component("calciteProvider") +public class CalciteProvider extends Provider { + + @Resource + protected CoreDatasourceMapper coreDatasourceMapper; + @Resource + private EngineManage engineManage; + protected ExtendedJdbcClassLoader extendedJdbcClassLoader; + private Map customJdbcClassLoaders = new HashMap<>(); + @Value("${gisbi.path.driver:/opt/gisbi/drivers}") + private String FILE_PATH; + @Value("${gisbi.path.custom-drivers:/opt/gisbi/custom-drivers/}") + private String CUSTOM_PATH; + private static String split = "DE"; + + @Resource + private CommonThreadPool commonThreadPool; + + @PostConstruct + public void init() throws Exception { + try { + String jarPath = FILE_PATH; + ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); + extendedJdbcClassLoader = new ExtendedJdbcClassLoader(new URL[]{new File(jarPath).toURI().toURL()}, classLoader); + File file = new File(jarPath); + File[] array = file.listFiles(); + Optional.ofNullable(array).ifPresent(files -> { + for (File tmp : array) { + if (tmp.getName().endsWith(".jar")) { + try { + extendedJdbcClassLoader.addFile(tmp); + } catch (IOException e) { + e.printStackTrace(); + } + } + } + }); + } catch (Exception e) { + + } + } + + @Override + public List getSchema(DatasourceRequest datasourceRequest) { + List 
schemas = new ArrayList<>(); + String queryStr = getSchemaSql(datasourceRequest.getDatasource()); + try (ConnectionObj con = getConnection(datasourceRequest.getDatasource()); Statement statement = getStatement(con.getConnection(), 30); ResultSet resultSet = statement.executeQuery(queryStr)) { + while (resultSet.next()) { + schemas.add(resultSet.getString(1)); + } + } catch (Exception e) { + DEException.throwException(e.getMessage()); + } + return schemas; + } + + @Override + public String checkStatus(DatasourceRequest datasourceRequest) throws Exception { + DatasourceConfiguration.DatasourceType datasourceType = DatasourceConfiguration.DatasourceType.valueOf(datasourceRequest.getDatasource().getType()); + switch (datasourceType) { + case pg: + DatasourceConfiguration configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Pg.class); + List schemas = getSchema(datasourceRequest); + if (CollectionUtils.isEmpty(schemas) || !schemas.contains(configuration.getSchema())) { + DEException.throwException("无效的 schema!"); + } + break; + default: + break; + } + + try (ConnectionObj con = getConnection(datasourceRequest.getDatasource())) { + datasourceRequest.setDsVersion(con.getConnection().getMetaData().getDatabaseMajorVersion()); + String querySql = getTablesSql(datasourceRequest).get(0); + Statement statement = getStatement(con.getConnection(), 30); + ResultSet resultSet = statement.executeQuery(querySql); + if (resultSet != null) { + resultSet.close(); + } + if (statement != null) { + statement.close(); + } + } catch (Exception e) { + throw e; + } + return "Success"; + } + + @Override + public List getTables(DatasourceRequest datasourceRequest) { + List tables = new ArrayList<>(); + try (Connection con = getConnectionFromPool(datasourceRequest.getDatasource().getId()); Statement statement = getStatement(con, 30)) { + datasourceRequest.setDsVersion(con.getMetaData().getDatabaseMajorVersion()); + List tablesSqls = getTablesSql(datasourceRequest); + for (String tablesSql : tablesSqls) { + ResultSet resultSet = statement.executeQuery(tablesSql); + while (resultSet.next()) { + tables.add(getTableDesc(datasourceRequest, resultSet)); + } + } + } catch (Exception e) { + DEException.throwException(e.getMessage()); + } + return tables; + } + + @Override + public Map fetchResultField(DatasourceRequest datasourceRequest) throws DEException { + // 不跨数据源 + if (datasourceRequest.getDsList().size() == 1) { + return jdbcFetchResultField(datasourceRequest); + } + + List datasetTableFields = new ArrayList<>(); + List list = new LinkedList<>(); + PreparedStatement statement = null; + ResultSet resultSet = null; + Connection connection = take(); + try { + CalciteConnection calciteConnection = connection.unwrap(CalciteConnection.class); + statement = calciteConnection.prepareStatement(datasourceRequest.getQuery()); + resultSet = statement.executeQuery(); + ResultSetMetaData metaData = resultSet.getMetaData(); + int columnCount = metaData.getColumnCount(); + for (int i = 1; i <= columnCount; i++) { + TableField tableField = new TableField(); + tableField.setOriginName(metaData.getColumnLabel(i)); + tableField.setType(metaData.getColumnTypeName(i)); + tableField.setPrecision(metaData.getPrecision(i)); + int deType = FieldUtils.transType2DeType(tableField.getType()); + tableField.setDeExtractType(deType); + tableField.setDeType(deType); + tableField.setScale(metaData.getScale(i)); + datasetTableFields.add(tableField); + } + list = getDataResult(resultSet); + } catch (Exception | 
AssertionError e) { + String msg; + if (e.getCause() != null && e.getCause().getCause() != null) { + msg = e.getMessage() + " [" + e.getCause().getCause().getMessage() + "]"; + } else { + msg = e.getMessage(); + } + DEException.throwException(Translator.get("i18n_fetch_error") + msg); + } finally { + try { + if (resultSet != null) resultSet.close(); + if (statement != null) statement.close(); + } catch (Exception e) { + } + } + Map map = new LinkedHashMap<>(); + map.put("fields", datasetTableFields); + map.put("data", list); + return map; + } + + @Override + public String transSqlDialect(String sql, Map dsMap) throws DEException { + DatasourceSchemaDTO value = dsMap.entrySet().iterator().next().getValue(); + try (Connection connection = getConnectionFromPool(value.getId());) { + // 获取数据库version + if (connection != null) { + value.setDsVersion(connection.getMetaData().getDatabaseMajorVersion()); + } + SqlParser parser = SqlParser.create(sql, SqlParser.Config.DEFAULT.withLex(Lex.JAVA)); + SqlNode sqlNode = parser.parseStmt(); + return sqlNode.toSqlString(getDialect(value)).toString(); + } catch (Exception e) { + DEException.throwException(e.getMessage()); + } + return null; + } + + private List fetchResultField(ResultSet rs) throws Exception { + List fieldList = new ArrayList<>(); + ResultSetMetaData metaData = rs.getMetaData(); + int columnCount = metaData.getColumnCount(); + for (int j = 0; j < columnCount; j++) { + String columnName = metaData.getColumnName(j + 1); + String label = StringUtils.isNotEmpty(metaData.getColumnLabel(j + 1)) ? metaData.getColumnLabel(j + 1) : columnName; + TableField tableField = new TableField(); + tableField.setOriginName(columnName); + tableField.setName(label); + tableField.setType(metaData.getColumnTypeName(j + 1)); + tableField.setFieldType(tableField.getType()); + int deType = FieldUtils.transType2DeType(tableField.getType()); + tableField.setDeExtractType(deType); + tableField.setDeType(deType); + fieldList.add(tableField); + } + return fieldList; + } + + @Override + public List fetchTableField(DatasourceRequest datasourceRequest) throws DEException { + List datasetTableFields = new ArrayList<>(); + DatasourceSchemaDTO datasourceSchemaDTO = datasourceRequest.getDsList().entrySet().iterator().next().getValue(); + datasourceRequest.setDatasource(datasourceSchemaDTO); + + DatasourceConfiguration datasourceConfiguration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), DatasourceConfiguration.class); + + String table = datasourceRequest.getTable(); + if (StringUtils.isEmpty(table)) { + ResultSet resultSet = null; + try (Connection con = getConnectionFromPool(datasourceRequest.getDatasource().getId()); Statement statement = getStatement(con, 30)) { + if (DatasourceConfiguration.DatasourceType.valueOf(datasourceSchemaDTO.getType()) == DatasourceConfiguration.DatasourceType.oracle) { + statement.executeUpdate("ALTER SESSION SET CURRENT_SCHEMA = " + datasourceConfiguration.getSchema()); + } + resultSet = statement.executeQuery(datasourceRequest.getQuery()); + datasetTableFields.addAll(getField(resultSet, datasourceRequest)); + } catch (Exception e) { + DEException.throwException(e.getMessage()); + } finally { + if (resultSet != null) { + try { + resultSet.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + } else { + ResultSet resultSet = null; + try (Connection con = getConnectionFromPool(datasourceRequest.getDatasource().getId()); Statement statement = getStatement(con, 30)) { + 
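// --- Editor's note (illustrative sketch, not part of the patch) ---------------------
// fetchTableField() and the getField()/fetchResultField() helpers in this class follow
// the same JDBC pattern: execute a query, then walk ResultSetMetaData column by column,
// preferring the column label (alias) over the raw column name. A stripped-down sketch
// of that walk, returning plain String[] triples instead of the project's TableField,
// is shown below; it reuses the java.sql.* / java.util.* / StringUtils imports already
// present in this file, and "con" and "sql" are assumed to be supplied by the caller.
static List<String[]> describeColumns(Connection con, String sql) throws SQLException {
    List<String[]> columns = new ArrayList<>();
    try (Statement st = con.createStatement(); ResultSet rs = st.executeQuery(sql)) {
        ResultSetMetaData md = rs.getMetaData();
        for (int i = 1; i <= md.getColumnCount(); i++) {
            String name = md.getColumnName(i);
            // fall back to the physical column name when no alias/label is present
            String label = StringUtils.isNotEmpty(md.getColumnLabel(i)) ? md.getColumnLabel(i) : name;
            columns.add(new String[]{name, label, md.getColumnTypeName(i)});
        }
    }
    return columns;
}
// -------------------------------------------------------------------------------------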
datasourceRequest.setDsVersion(con.getMetaData().getDatabaseMajorVersion()); + if (datasourceRequest.getDatasource().getType().equalsIgnoreCase("mongo")) { + resultSet = statement.executeQuery("select * from " + String.format(" `%s`", table) + " limit 0 offset 0 "); + return fetchResultField(resultSet); + } + if (isDorisCatalog(datasourceRequest)) { + resultSet = statement.executeQuery("desc " + String.format(" `%s`", table)); + } else { + resultSet = statement.executeQuery(getTableFiledSql(datasourceRequest)); + } + while (resultSet.next()) { + TableField tableFieldDesc = getTableFieldDesc(datasourceRequest, resultSet, 3); + boolean repeat = false; + for (TableField ele : datasetTableFields) { + if (StringUtils.equalsIgnoreCase(ele.getOriginName(), tableFieldDesc.getOriginName())) { + repeat = true; + break; + } + } + if (!repeat) { + datasetTableFields.add(tableFieldDesc); + } + } + } catch (Exception e) { + DEException.throwException(e.getMessage()); + } finally { + if (resultSet != null) { + try { + resultSet.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + } + + return datasetTableFields; + } + + private boolean isDorisCatalog(DatasourceRequest datasourceRequest) { + if (!datasourceRequest.getDatasource().getType().equalsIgnoreCase("doris")) { + return false; + } + DatasourceConfiguration configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Mysql.class); + String database = ""; + if (StringUtils.isEmpty(configuration.getUrlType()) || configuration.getUrlType().equalsIgnoreCase("hostName")) { + database = configuration.getDataBase(); + } else { + Pattern WITH_SQL_FRAGMENT = Pattern.compile("jdbc:mysql://(.*):(\\d+)/(.*)"); + Matcher matcher = WITH_SQL_FRAGMENT.matcher(configuration.getJdbcUrl()); + matcher.find(); + String[] databasePrams = matcher.group(3).split("\\?"); + database = databasePrams[0]; + } + return database.contains("."); + } + + @Override + public ConnectionObj getConnection(DatasourceDTO coreDatasource) throws Exception { + ConnectionObj connectionObj = new ConnectionObj(); + DatasourceConfiguration configuration = null; + DatasourceConfiguration.DatasourceType datasourceType = DatasourceConfiguration.DatasourceType.valueOf(coreDatasource.getType()); + switch (datasourceType) { + case mysql: + case mongo: + case StarRocks: + case doris: + case TiDB: + case mariadb: + configuration = JsonUtil.parseObject(coreDatasource.getConfiguration(), Mysql.class); + break; + case impala: + configuration = JsonUtil.parseObject(coreDatasource.getConfiguration(), Impala.class); + break; + case sqlServer: + configuration = JsonUtil.parseObject(coreDatasource.getConfiguration(), Sqlserver.class); + break; + case oracle: + configuration = JsonUtil.parseObject(coreDatasource.getConfiguration(), Oracle.class); + break; + case db2: + configuration = JsonUtil.parseObject(coreDatasource.getConfiguration(), Db2.class); + break; + case pg: + configuration = JsonUtil.parseObject(coreDatasource.getConfiguration(), Pg.class); + break; + case redshift: + configuration = JsonUtil.parseObject(coreDatasource.getConfiguration(), Redshift.class); + break; + case h2: + configuration = JsonUtil.parseObject(coreDatasource.getConfiguration(), H2.class); + break; + case ck: + configuration = JsonUtil.parseObject(coreDatasource.getConfiguration(), CK.class); + break; + default: + configuration = JsonUtil.parseObject(coreDatasource.getConfiguration(), Mysql.class); + } + startSshSession(configuration, connectionObj, null); + Properties props = 
new Properties(); + if (StringUtils.isNotBlank(configuration.getUsername())) { + props.setProperty("user", configuration.getUsername()); + } + if (StringUtils.isNotBlank(configuration.getPassword())) { + props.setProperty("password", configuration.getPassword()); + } + String driverClassName = configuration.getDriver(); + ExtendedJdbcClassLoader jdbcClassLoader = extendedJdbcClassLoader; + Connection conn = null; + try { + Driver driverClass = (Driver) jdbcClassLoader.loadClass(driverClassName).newInstance(); + conn = driverClass.connect(configuration.getJdbc(), props); + + } catch (Exception e) { + DEException.throwException(e.getMessage()); + } + connectionObj.setConnection(conn); + return connectionObj; + } + + private DatasetTableDTO getTableDesc(DatasourceRequest datasourceRequest, ResultSet resultSet) throws SQLException { + DatasetTableDTO tableDesc = new DatasetTableDTO(); + tableDesc.setDatasourceId(datasourceRequest.getDatasource().getId()); + tableDesc.setType("db"); + tableDesc.setTableName(resultSet.getString(1)); + if (resultSet.getMetaData().getColumnCount() > 1) { + tableDesc.setName(resultSet.getString(2)); + } else { + tableDesc.setName(resultSet.getString(1)); + } + return tableDesc; + } + + private List getDriver() { + List drivers = new ArrayList<>(); + Map beansOfType = CommonBeanFactory.getApplicationContext().getBeansOfType((DatasourceConfiguration.class)); + beansOfType.keySet().forEach(key -> drivers.add(beansOfType.get(key).getDriver())); + return drivers; + } + + public Map jdbcFetchResultField(DatasourceRequest datasourceRequest) throws DEException { + DatasourceSchemaDTO value = datasourceRequest.getDsList().entrySet().iterator().next().getValue(); + datasourceRequest.setDatasource(value); + + DatasourceConfiguration datasourceConfiguration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), DatasourceConfiguration.class); + + Map map = new LinkedHashMap<>(); + List fieldList = new ArrayList<>(); + List dataList = new LinkedList<>(); + + // schema + ResultSet resultSet = null; + try (Connection con = getConnectionFromPool(datasourceRequest.getDatasource().getId()); Statement statement = getPreparedStatement(con, datasourceConfiguration.getQueryTimeout(), datasourceRequest.getQuery(), datasourceRequest.getTableFieldWithValues())) { + if (DatasourceConfiguration.DatasourceType.valueOf(value.getType()) == DatasourceConfiguration.DatasourceType.oracle) { + statement.executeUpdate("ALTER SESSION SET CURRENT_SCHEMA = " + datasourceConfiguration.getSchema()); + } + + if (CollectionUtils.isNotEmpty(datasourceRequest.getTableFieldWithValues())) { + LogUtil.info("execWithPreparedStatement sql: " + datasourceRequest.getQuery()); + for (int i = 0; i < datasourceRequest.getTableFieldWithValues().size(); i++) { + ((PreparedStatement) statement).setObject(i + 1, datasourceRequest.getTableFieldWithValues().get(i).getValue(), datasourceRequest.getTableFieldWithValues().get(i).getType()); + LogUtil.info("execWithPreparedStatement param[" + (i + 1) + "]: " + datasourceRequest.getTableFieldWithValues().get(i).getValue()); + } + resultSet = ((PreparedStatement) statement).executeQuery(); + } else { + resultSet = statement.executeQuery(datasourceRequest.getQuery()); + } + fieldList = getField(resultSet, datasourceRequest); + dataList = getData(resultSet, datasourceRequest); + } catch (SQLException e) { + DEException.throwException("SQL ERROR: " + e.getMessage()); + } catch (Exception e) { + DEException.throwException("Datasource connection exception: " + 
e.getMessage()); + } finally { + if (resultSet != null) { + try { + resultSet.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + + map.put("fields", fieldList); + map.put("data", dataList); + return map; + } + + @Override + public void exec(DatasourceRequest datasourceRequest) throws DEException { + DatasourceSchemaDTO value = datasourceRequest.getDsList().entrySet().iterator().next().getValue(); + datasourceRequest.setDatasource(value); + DatasourceConfiguration datasourceConfiguration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), DatasourceConfiguration.class); + // schema + ResultSet resultSet = null; + try (Connection con = getConnectionFromPool(datasourceRequest.getDatasource().getId()); Statement statement = getPreparedStatement(con, datasourceConfiguration.getQueryTimeout(), datasourceRequest.getQuery(), datasourceRequest.getTableFieldWithValues())) { + if (DatasourceConfiguration.DatasourceType.valueOf(value.getType()) == DatasourceConfiguration.DatasourceType.oracle) { + statement.executeUpdate("ALTER SESSION SET CURRENT_SCHEMA = " + datasourceConfiguration.getSchema()); + } + if (CollectionUtils.isNotEmpty(datasourceRequest.getTableFieldWithValues())) { + LogUtil.info("execWithPreparedStatement sql: " + datasourceRequest.getQuery()); + for (int i = 0; i < datasourceRequest.getTableFieldWithValues().size(); i++) { + ((PreparedStatement) statement).setObject(i + 1, datasourceRequest.getTableFieldWithValues().get(i).getValue(), datasourceRequest.getTableFieldWithValues().get(i).getType()); + LogUtil.info("execWithPreparedStatement param[" + (i + 1) + "]: " + datasourceRequest.getTableFieldWithValues().get(i).getValue()); + } + ((PreparedStatement) statement).execute(); + } else { + statement.execute(datasourceRequest.getQuery()); + } + + } catch (SQLException e) { + DEException.throwException("SQL ERROR: " + e.getMessage()); + } catch (Exception e) { + DEException.throwException("Datasource connection exception: " + e.getMessage()); + } finally { + if (resultSet != null) { + try { + resultSet.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + } + + @Override + public int executeUpdate(DatasourceRequest datasourceRequest) throws DEException { + DatasourceSchemaDTO value = datasourceRequest.getDsList().entrySet().iterator().next().getValue(); + datasourceRequest.setDatasource(value); + DatasourceConfiguration datasourceConfiguration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), DatasourceConfiguration.class); + // schema + ResultSet resultSet = null; + try (Connection con = getConnectionFromPool(datasourceRequest.getDatasource().getId()); Statement statement = getPreparedStatement(con, datasourceConfiguration.getQueryTimeout(), datasourceRequest.getQuery(), datasourceRequest.getTableFieldWithValues())) { + if (DatasourceConfiguration.DatasourceType.valueOf(value.getType()) == DatasourceConfiguration.DatasourceType.oracle) { + statement.executeUpdate("ALTER SESSION SET CURRENT_SCHEMA = " + datasourceConfiguration.getSchema()); + } + + if (CollectionUtils.isNotEmpty(datasourceRequest.getTableFieldWithValues())) { + LogUtil.info("execWithPreparedStatement sql: " + datasourceRequest.getQuery()); + for (int i = 0; i < datasourceRequest.getTableFieldWithValues().size(); i++) { + ((PreparedStatement) statement).setObject(i + 1, datasourceRequest.getTableFieldWithValues().get(i).getValue(), datasourceRequest.getTableFieldWithValues().get(i).getType()); + 
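// --- Editor's note (illustrative sketch, not part of the patch) ---------------------
// The prepared-statement branches in exec()/executeUpdate()/jdbcFetchResultField() all
// bind TableFieldWithValue entries positionally via setObject(index, value, sqlType).
// Reduced to plain JDBC, the pattern looks like the sketch below; the helper name and
// the table/column names in the SQL are assumptions for illustration only.
static int updateStatusSketch(Connection con, long id, String status) throws SQLException {
    String sql = "UPDATE core_datasource SET status = ? WHERE id = ?";
    try (PreparedStatement ps = con.prepareStatement(sql)) {
        // an explicit java.sql.Types target tells the driver how to coerce each value
        ps.setObject(1, status, Types.VARCHAR);
        ps.setObject(2, id, Types.BIGINT);
        return ps.executeUpdate(); // affected row count
    }
}
// -------------------------------------------------------------------------------------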
LogUtil.info("execWithPreparedStatement param[" + (i + 1) + "]: " + datasourceRequest.getTableFieldWithValues().get(i).getValue()); + } + return ((PreparedStatement) statement).executeUpdate(); + } else { + return statement.executeUpdate(datasourceRequest.getQuery()); + } + + } catch (SQLException e) { + DEException.throwException("SQL ERROR: " + e.getMessage()); + } catch (Exception e) { + DEException.throwException("Datasource connection exception: " + e.getMessage()); + } finally { + if (resultSet != null) { + try { + resultSet.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + + return 0; + } + + private List getField(ResultSet rs, DatasourceRequest datasourceRequest) throws Exception { + List fieldList = new ArrayList<>(); + ResultSetMetaData metaData = rs.getMetaData(); + int columnCount = metaData.getColumnCount(); + for (int j = 0; j < columnCount; j++) { + String f = metaData.getColumnName(j + 1); + if (StringUtils.equalsIgnoreCase(f, "DE_ROWNUM")) { + continue; + } + String l = StringUtils.isNotEmpty(metaData.getColumnLabel(j + 1)) ? metaData.getColumnLabel(j + 1) : f; + String t = metaData.getColumnTypeName(j + 1).toUpperCase(); + TableField field = new TableField(); + field.setOriginName(l); + field.setName(l); + field.setFieldType(t); + field.setType(t); + fieldList.add(field); + } + return fieldList; + } + + private List getData(ResultSet rs, DatasourceRequest datasourceRequest) throws Exception { + String charset = null; + String targetCharset = "UTF-8"; + if (datasourceRequest != null && datasourceRequest.getDatasource().getType().equalsIgnoreCase("oracle")) { + DatasourceConfiguration jdbcConfiguration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), DatasourceConfiguration.class); + + if (StringUtils.isNotEmpty(jdbcConfiguration.getCharset()) && !jdbcConfiguration.getCharset().equalsIgnoreCase("Default")) { + charset = jdbcConfiguration.getCharset(); + } + if (StringUtils.isNotEmpty(jdbcConfiguration.getTargetCharset()) && !jdbcConfiguration.getTargetCharset().equalsIgnoreCase("Default")) { + targetCharset = jdbcConfiguration.getTargetCharset(); + } + } + List list = new LinkedList<>(); + ResultSetMetaData metaData = rs.getMetaData(); + int columnCount = metaData.getColumnCount(); + while (rs.next()) { + String[] row = new String[columnCount]; + for (int j = 0; j < columnCount; j++) { + int columnType = metaData.getColumnType(j + 1); + switch (columnType) { + case Types.DATE: + if (rs.getDate(j + 1) != null) { + row[j] = rs.getDate(j + 1).toString(); + } + break; + case Types.BOOLEAN: + row[j] = rs.getBoolean(j + 1) ? "1" : "0"; + break; + case Types.NUMERIC: + BigDecimal bigDecimal = rs.getBigDecimal(j + 1); + row[j] = bigDecimal == null ? null : bigDecimal.toString(); + break; + default: + if (metaData.getColumnTypeName(j + 1).toLowerCase().equalsIgnoreCase("blob")) { + row[j] = rs.getBlob(j + 1) == null ? 
"" : rs.getBlob(j + 1).toString(); + } else { + if (charset != null && StringUtils.isNotEmpty(rs.getString(j + 1))) { + String originStr = new String(rs.getString(j + 1).getBytes(charset), targetCharset); + row[j] = new String(originStr.getBytes("UTF-8"), "UTF-8"); + } else { + row[j] = rs.getString(j + 1); + } + } + + break; + } + } + list.add(row); + } + return list; + } + + @Override + public void hidePW(DatasourceDTO datasourceDTO) { + DatasourceConfiguration configuration = null; + DatasourceConfiguration.DatasourceType datasourceType = DatasourceConfiguration.DatasourceType.valueOf(datasourceDTO.getType()); + switch (datasourceType) { + case mysql: + case mongo: + case mariadb: + case TiDB: + case StarRocks: + case doris: + configuration = JsonUtil.parseObject(datasourceDTO.getConfiguration(), Mysql.class); + if (StringUtils.isNotEmpty(configuration.getUrlType()) && configuration.getUrlType().equalsIgnoreCase("jdbcUrl")) { + if (configuration.getJdbcUrl().contains("password=")) { + String[] params = configuration.getJdbcUrl().split("\\?")[1].split("&"); + String pd = ""; + for (int i = 0; i < params.length; i++) { + if (params[i].contains("password=")) { + pd = params[i]; + } + } + configuration.setJdbcUrl(configuration.getJdbcUrl().replace(pd, "password=******")); + datasourceDTO.setConfiguration(JsonUtil.toJSONString(configuration).toString()); + } + } + break; + case pg: + configuration = JsonUtil.parseObject(datasourceDTO.getConfiguration(), Pg.class); + if (StringUtils.isNotEmpty(configuration.getUrlType()) && configuration.getUrlType().equalsIgnoreCase("jdbcUrl")) { + if (configuration.getJdbcUrl().contains("password=")) { + String[] params = configuration.getJdbcUrl().split("\\?")[1].split("&"); + String pd = ""; + for (int i = 0; i < params.length; i++) { + if (params[i].contains("password=")) { + pd = params[i]; + } + } + configuration.setJdbcUrl(configuration.getJdbcUrl().replace(pd, "password=******")); + datasourceDTO.setConfiguration(JsonUtil.toJSONString(configuration).toString()); + } + } + break; + case redshift: + configuration = JsonUtil.parseObject(datasourceDTO.getConfiguration(), Redshift.class); + if (StringUtils.isNotEmpty(configuration.getUrlType()) && configuration.getUrlType().equalsIgnoreCase("jdbcUrl")) { + if (configuration.getJdbcUrl().contains("password=")) { + String[] params = configuration.getJdbcUrl().split("\\?")[1].split("&"); + String pd = ""; + for (int i = 0; i < params.length; i++) { + if (params[i].contains("password=")) { + pd = params[i]; + } + } + configuration.setJdbcUrl(configuration.getJdbcUrl().replace(pd, "password=******")); + datasourceDTO.setConfiguration(JsonUtil.toJSONString(configuration).toString()); + } + } + break; + case ck: + configuration = JsonUtil.parseObject(datasourceDTO.getConfiguration(), CK.class); + if (StringUtils.isNotEmpty(configuration.getUrlType()) && configuration.getUrlType().equalsIgnoreCase("jdbcUrl")) { + if (configuration.getJdbcUrl().contains("password=")) { + String[] params = configuration.getJdbcUrl().split("\\?")[1].split("&"); + String pd = ""; + for (int i = 0; i < params.length; i++) { + if (params[i].contains("password=")) { + pd = params[i]; + } + } + configuration.setJdbcUrl(configuration.getJdbcUrl().replace(pd, "password=******")); + datasourceDTO.setConfiguration(JsonUtil.toJSONString(configuration).toString()); + } + } + break; + case impala: + configuration = JsonUtil.parseObject(datasourceDTO.getConfiguration(), Impala.class); + if (StringUtils.isNotEmpty(configuration.getUrlType()) && 
configuration.getUrlType().equalsIgnoreCase("jdbcUrl")) { + if (configuration.getJdbcUrl().contains("password=")) { + String[] params = configuration.getJdbcUrl().split(";"); + String pd = ""; + for (int i = 0; i < params.length; i++) { + if (params[i].contains("password=")) { + pd = params[i]; + } + } + configuration.setJdbcUrl(configuration.getJdbcUrl().replace(pd, "password=******")); + datasourceDTO.setConfiguration(JsonUtil.toJSONString(configuration).toString()); + } + } + break; + default: + break; + } + } + + private TableField getTableFieldDesc(DatasourceRequest datasourceRequest, ResultSet resultSet, int commentIndex) throws SQLException { + TableField tableField = new TableField(); + tableField.setOriginName(resultSet.getString(1)); + tableField.setType(resultSet.getString(2).toUpperCase()); + tableField.setFieldType(tableField.getType()); + int deType = FieldUtils.transType2DeType(tableField.getType()); + tableField.setDeExtractType(deType); + tableField.setDeType(deType); + tableField.setName(resultSet.getString(commentIndex)); + try { + tableField.setPrimary(resultSet.getInt(4) > 0); + } catch (Exception e) { + } + return tableField; + } + + public Connection initConnection(Map dsMap) throws SQLException { + Connection connection = take(); + CalciteConnection calciteConnection = null; + calciteConnection = connection.unwrap(CalciteConnection.class); + DatasourceRequest datasourceRequest = new DatasourceRequest(); + datasourceRequest.setDsList(dsMap); + buildSchema(datasourceRequest, calciteConnection); + return connection; + } + + private void registerDriver() { + for (String driverClass : getDriver()) { + try { + Driver driver = (Driver) extendedJdbcClassLoader.loadClass(driverClass).newInstance(); + DriverManager.registerDriver(new DriverShim(driver)); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + + private Connection getCalciteConnection() { + registerDriver(); + Properties info = new Properties(); + info.setProperty(CalciteConnectionProperty.LEX.camelName(), "JAVA"); + info.setProperty(CalciteConnectionProperty.FUN.camelName(), "all"); + info.setProperty(CalciteConnectionProperty.CASE_SENSITIVE.camelName(), "false"); + info.setProperty(CalciteConnectionProperty.PARSER_FACTORY.camelName(), "org.apache.calcite.sql.parser.impl.SqlParserImpl#FACTORY"); + info.setProperty(CalciteConnectionProperty.DEFAULT_NULL_COLLATION.camelName(), NullCollation.LAST.name()); + info.setProperty("remarks", "true"); + Connection connection = null; + try { + Class.forName("org.apache.calcite.jdbc.Driver"); + connection = DriverManager.getConnection("jdbc:calcite:", info); + } catch (Exception e) { + DEException.throwException(e.getMessage()); + } + return connection; + } + + // 构建root schema + private SchemaPlus buildSchema(DatasourceRequest datasourceRequest, CalciteConnection calciteConnection) { + SchemaPlus rootSchema = calciteConnection.getRootSchema(); + Map dsList = datasourceRequest.getDsList(); + for (Map.Entry next : dsList.entrySet()) { + DatasourceSchemaDTO ds = next.getValue(); + commonThreadPool.addTask(() -> { + try { + BasicDataSource dataSource = new BasicDataSource(); + Schema schema = null; + DatasourceConfiguration configuration = null; + DatasourceConfiguration.DatasourceType datasourceType = DatasourceConfiguration.DatasourceType.valueOf(ds.getType()); + try { + if (rootSchema.getSubSchema(ds.getSchemaAlias()) != null) { + JdbcSchema jdbcSchema = rootSchema.getSubSchema(ds.getSchemaAlias()).unwrap(JdbcSchema.class); + BasicDataSource basicDataSource = 
(BasicDataSource) jdbcSchema.getDataSource(); + basicDataSource.close(); + rootSchema.removeSubSchema(ds.getSchemaAlias()); + } + switch (datasourceType) { + case mysql: + case mongo: + case mariadb: + case TiDB: + case StarRocks: + case doris: + configuration = JsonUtil.parseObject(ds.getConfiguration(), Mysql.class); + if (StringUtils.isNotBlank(configuration.getUsername())) { + dataSource.setUsername(configuration.getUsername()); + } + if (StringUtils.isNotBlank(configuration.getPassword())) { + dataSource.setPassword(configuration.getPassword()); + } + dataSource.setDefaultQueryTimeout(Integer.valueOf(configuration.getQueryTimeout())); + dataSource.setInitialSize(configuration.getInitialPoolSize()); + dataSource.setMaxTotal(configuration.getMaxPoolSize()); + dataSource.setMinIdle(configuration.getMinPoolSize()); + startSshSession(configuration, null, ds.getId()); + dataSource.setUrl(configuration.getJdbc()); + schema = JdbcSchema.create(rootSchema, ds.getSchemaAlias(), dataSource, null, configuration.getDataBase()); + rootSchema.add(ds.getSchemaAlias(), schema); + break; + case impala: + configuration = JsonUtil.parseObject(ds.getConfiguration(), Impala.class); + if (StringUtils.isNotBlank(configuration.getUsername())) { + dataSource.setUsername(configuration.getUsername()); + } + if (StringUtils.isNotBlank(configuration.getPassword())) { + dataSource.setPassword(configuration.getPassword()); + } + dataSource.setInitialSize(configuration.getInitialPoolSize()); + dataSource.setMaxTotal(configuration.getMaxPoolSize()); + dataSource.setMinIdle(configuration.getMinPoolSize()); + dataSource.setDefaultQueryTimeout(Integer.valueOf(configuration.getQueryTimeout())); + startSshSession(configuration, null, ds.getId()); + dataSource.setUrl(configuration.getJdbc()); + schema = JdbcSchema.create(rootSchema, ds.getSchemaAlias(), dataSource, null, configuration.getDataBase()); + rootSchema.add(ds.getSchemaAlias(), schema); + break; + case sqlServer: + configuration = JsonUtil.parseObject(ds.getConfiguration(), Sqlserver.class); + if (StringUtils.isNotBlank(configuration.getUsername())) { + dataSource.setUsername(configuration.getUsername()); + } + if (StringUtils.isNotBlank(configuration.getPassword())) { + dataSource.setPassword(configuration.getPassword()); + } + dataSource.setInitialSize(configuration.getInitialPoolSize()); + dataSource.setMaxTotal(configuration.getMaxPoolSize()); + dataSource.setMinIdle(configuration.getMinPoolSize()); + dataSource.setDefaultQueryTimeout(Integer.valueOf(configuration.getQueryTimeout())); + startSshSession(configuration, null, ds.getId()); + dataSource.setUrl(configuration.getJdbc()); + schema = JdbcSchema.create(rootSchema, ds.getSchemaAlias(), dataSource, null, configuration.getSchema()); + rootSchema.add(ds.getSchemaAlias(), schema); + break; + case oracle: + configuration = JsonUtil.parseObject(ds.getConfiguration(), Oracle.class); + if (StringUtils.isNotBlank(configuration.getUsername())) { + dataSource.setUsername(configuration.getUsername()); + } + if (StringUtils.isNotBlank(configuration.getPassword())) { + dataSource.setPassword(configuration.getPassword()); + } + dataSource.setInitialSize(configuration.getInitialPoolSize()); + dataSource.setMaxTotal(configuration.getMaxPoolSize()); + dataSource.setMinIdle(configuration.getMinPoolSize()); + dataSource.setDefaultQueryTimeout(Integer.valueOf(configuration.getQueryTimeout())); + startSshSession(configuration, null, ds.getId()); + dataSource.setUrl(configuration.getJdbc()); + schema = 
JdbcSchema.create(rootSchema, ds.getSchemaAlias(), dataSource, null, configuration.getSchema()); + rootSchema.add(ds.getSchemaAlias(), schema); + break; + case db2: + configuration = JsonUtil.parseObject(ds.getConfiguration(), Db2.class); + if (StringUtils.isNotBlank(configuration.getUsername())) { + dataSource.setUsername(configuration.getUsername()); + } + if (StringUtils.isNotBlank(configuration.getPassword())) { + dataSource.setPassword(configuration.getPassword()); + } + dataSource.setInitialSize(configuration.getInitialPoolSize()); + dataSource.setMaxTotal(configuration.getMaxPoolSize()); + dataSource.setMinIdle(configuration.getMinPoolSize()); + dataSource.setDefaultQueryTimeout(Integer.valueOf(configuration.getQueryTimeout())); + startSshSession(configuration, null, ds.getId()); + dataSource.setUrl(configuration.getJdbc()); + schema = JdbcSchema.create(rootSchema, ds.getSchemaAlias(), dataSource, null, configuration.getSchema()); + rootSchema.add(ds.getSchemaAlias(), schema); + break; + case ck: + configuration = JsonUtil.parseObject(ds.getConfiguration(), CK.class); + if (StringUtils.isNotBlank(configuration.getUsername())) { + dataSource.setUsername(configuration.getUsername()); + } + if (StringUtils.isNotBlank(configuration.getPassword())) { + dataSource.setPassword(configuration.getPassword()); + } + dataSource.setInitialSize(configuration.getInitialPoolSize()); + dataSource.setMaxTotal(configuration.getMaxPoolSize()); + dataSource.setMinIdle(configuration.getMinPoolSize()); + dataSource.setDefaultQueryTimeout(Integer.valueOf(configuration.getQueryTimeout())); + startSshSession(configuration, null, ds.getId()); + dataSource.setUrl(configuration.getJdbc()); + schema = JdbcSchema.create(rootSchema, ds.getSchemaAlias(), dataSource, null, configuration.getDataBase()); + rootSchema.add(ds.getSchemaAlias(), schema); + break; + case pg: + configuration = JsonUtil.parseObject(ds.getConfiguration(), Pg.class); + if (StringUtils.isNotBlank(configuration.getUsername())) { + dataSource.setUsername(configuration.getUsername()); + } + if (StringUtils.isNotBlank(configuration.getPassword())) { + dataSource.setPassword(configuration.getPassword()); + } + dataSource.setInitialSize(configuration.getInitialPoolSize()); + dataSource.setMaxTotal(configuration.getMaxPoolSize()); + dataSource.setMinIdle(configuration.getMinPoolSize()); + dataSource.setDefaultQueryTimeout(Integer.valueOf(configuration.getQueryTimeout())); + startSshSession(configuration, null, ds.getId()); + dataSource.setUrl(configuration.getJdbc()); + schema = JdbcSchema.create(rootSchema, ds.getSchemaAlias(), dataSource, null, configuration.getSchema()); + rootSchema.add(ds.getSchemaAlias(), schema); + break; + case redshift: + configuration = JsonUtil.parseObject(ds.getConfiguration(), Redshift.class); + if (StringUtils.isNotBlank(configuration.getUsername())) { + dataSource.setUsername(configuration.getUsername()); + } + if (StringUtils.isNotBlank(configuration.getPassword())) { + dataSource.setPassword(configuration.getPassword()); + } + dataSource.setInitialSize(configuration.getInitialPoolSize()); + dataSource.setMaxTotal(configuration.getMaxPoolSize()); + dataSource.setMinIdle(configuration.getMinPoolSize()); + dataSource.setDefaultQueryTimeout(Integer.valueOf(configuration.getQueryTimeout())); + startSshSession(configuration, null, ds.getId()); + dataSource.setUrl(configuration.getJdbc()); + schema = JdbcSchema.create(rootSchema, ds.getSchemaAlias(), dataSource, null, configuration.getSchema()); + 
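// register the source under its schema alias (same pattern as the other dialect branches) so getConnectionFromPool() can look the sub-schema up later +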
rootSchema.add(ds.getSchemaAlias(), schema); + break; + case h2: + configuration = JsonUtil.parseObject(ds.getConfiguration(), H2.class); + if (StringUtils.isNotBlank(configuration.getUsername())) { + dataSource.setUsername(configuration.getUsername()); + } + if (StringUtils.isNotBlank(configuration.getPassword())) { + dataSource.setPassword(configuration.getPassword()); + } + dataSource.setInitialSize(configuration.getInitialPoolSize()); + dataSource.setMaxTotal(configuration.getMaxPoolSize()); + dataSource.setMinIdle(configuration.getMinPoolSize()); + dataSource.setDefaultQueryTimeout(Integer.valueOf(configuration.getQueryTimeout())); + startSshSession(configuration, null, ds.getId()); + dataSource.setUrl(configuration.getJdbc()); + schema = JdbcSchema.create(rootSchema, ds.getSchemaAlias(), dataSource, null, configuration.getDataBase()); + rootSchema.add(ds.getSchemaAlias(), schema); + break; + default: + configuration = JsonUtil.parseObject(ds.getConfiguration(), Mysql.class); + if (StringUtils.isNotBlank(configuration.getUsername())) { + dataSource.setUsername(configuration.getUsername()); + } + if (StringUtils.isNotBlank(configuration.getPassword())) { + dataSource.setPassword(configuration.getPassword()); + } + dataSource.setInitialSize(configuration.getInitialPoolSize()); + dataSource.setMaxTotal(configuration.getMaxPoolSize()); + dataSource.setMinIdle(configuration.getMinPoolSize()); + dataSource.setDefaultQueryTimeout(Integer.valueOf(configuration.getQueryTimeout())); + startSshSession(configuration, null, ds.getId()); + dataSource.setUrl(configuration.getJdbc()); + schema = JdbcSchema.create(rootSchema, ds.getSchemaAlias(), dataSource, null, configuration.getDataBase()); + rootSchema.add(ds.getSchemaAlias(), schema); + } + } catch (Exception e) { + LogUtil.error("Fail to create connection: " + ds.getName(), e); + } + } catch (Exception e) { + } + }); + } + return rootSchema; + } + + private List getDataResult(ResultSet rs) { + List list = new LinkedList<>(); + try { + ResultSetMetaData metaData = rs.getMetaData(); + int columnCount = metaData.getColumnCount(); + while (rs.next()) { + String[] row = new String[columnCount]; + for (int j = 0; j < columnCount; j++) { + int columnType = metaData.getColumnType(j + 1); + switch (columnType) { + case Types.DATE: + if (rs.getDate(j + 1) != null) { + row[j] = rs.getDate(j + 1).toString(); + } + break; + case Types.BOOLEAN: + row[j] = rs.getBoolean(j + 1) ? "true" : "false"; + break; + default: + if (metaData.getColumnTypeName(j + 1).toLowerCase().equalsIgnoreCase("blob")) { + row[j] = rs.getBlob(j + 1) == null ? 
"" : rs.getBlob(j + 1).toString(); + } else { + row[j] = rs.getString(j + 1); + } + break; + } + } + list.add(row); + } + } catch (Exception e) { + DEException.throwException(e.getMessage()); + } + return list; + } + + private String getTableFiledSql(DatasourceRequest datasourceRequest) { + String sql = ""; + DatasourceConfiguration configuration = null; + String database = ""; + DatasourceConfiguration.DatasourceType datasourceType = DatasourceConfiguration.DatasourceType.valueOf(datasourceRequest.getDatasource().getType()); + switch (datasourceType) { + case StarRocks: + case doris: + configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Mysql.class); + if (StringUtils.isEmpty(configuration.getUrlType()) || configuration.getUrlType().equalsIgnoreCase("hostName")) { + database = configuration.getDataBase(); + } else { + Pattern WITH_SQL_FRAGMENT = Pattern.compile("jdbc:mysql://(.*):(\\d+)/(.*)"); + Matcher matcher = WITH_SQL_FRAGMENT.matcher(configuration.getJdbcUrl()); + matcher.find(); + String[] databasePrams = matcher.group(3).split("\\?"); + database = databasePrams[0]; + } + if (database.contains(".")) { + sql = "select * from " + datasourceRequest.getTable() + " limit 0 offset 0 "; + } else { + sql = String.format("SELECT COLUMN_NAME,DATA_TYPE,COLUMN_COMMENT FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = '%s' AND TABLE_NAME = '%s'", database, datasourceRequest.getTable()); + } + break; + case mysql: + case mongo: + case mariadb: + case TiDB: + configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Mysql.class); + if (StringUtils.isEmpty(configuration.getUrlType()) || configuration.getUrlType().equalsIgnoreCase("hostName")) { + database = configuration.getDataBase(); + } else { + Pattern WITH_SQL_FRAGMENT = Pattern.compile("jdbc:mysql://(.*):(\\d+)/(.*)"); + Matcher matcher = WITH_SQL_FRAGMENT.matcher(configuration.getJdbcUrl()); + matcher.find(); + String[] databasePrams = matcher.group(3).split("\\?"); + database = databasePrams[0]; + } + sql = String.format("SELECT COLUMN_NAME,DATA_TYPE,COLUMN_COMMENT,IF(COLUMN_KEY='PRI',1,0) FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = '%s' AND TABLE_NAME = '%s'", database, datasourceRequest.getTable()); + break; + case oracle: + configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Oracle.class); + if (StringUtils.isEmpty(configuration.getSchema())) { + DEException.throwException(Translator.get("i18n_schema_is_empty")); + } + sql = String.format("SELECT a.COLUMN_NAME , a.DATA_TYPE , b.COMMENTS ,0 FROM all_tab_columns a LEFT JOIN all_col_comments b ON a.owner = b.owner AND a.table_name = b.table_name AND a.column_name = b.column_name WHERE a.owner = '%s' AND a.table_name = '%s' ORDER BY a.table_name, a.column_id", configuration.getSchema(), datasourceRequest.getTable()); + break; + case db2: + configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Db2.class); + if (StringUtils.isEmpty(configuration.getSchema())) { + DEException.throwException(Translator.get("i18n_schema_is_empty")); + } + sql = String.format("SELECT COLNAME , TYPENAME , REMARKS FROM SYSCAT.COLUMNS WHERE TABSCHEMA = '%s' AND TABNAME = '%s' ", configuration.getSchema(), datasourceRequest.getTable()); + break; + case sqlServer: + configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Sqlserver.class); + if (StringUtils.isEmpty(configuration.getSchema())) { + 
DEException.throwException(Translator.get("i18n_schema_is_empty")); + } + + sql = String.format("SELECT \n" + " c.name ,t.name ,ep.value, 0 \n" + "FROM \n" + " sys.columns AS c\n" + "LEFT JOIN sys.extended_properties AS ep ON c.object_id = ep.major_id AND c.column_id = ep.minor_id\n" + "LEFT JOIN sys.types AS t ON c.user_type_id = t.user_type_id\n" + "LEFT JOIN sys.objects AS o ON c.object_id = o.object_id\n" + "WHERE o.name = '%s'", datasourceRequest.getTable()); + break; + case pg: + configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Pg.class); + if (StringUtils.isEmpty(configuration.getSchema())) { + DEException.throwException(Translator.get("i18n_schema_is_empty")); + } + sql = String.format("SELECT\n" + + " a.attname AS ColumnName,\n" + + " t.typname,\n" + + " b.description AS ColumnDescription,\n" + + " CASE\n" + + " WHEN d.indisprimary THEN 1\n" + + " ELSE 0\n" + + " END\n" + + "FROM\n" + + " pg_class c\n" + + " JOIN pg_attribute a ON a.attrelid = c.oid\n" + + " LEFT JOIN pg_description b ON a.attrelid = b.objoid AND a.attnum = b.objsubid\n" + + " JOIN pg_type t ON a.atttypid = t.oid\n" + + " LEFT JOIN pg_index d ON d.indrelid = a.attrelid AND d.indisprimary AND a.attnum = ANY(d.indkey)\n" + + "where\n" + + " c.relnamespace = (SELECT oid FROM pg_namespace WHERE nspname = '%s')\n" + + " AND c.relname = '%s'\n" + + " AND a.attnum > 0\n" + + " AND NOT a.attisdropped\n" + + "ORDER BY\n" + + " a.attnum;", configuration.getSchema(), datasourceRequest.getTable()); + break; + case redshift: + configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), CK.class); + sql = String.format("SELECT\n" + " a.attname AS ColumnName,\n" + " t.typname,\n" + " b.description AS ColumnDescription,\n" + " 0\n" + "FROM\n" + " pg_class c\n" + " JOIN pg_attribute a ON a.attrelid = c.oid\n" + " LEFT JOIN pg_description b ON a.attrelid = b.objoid AND a.attnum = b.objsubid\n" + " JOIN pg_type t ON a.atttypid = t.oid\n" + "WHERE\n" + " c.relname = '%s'\n" + " AND a.attnum > 0\n" + " AND NOT a.attisdropped\n" + "ORDER BY\n" + " a.attnum\n" + " ", datasourceRequest.getTable()); + break; + case ck: + configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), CK.class); + + if (StringUtils.isEmpty(configuration.getUrlType()) || configuration.getUrlType().equalsIgnoreCase("hostName")) { + database = configuration.getDataBase(); + } else { + Pattern WITH_SQL_FRAGMENT = Pattern.compile("jdbc:clickhouse://(.*):(\\d+)/(.*)"); + Matcher matcher = WITH_SQL_FRAGMENT.matcher(configuration.getJdbcUrl()); + matcher.find(); + String[] databasePrams = matcher.group(3).split("\\?"); + database = databasePrams[0]; + } + sql = String.format(" SELECT\n" + " name,\n" + " type,\n" + " comment,\n" + " 0\n" + "FROM\n" + " system.columns\n" + "WHERE\n" + " database = '%s' \n" + " AND table = '%s' ", database, datasourceRequest.getTable()); + break; + case impala: + sql = String.format("DESCRIBE `%s`", datasourceRequest.getTable()); + break; + case h2: + sql = String.format("SELECT COLUMN_NAME, DATA_TYPE, REMARKS FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = '%s'", datasourceRequest.getTable()); + break; + default: + break; + } + + return sql; + } + + private List getTablesSql(DatasourceRequest datasourceRequest) throws DEException { + List tableSqls = new ArrayList<>(); + DatasourceConfiguration.DatasourceType datasourceType = DatasourceConfiguration.DatasourceType.valueOf(datasourceRequest.getDatasource().getType()); + 
DatasourceConfiguration configuration = null; + String database = ""; + switch (datasourceType) { + case StarRocks: + case doris: + configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Mysql.class); + if (StringUtils.isEmpty(configuration.getUrlType()) || configuration.getUrlType().equalsIgnoreCase("hostName")) { + database = configuration.getDataBase(); + } else { + Pattern WITH_SQL_FRAGMENT = Pattern.compile("jdbc:mysql://(.*):(\\d+)/(.*)"); + Matcher matcher = WITH_SQL_FRAGMENT.matcher(configuration.getJdbcUrl()); + matcher.find(); + String[] databasePrams = matcher.group(3).split("\\?"); + database = databasePrams[0]; + } + if (database.contains(".")) { + tableSqls.add("show tables"); + } else { + tableSqls.add(String.format("SELECT TABLE_NAME,TABLE_COMMENT FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = '%s' ;", database)); + } + break; + case mongo: + tableSqls.add("show tables"); + break; + case mysql: + case mariadb: + case TiDB: + configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Mysql.class); + if (StringUtils.isEmpty(configuration.getUrlType()) || configuration.getUrlType().equalsIgnoreCase("hostName")) { + database = configuration.getDataBase(); + } else { + Pattern WITH_SQL_FRAGMENT = Pattern.compile("jdbc:mysql://(.*):(\\d+)/(.*)"); + Matcher matcher = WITH_SQL_FRAGMENT.matcher(configuration.getJdbcUrl()); + matcher.find(); + String[] databasePrams = matcher.group(3).split("\\?"); + database = databasePrams[0]; + } + tableSqls.add(String.format("SELECT TABLE_NAME,TABLE_COMMENT FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = '%s' ;", database)); + break; + case oracle: + configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Oracle.class); + if (StringUtils.isEmpty(configuration.getSchema())) { + DEException.throwException(Translator.get("i18n_schema_is_empty")); + } + tableSqls.add("select table_name, comments, owner from all_tab_comments where owner='" + configuration.getSchema() + "' AND table_type = 'TABLE'"); + tableSqls.add("select table_name, comments, owner from all_tab_comments where owner='" + configuration.getSchema() + "' AND table_type = 'VIEW'"); + break; + case db2: + configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Db2.class); + if (StringUtils.isEmpty(configuration.getSchema())) { + DEException.throwException(Translator.get("i18n_schema_is_empty")); + } + tableSqls.add("select TABNAME, REMARKS from syscat.tables WHERE TABSCHEMA ='DE_SCHEMA' AND \"TYPE\" = 'T'".replace("DE_SCHEMA", configuration.getSchema())); + break; + case sqlServer: + configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Sqlserver.class); + if (StringUtils.isEmpty(configuration.getSchema())) { + DEException.throwException(Translator.get("i18n_schema_is_empty")); + } + tableSqls.add("SELECT \n" + " t.name AS TableName, \n" + " ep.value AS TableDescription \n" + "FROM \n" + " sys.tables t \n" + "LEFT OUTER JOIN sys.schemas sc ON sc.schema_id =t.schema_id \n" + "LEFT OUTER JOIN \n" + " sys.extended_properties ep ON t.object_id = ep.major_id \n" + " AND ep.minor_id = 0 \n" + " AND ep.class = 1 \n" + " AND ep.name = 'MS_Description'\n" + "where sc.name ='DS_SCHEMA'".replace("DS_SCHEMA", configuration.getSchema())); + tableSqls.add("SELECT \n" + " t.name AS TableName, \n" + " ep.value AS TableDescription \n" + "FROM \n" + " sys.views t \n" + "LEFT OUTER JOIN sys.schemas sc ON sc.schema_id 
=t.schema_id \n" + "LEFT OUTER JOIN \n" + " sys.extended_properties ep ON t.object_id = ep.major_id \n" + " AND ep.minor_id = 0 \n" + " AND ep.class = 1 \n" + " AND ep.name = 'MS_Description'\n" + "where sc.name ='DS_SCHEMA'".replace("DS_SCHEMA", configuration.getSchema())); + break; + case pg: + configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Pg.class); + if (StringUtils.isEmpty(configuration.getSchema())) { + DEException.throwException(Translator.get("i18n_schema_is_empty")); + } + tableSqls.add("SELECT \n" + " relname AS TableName, \n" + " obj_description(relfilenode::regclass, 'pg_class') AS TableDescription \n" + "FROM \n" + " pg_class \n" + "WHERE \n" + " relkind in ('r','p', 'f') \n" + " AND relnamespace = (SELECT oid FROM pg_namespace WHERE nspname = 'SCHEMA') ".replace("SCHEMA", configuration.getSchema())); + break; + case redshift: + configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), CK.class); + tableSqls.add("SELECT \n" + " relname AS TableName, \n" + " obj_description(relfilenode::regclass, 'pg_class') AS TableDescription \n" + "FROM \n" + " pg_class \n" + "WHERE \n" + " relkind in ('r','p', 'f') \n" + " AND relnamespace = (SELECT oid FROM pg_namespace WHERE nspname = 'SCHEMA') ".replace("SCHEMA", configuration.getSchema())); + break; + case ck: + configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), CK.class); + if (StringUtils.isEmpty(configuration.getUrlType()) || configuration.getUrlType().equalsIgnoreCase("hostName")) { + database = configuration.getDataBase(); + } else { + Pattern WITH_SQL_FRAGMENT = Pattern.compile("jdbc:clickhouse://(.*):(\\d+)/(.*)"); + Matcher matcher = WITH_SQL_FRAGMENT.matcher(configuration.getJdbcUrl()); + matcher.find(); + String[] databasePrams = matcher.group(3).split("\\?"); + database = databasePrams[0]; + } + if (datasourceRequest.getDsVersion() < 22) { + tableSqls.add("SELECT name, name FROM system.tables where database='DATABASE';".replace("DATABASE", database)); + } else { + tableSqls.add("SELECT name, comment FROM system.tables where database='DATABASE';".replace("DATABASE", database)); + } + + + break; + default: + tableSqls.add("show tables"); + } + return tableSqls; + + } + + private String getSchemaSql(DatasourceDTO datasource) throws DEException { + DatasourceConfiguration.DatasourceType datasourceType = DatasourceConfiguration.DatasourceType.valueOf(datasource.getType()); + switch (datasourceType) { + case oracle: + return "select * from all_users"; + case sqlServer: + return "select name from sys.schemas;"; + case db2: + DatasourceConfiguration configuration = JsonUtil.parseObject(datasource.getConfiguration(), Db2.class); + return "select SCHEMANAME from syscat.SCHEMATA WHERE \"DEFINER\" ='USER'".replace("USER", configuration.getUsername().toUpperCase()); + case pg: + return "SELECT nspname FROM pg_namespace;"; + case redshift: + return "SELECT nspname FROM pg_namespace;"; + default: + return "show tables;"; + } + } + + public Statement getStatement(Connection connection, int queryTimeout) { + if (connection == null) { + DEException.throwException("Failed to get connection!"); + } + Statement stat = null; + try { + stat = connection.createStatement(); + stat.setQueryTimeout(queryTimeout); + } catch (Exception e) { + DEException.throwException(e.getMessage()); + } + return stat; + } + + public Statement getPreparedStatement(Connection connection, int queryTimeout, String sql, List values) throws Exception { + if 
(connection == null) { + throw new Exception("Failed to get connection!"); + } + if (CollectionUtils.isNotEmpty(values)) { + PreparedStatement stat = null; + try { + stat = connection.prepareStatement(sql); + stat.setQueryTimeout(queryTimeout); + } catch (Exception e) { + DEException.throwException(e.getMessage()); + } + return stat; + } else { + return getStatement(connection, queryTimeout); + } + } + + protected boolean isDefaultClassLoader(String customDriver) { + return StringUtils.isEmpty(customDriver) || customDriver.equalsIgnoreCase("default"); + } + + protected ExtendedJdbcClassLoader getCustomJdbcClassLoader(CoreDriver coreDriver) { + if (coreDriver == null) { + DEException.throwException("Can not found custom Driver"); + } + ExtendedJdbcClassLoader customJdbcClassLoader = customJdbcClassLoaders.get(coreDriver.getId()); + if (customJdbcClassLoader == null) { + return addCustomJdbcClassLoader(coreDriver); + } else { + if (StringUtils.isNotEmpty(customJdbcClassLoader.getDriver()) && customJdbcClassLoader.getDriver().equalsIgnoreCase(coreDriver.getDriverClass())) { + return customJdbcClassLoader; + } else { + customJdbcClassLoaders.remove(coreDriver.getId()); + return addCustomJdbcClassLoader(coreDriver); + } + } + } + + private synchronized ExtendedJdbcClassLoader addCustomJdbcClassLoader(CoreDriver coreDriver) { + ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); + while (classLoader.getParent() != null) { + classLoader = classLoader.getParent(); + if (classLoader.toString().contains("ExtClassLoader")) { + break; + } + } + try { + ExtendedJdbcClassLoader customJdbcClassLoader = new ExtendedJdbcClassLoader(new URL[]{new File(CUSTOM_PATH + coreDriver.getId()).toURI().toURL()}, classLoader); + customJdbcClassLoader.setDriver(coreDriver.getDriverClass()); + File file = new File(CUSTOM_PATH + coreDriver.getId()); + File[] array = file.listFiles(); + Optional.ofNullable(array).ifPresent(files -> { + for (File tmp : array) { + if (tmp.getName().endsWith(".jar")) { + try { + customJdbcClassLoader.addFile(tmp); + } catch (IOException e) { + e.printStackTrace(); + } + } + } + }); + customJdbcClassLoaders.put(coreDriver.getId(), customJdbcClassLoader); + return customJdbcClassLoader; + } catch (Exception e) { + DEException.throwException(e.getMessage()); + } + return null; + } + + private Connection connection = null; + + public void initConnectionPool() { + LogUtil.info("Begin to init datasource pool..."); + QueryWrapper datasourceQueryWrapper = new QueryWrapper(); + List coreDatasources = coreDatasourceMapper.selectList(datasourceQueryWrapper).stream().filter(coreDatasource -> !Arrays.asList("folder", "API", "Excel").contains(coreDatasource.getType())).collect(Collectors.toList()); + CoreDatasource engine = engineManage.deEngine(); + if (engine != null) { + coreDatasources.add(engine); + } + + for (CoreDatasource coreDatasource : coreDatasources) { + Map dsMap = new HashMap<>(); + DatasourceSchemaDTO datasourceSchemaDTO = new DatasourceSchemaDTO(); + BeanUtils.copyBean(datasourceSchemaDTO, coreDatasource); + datasourceSchemaDTO.setSchemaAlias(String.format(SQLConstants.SCHEMA, datasourceSchemaDTO.getId())); + dsMap.put(datasourceSchemaDTO.getId(), datasourceSchemaDTO); + commonThreadPool.addTask(() -> { + try { + connection = initConnection(dsMap); + } catch (Exception ignore) { + } + }); + } + LogUtil.info("dsMap size..." 
+ coreDatasources.size()); + + } + + public void update(DatasourceDTO datasourceDTO) throws DEException { + DatasourceSchemaDTO datasourceSchemaDTO = new DatasourceSchemaDTO(); + BeanUtils.copyBean(datasourceSchemaDTO, datasourceDTO); + datasourceSchemaDTO.setSchemaAlias(String.format(SQLConstants.SCHEMA, datasourceSchemaDTO.getId())); + DatasourceRequest datasourceRequest = new DatasourceRequest(); + datasourceRequest.setDsList(Map.of(datasourceSchemaDTO.getId(), datasourceSchemaDTO)); + try { + CalciteConnection calciteConnection = connection.unwrap(CalciteConnection.class); + SchemaPlus rootSchema = buildSchema(datasourceRequest, calciteConnection); + } catch (Exception e) { + DEException.throwException(e.getMessage()); + } + } + + public void updateDsPoolAfterCheckStatus(DatasourceDTO datasourceDTO) throws DEException { + DatasourceSchemaDTO datasourceSchemaDTO = new DatasourceSchemaDTO(); + BeanUtils.copyBean(datasourceSchemaDTO, datasourceDTO); + datasourceSchemaDTO.setSchemaAlias(String.format(SQLConstants.SCHEMA, datasourceSchemaDTO.getId())); + DatasourceRequest datasourceRequest = new DatasourceRequest(); + datasourceRequest.setDsList(Map.of(datasourceSchemaDTO.getId(), datasourceSchemaDTO)); + try { + CalciteConnection calciteConnection = connection.unwrap(CalciteConnection.class); + SchemaPlus rootSchema = calciteConnection.getRootSchema(); + if (rootSchema.getSubSchema(datasourceSchemaDTO.getSchemaAlias()) == null) { + buildSchema(datasourceRequest, calciteConnection); + } + DatasourceConfiguration configuration = JsonUtil.parseObject(datasourceDTO.getConfiguration(), DatasourceConfiguration.class); + if (configuration.isUseSSH()) { + Session session = Provider.getSessions().get(datasourceDTO.getId()); + session.disconnect(); + Provider.getSessions().remove(datasourceDTO.getId()); + startSshSession(configuration, null, datasourceDTO.getId()); + } + } catch (Exception e) { + DEException.throwException(e.getMessage()); + } + } + + public void delete(CoreDatasource datasource) throws DEException { + DatasourceSchemaDTO datasourceSchemaDTO = new DatasourceSchemaDTO(); + BeanUtils.copyBean(datasourceSchemaDTO, datasource); + datasourceSchemaDTO.setSchemaAlias(String.format(SQLConstants.SCHEMA, datasourceSchemaDTO.getId())); + try { + CalciteConnection calciteConnection = connection.unwrap(CalciteConnection.class); + SchemaPlus rootSchema = calciteConnection.getRootSchema(); + if (rootSchema.getSubSchema(datasourceSchemaDTO.getSchemaAlias()) != null) { + JdbcSchema jdbcSchema = rootSchema.getSubSchema(datasourceSchemaDTO.getSchemaAlias()).unwrap(JdbcSchema.class); + BasicDataSource basicDataSource = (BasicDataSource) jdbcSchema.getDataSource(); + basicDataSource.close(); + rootSchema.removeSubSchema(datasourceSchemaDTO.getSchemaAlias()); + } + } catch (Exception e) { + DEException.throwException(e.getMessage()); + } + Provider.getLPorts().remove(datasource.getId()); + if (Provider.getSessions().get(datasource.getId()) != null) { + Provider.getSessions().get(datasource.getId()).disconnect(); + } + Provider.getSessions().remove(datasource.getId()); + } + + public Connection take() { + if (connection == null) { // first check, no lock needed + synchronized (Connection.class) { // synchronized block + if (connection == null) { // second check, performed under the lock + connection = getCalciteConnection(); + } + } + } + return connection; + } + + private Connection getConnectionFromPool(Long dsId) { + try { + Connection connection = take(); + CalciteConnection calciteConnection = connection.unwrap(CalciteConnection.class); + SchemaPlus rootSchema = 
calciteConnection.getRootSchema(); + if (rootSchema.getSubSchema(String.format(SQLConstants.SCHEMA, dsId)) == null) { + DEException.throwException(Translator.get("i18n_check_datasource_connection")); + } + JdbcSchema jdbcSchema = rootSchema.getSubSchema(String.format(SQLConstants.SCHEMA, dsId)).unwrap(JdbcSchema.class); + BasicDataSource basicDataSource = (BasicDataSource) jdbcSchema.getDataSource(); + return basicDataSource.getConnection(); + } catch (Exception e) { + DEException.throwException(Translator.get("i18n_invalid_connection") + e.getMessage()); + } + return null; + } + + public void exec(EngineRequest engineRequest) throws Exception { + DatasourceConfiguration configuration = JsonUtil.parseObject(engineRequest.getEngine().getConfiguration(), DatasourceConfiguration.class); + int queryTimeout = configuration.getQueryTimeout(); + DatasourceDTO datasource = new DatasourceDTO(); + BeanUtils.copyBean(datasource, engineRequest.getEngine()); + try (Connection connection = getConnectionFromPool(datasource.getId()); Statement stat = getStatement(connection, queryTimeout)) { + PreparedStatement preparedStatement = connection.prepareStatement(engineRequest.getQuery()); + preparedStatement.setQueryTimeout(queryTimeout); + Boolean result = preparedStatement.execute(); + } catch (Exception e) { + throw e; + } + } +} diff --git a/backend/src/main/java/com/stdproject/service/type/CK.java b/backend/src/main/java/com/stdproject/service/type/CK.java new file mode 100644 index 0000000..cfeade6 --- /dev/null +++ b/backend/src/main/java/com/stdproject/service/type/CK.java @@ -0,0 +1,31 @@ +package com.stdproject.service.type; + +import io.gisbi.extensions.datasource.vo.DatasourceConfiguration; +import lombok.Data; +import org.apache.commons.lang3.StringUtils; +import org.springframework.stereotype.Component; + +@Data +@Component("ck") +public class CK extends DatasourceConfiguration { + private String driver = "com.clickhouse.jdbc.ClickHouseDriver"; + private String extraParams = ""; + + public String getJdbc() { + if(StringUtils.isNoneEmpty(getUrlType()) && !getUrlType().equalsIgnoreCase("hostName")){ + return getJdbcUrl(); + } + if(StringUtils.isEmpty(extraParams.trim())){ + return "jdbc:clickhouse://HOSTNAME:PORT/DATABASE" + .replace("HOSTNAME", getLHost().trim()) + .replace("PORT", getLPort().toString().trim()) + .replace("DATABASE", getDataBase().trim()); + }else { + return "jdbc:clickhouse://HOSTNAME:PORT/DATABASE?EXTRA_PARAMS" + .replace("HOSTNAME", getLHost().trim()) + .replace("PORT", getLPort().toString().trim()) + .replace("DATABASE", getDataBase().trim()) + .replace("EXTRA_PARAMS", getExtraParams().trim()); + } + } +} diff --git a/backend/src/main/java/com/stdproject/service/type/Db2.java b/backend/src/main/java/com/stdproject/service/type/Db2.java new file mode 100644 index 0000000..8d98352 --- /dev/null +++ b/backend/src/main/java/com/stdproject/service/type/Db2.java @@ -0,0 +1,39 @@ +package com.stdproject.service.type; + +import io.gisbi.extensions.datasource.vo.DatasourceConfiguration; +import lombok.Data; +import org.apache.commons.lang3.StringUtils; +import org.springframework.stereotype.Component; + +@Data +@Component("db2") +public class Db2 extends DatasourceConfiguration { + private String driver = "com.ibm.db2.jcc.DB2Driver"; + private String extraParams = ""; + + public String getJdbc() { + if(StringUtils.isNoneEmpty(getUrlType()) && !getUrlType().equalsIgnoreCase("hostName")){ + return getJdbcUrl(); + } + if(StringUtils.isEmpty(extraParams.trim())){ + if 
(StringUtils.isEmpty(getSchema())) { + return "jdbc:db2://HOSTNAME:PORT/DATABASE" + .replace("HOSTNAME", getLHost().trim()) + .replace("PORT", getLPort().toString().trim()) + .replace("DATABASE", getDataBase().trim()); + } else { + return "jdbc:db2://HOSTNAME:PORT/DATABASE:currentSchema=SCHEMA;" + .replace("HOSTNAME", getLHost().trim()) + .replace("PORT", getLPort().toString().trim()) + .replace("DATABASE", getDataBase().trim()) + .replace("SCHEMA",getSchema().trim()); + } + }else { + return "jdbc:db2://HOSTNAME:PORT/DATABASE:EXTRA_PARAMS" + .replace("HOSTNAME", getLHost().trim()) + .replace("PORT", getLPort().toString().trim()) + .replace("DATABASE", getDataBase().trim()) + .replace("EXTRA_PARAMS", getExtraParams().trim()); + } + } +} diff --git a/backend/src/main/java/com/stdproject/service/type/Es.java b/backend/src/main/java/com/stdproject/service/type/Es.java new file mode 100644 index 0000000..1a84164 --- /dev/null +++ b/backend/src/main/java/com/stdproject/service/type/Es.java @@ -0,0 +1,14 @@ +package com.stdproject.service.type; + + +import lombok.Data; + +@Data +public class Es { + private String url; + private String username; + private String password; + private String version; + private String uri; + +} diff --git a/backend/src/main/java/com/stdproject/service/type/H2.java b/backend/src/main/java/com/stdproject/service/type/H2.java new file mode 100644 index 0000000..18901e7 --- /dev/null +++ b/backend/src/main/java/com/stdproject/service/type/H2.java @@ -0,0 +1,11 @@ +package com.stdproject.service.type; + +import io.gisbi.extensions.datasource.vo.DatasourceConfiguration; +import lombok.Data; +import org.springframework.stereotype.Component; + +@Data +@Component("h2") +public class H2 extends DatasourceConfiguration { + private String driver = "org.h2.Driver"; +} diff --git a/backend/src/main/java/com/stdproject/service/type/Impala.java b/backend/src/main/java/com/stdproject/service/type/Impala.java new file mode 100644 index 0000000..14c46ab --- /dev/null +++ b/backend/src/main/java/com/stdproject/service/type/Impala.java @@ -0,0 +1,36 @@ +package com.stdproject.service.type; + +import io.gisbi.extensions.datasource.vo.DatasourceConfiguration; +import lombok.Data; +import org.apache.commons.lang3.StringUtils; +import org.springframework.stereotype.Component; + +import java.util.Arrays; +import java.util.List; + +@Data +@Component("impala") +public class Impala extends DatasourceConfiguration { + private String driver = "com.cloudera.impala.jdbc.Driver"; + private String extraParams = ""; + private List illegalParameters = Arrays.asList("autoDeserialize", "queryInterceptors", "statementInterceptors", "detectCustomCollations"); + private List showTableSqls = Arrays.asList("show tables"); + + public String getJdbc() { + if(StringUtils.isNoneEmpty(getUrlType()) && !getUrlType().equalsIgnoreCase("hostName")){ + return getJdbcUrl(); + } + if(StringUtils.isEmpty(extraParams.trim())){ + return "jdbc:impala://HOSTNAME:PORT/DATABASE" + .replace("HOSTNAME", getLHost().trim()) + .replace("PORT", getLPort().toString().trim()) + .replace("DATABASE", getDataBase().trim()); + }else { + return "jdbc:impala://HOSTNAME:PORT/DATABASE;EXTRA_PARAMS" + .replace("HOSTNAME", getLHost().trim()) + .replace("PORT", getLPort().toString().trim()) + .replace("DATABASE", getDataBase().trim()) + .replace("EXTRA_PARAMS", getExtraParams().trim()); + } + } +} diff --git a/backend/src/main/java/com/stdproject/service/type/Mongo.java b/backend/src/main/java/com/stdproject/service/type/Mongo.java new file mode 
100644 index 0000000..a988c53 --- /dev/null +++ b/backend/src/main/java/com/stdproject/service/type/Mongo.java @@ -0,0 +1,42 @@ +package com.stdproject.service.type; + +import io.gisbi.extensions.datasource.vo.DatasourceConfiguration; +import lombok.Data; +import org.apache.commons.lang3.StringUtils; +import org.springframework.stereotype.Component; + +import java.util.Arrays; +import java.util.List; + +@Data +@Component("mongo") +public class Mongo extends DatasourceConfiguration { + private String driver = "com.mysql.cj.jdbc.Driver"; + private String extraParams = "characterEncoding=UTF-8&connectTimeout=5000&useSSL=false&allowPublicKeyRetrieval=true&zeroDateTimeBehavior=convertToNull"; + private List illegalParameters = Arrays.asList("autoDeserialize", "queryInterceptors", "statementInterceptors", "detectCustomCollations"); + private List showTableSqls = Arrays.asList("show tables"); + + public String getJdbc() { + if(StringUtils.isNoneEmpty(getUrlType()) && !getUrlType().equalsIgnoreCase("hostName")){ + return getJdbcUrl(); + } + if (StringUtils.isEmpty(extraParams.trim())) { + return "jdbc:mysql://HOSTNAME:PORT/DATABASE" + .replace("HOSTNAME", getLHost().trim()) + .replace("PORT", getLPort().toString().trim()) + .replace("DATABASE", getDataBase().trim()); + } else { + for (String illegalParameter : illegalParameters) { + if (getExtraParams().contains(illegalParameter)) { + throw new RuntimeException("Illegal parameter: " + illegalParameter); + } + } + + return "jdbc:mysql://HOSTNAME:PORT/DATABASE?EXTRA_PARAMS" + .replace("HOSTNAME", getLHost().trim()) + .replace("PORT", getLPort().toString().trim()) + .replace("DATABASE", getDataBase().trim()) + .replace("EXTRA_PARAMS", getExtraParams().trim()); + } + } +} diff --git a/backend/src/main/java/com/stdproject/service/type/Mysql.java b/backend/src/main/java/com/stdproject/service/type/Mysql.java new file mode 100644 index 0000000..d645038 --- /dev/null +++ b/backend/src/main/java/com/stdproject/service/type/Mysql.java @@ -0,0 +1,48 @@ +package com.stdproject.service.type; + +import io.gisbi.exception.DEException; +import io.gisbi.extensions.datasource.vo.DatasourceConfiguration; +import lombok.Data; +import org.apache.commons.lang3.StringUtils; +import org.springframework.stereotype.Component; + +import java.net.URLDecoder; +import java.util.Arrays; +import java.util.List; + +@Data +@Component("mysql") +public class Mysql extends DatasourceConfiguration { + private String driver = "com.mysql.cj.jdbc.Driver"; + private String extraParams = "characterEncoding=UTF-8&connectTimeout=5000&useSSL=false&allowPublicKeyRetrieval=true&zeroDateTimeBehavior=convertToNull"; + private List illegalParameters = Arrays.asList("maxAllowedPacket", "autoDeserialize", "queryInterceptors", "statementInterceptors", "detectCustomCollations", "allowloadlocalinfile", "allowUrlInLocalInfile", "allowLoadLocalInfileInPath"); + private List showTableSqls = Arrays.asList("show tables"); + + public String getJdbc() { + if (StringUtils.isNoneEmpty(getUrlType()) && !getUrlType().equalsIgnoreCase("hostName")) { + for (String illegalParameter : illegalParameters) { + if (URLDecoder.decode(getJdbcUrl()).toLowerCase().contains(illegalParameter.toLowerCase()) || URLDecoder.decode(getExtraParams()).contains(illegalParameter.toLowerCase())) { + DEException.throwException("Illegal parameter: " + illegalParameter); + } + } + return getJdbcUrl(); + } + if (StringUtils.isEmpty(extraParams.trim())) { + return "jdbc:mysql://HOSTNAME:PORT/DATABASE" + .replace("HOSTNAME", 
getLHost().trim()) + .replace("PORT", getLPort().toString().trim()) + .replace("DATABASE", getDataBase().trim()); + } else { + for (String illegalParameter : illegalParameters) { + if (URLDecoder.decode(getExtraParams()).toLowerCase().contains(illegalParameter.toLowerCase()) || URLDecoder.decode(getExtraParams()).contains(illegalParameter.toLowerCase())) { + DEException.throwException("Illegal parameter: " + illegalParameter); + } + } + return "jdbc:mysql://HOSTNAME:PORT/DATABASE?EXTRA_PARAMS" + .replace("HOSTNAME", getLHost().trim()) + .replace("PORT", getLPort().toString().trim()) + .replace("DATABASE", getDataBase().trim()) + .replace("EXTRA_PARAMS", getExtraParams().trim()); + } + } +} diff --git a/backend/src/main/java/com/stdproject/service/type/Oracle.java b/backend/src/main/java/com/stdproject/service/type/Oracle.java new file mode 100644 index 0000000..9267644 --- /dev/null +++ b/backend/src/main/java/com/stdproject/service/type/Oracle.java @@ -0,0 +1,30 @@ +package com.stdproject.service.type; + +import io.gisbi.extensions.datasource.vo.DatasourceConfiguration; +import lombok.Data; +import org.apache.commons.lang3.StringUtils; +import org.springframework.stereotype.Component; + +@Data +@Component("oracle") +public class Oracle extends DatasourceConfiguration { + private String driver = "oracle.jdbc.driver.OracleDriver"; + private String extraParams = ""; + + public String getJdbc() { + if(StringUtils.isNoneEmpty(getUrlType()) && !getUrlType().equalsIgnoreCase("hostName")){ + return getJdbcUrl(); + } + if (StringUtils.isNotEmpty(getConnectionType()) && getConnectionType().equalsIgnoreCase("serviceName")) { + return "jdbc:oracle:thin:@HOSTNAME:PORT/DATABASE" + .replace("HOSTNAME", getLHost().trim()) + .replace("PORT", getLPort().toString().trim()) + .replace("DATABASE", getDataBase().trim()); + }else { + return "jdbc:oracle:thin:@HOSTNAME:PORT:DATABASE" + .replace("HOSTNAME", getLHost().trim()) + .replace("PORT", getLPort().toString().trim()) + .replace("DATABASE", getDataBase().trim()); + } + } +} diff --git a/backend/src/main/java/com/stdproject/service/type/Pg.java b/backend/src/main/java/com/stdproject/service/type/Pg.java new file mode 100644 index 0000000..9405869 --- /dev/null +++ b/backend/src/main/java/com/stdproject/service/type/Pg.java @@ -0,0 +1,40 @@ +package com.stdproject.service.type; + +import io.gisbi.extensions.datasource.vo.DatasourceConfiguration; +import lombok.Data; +import org.apache.commons.lang3.StringUtils; +import org.springframework.stereotype.Component; + +@Data +@Component("pg") +public class Pg extends DatasourceConfiguration { + private String driver = "org.postgresql.Driver"; + private String extraParams = ""; + + public String getJdbc() { + if(StringUtils.isNoneEmpty(getUrlType()) && !getUrlType().equalsIgnoreCase("hostName")){ + return getJdbcUrl(); + } + if(StringUtils.isEmpty(extraParams.trim())){ + if (StringUtils.isEmpty(getSchema())) { + return "jdbc:postgresql://HOSTNAME:PORT/DATABASE" + .replace("HOSTNAME", getLHost().trim()) + .replace("PORT", getLPort().toString().trim()) + .replace("DATABASE", getDataBase().trim()); + } else { + return "jdbc:postgresql://HOSTNAME:PORT/DATABASE?currentSchema=SCHEMA" + .replace("HOSTNAME", getLHost().trim()) + .replace("PORT", getLPort().toString().trim()) + .replace("DATABASE", getDataBase().trim()) + .replace("SCHEMA", getSchema().trim()); + } + }else { + return "jdbc:postgresql://HOSTNAME:PORT/DATABASE?EXTRA_PARAMS" + .replace("HOSTNAME", getLHost().trim()) + .replace("PORT", 
getLPort().toString().trim()) + .replace("DATABASE", getDataBase().trim()) + .replace("EXTRA_PARAMS", getExtraParams().trim()); + + } + } +} diff --git a/backend/src/main/java/com/stdproject/service/type/Redshift.java b/backend/src/main/java/com/stdproject/service/type/Redshift.java new file mode 100644 index 0000000..81f22d2 --- /dev/null +++ b/backend/src/main/java/com/stdproject/service/type/Redshift.java @@ -0,0 +1,23 @@ +package com.stdproject.service.type; + +import io.gisbi.extensions.datasource.vo.DatasourceConfiguration; +import lombok.Data; +import org.apache.commons.lang3.StringUtils; +import org.springframework.stereotype.Component; + +@Data +@Component("redshift") +public class Redshift extends DatasourceConfiguration { + private String driver = "com.amazon.redshift.jdbc42.Driver"; + private String extraParams = ""; + + public String getJdbc() { + if(StringUtils.isNoneEmpty(getUrlType()) && !getUrlType().equalsIgnoreCase("hostName")){ + return getJdbcUrl(); + } + return "jdbc:redshift://HOSTNAME:PORT/DATABASE" + .replace("HOSTNAME", getLHost().trim()) + .replace("PORT", getLPort().toString().trim()) + .replace("DATABASE", getDataBase().trim()); + } +} diff --git a/backend/src/main/java/com/stdproject/service/type/Sqlserver.java b/backend/src/main/java/com/stdproject/service/type/Sqlserver.java new file mode 100644 index 0000000..820b285 --- /dev/null +++ b/backend/src/main/java/com/stdproject/service/type/Sqlserver.java @@ -0,0 +1,36 @@ +package com.stdproject.service.type; + +import io.gisbi.extensions.datasource.vo.DatasourceConfiguration; +import lombok.Data; +import org.apache.commons.lang3.StringUtils; +import org.springframework.stereotype.Component; + +import java.util.Arrays; +import java.util.List; + +@Data +@Component("sqlServer") +public class Sqlserver extends DatasourceConfiguration { + private String driver = "com.microsoft.sqlserver.jdbc.SQLServerDriver"; + private String extraParams = ""; + private List illegalParameters = Arrays.asList("autoDeserialize", "queryInterceptors", "statementInterceptors", "detectCustomCollations"); + private List showTableSqls = Arrays.asList("show tables"); + + public String getJdbc() { + if(StringUtils.isNoneEmpty(getUrlType()) && !getUrlType().equalsIgnoreCase("hostName")){ + return getJdbcUrl(); + } + if (StringUtils.isEmpty(extraParams.trim())) { + return "jdbc:sqlserver://HOSTNAME:PORT;DatabaseName=DATABASE" + .replace("HOSTNAME", getLHost().trim()) + .replace("PORT", getLPort().toString().trim()) + .replace("DATABASE", getDataBase().trim()); + } else { + return "jdbc:sqlserver://HOSTNAME:PORT;DatabaseName=DATABASE;EXTRA_PARAMS" + .replace("HOSTNAME", getLHost().trim()) + .replace("PORT", getLPort().toString().trim()) + .replace("DATABASE", getDataBase().trim()) + .replace("EXTRA_PARAMS", getExtraParams().trim()); + } + } +} diff --git a/backend/src/main/java/com/stdproject/utils/FieldUtils.java b/backend/src/main/java/com/stdproject/utils/FieldUtils.java new file mode 100644 index 0000000..dfaed15 --- /dev/null +++ b/backend/src/main/java/com/stdproject/utils/FieldUtils.java @@ -0,0 +1,61 @@ +package com.stdproject.utils; + +import java.util.Arrays; +import java.util.List; + +/** + * @Author bi-coder + */ +public class FieldUtils { + public static int transType2DeType(final String type) { + List text = Arrays.asList("CHAR", "VARCHAR", "TEXT", "TINYTEXT", "MEDIUMTEXT", "LONGTEXT", "ENUM", "ANY", "STRING", "BOOL", "BOOLEAN"); + List time = Arrays.asList("DATE", "TIME", "YEAR", "DATETIME", "TIMESTAMP", "DATEV2", "DATETIMEV2", 
"DATETIME2", "DATETIMEOFFSET", "SMALLDATETIME", "DATETIME64", "_TIMESTAMPTZ", "TIMESTAMPTZ"); + List num = Arrays.asList("INT", "SMALLINT", "MEDIUMINT", "INTEGER", "BIGINT", "LONG", "INT2", "INT4", "INT8", "int2", "int4", "int8", "INT16", "INT32", "INT64", "UINT8", "UINT16", "UINT32", "UINT64"); + List doubleList = Arrays.asList("NUMBER", "FLOAT", "DOUBLE", "DECIMAL", "REAL", "MONEY", "NUMERIC", "float4", "float8", "FLOAT4", "FLOAT8", "DECFLOAT", "FLOAT32", "FLOAT64"); + List boolType = Arrays.asList("BIT", "TINYINT"); + if (boolType.contains(type)) { + return 4; // boolean + } + if (doubleList.contains(type)) { + return 3; // float + } + if (num.contains(type)) { + return 2; // integer + } + if (time.contains(type)) { + return 1; // time + } + if (text.contains(type)) { + return 0; // text + } + + if (boolType.stream().anyMatch(l -> type.contains(l))) { + return 4; // boolean + } + if (doubleList.stream().anyMatch(l -> type.contains(l))) { + return 3; // float + } + if (num.stream().anyMatch(l -> type.contains(l))) { + return 2; // integer + } + if (time.stream().anyMatch(l -> type.contains(l))) { + return 1; // time + } + return 0; // text + } + + public static String transDeType2DQ(int deType) { + switch (deType) { + case 0: + case 1: + case 5: + return "d"; + case 2: + case 3: + case 4: + return "q"; + default: + return "d"; + } + } +} diff --git a/backend/src/main/java/com/stdproject/utils/FileUtils.java b/backend/src/main/java/com/stdproject/utils/FileUtils.java index 63b2c01..406b41a 100644 --- a/backend/src/main/java/com/stdproject/utils/FileUtils.java +++ b/backend/src/main/java/com/stdproject/utils/FileUtils.java @@ -216,4 +216,5 @@ public class FileUtils { return String.format("%.1f GB", size / (1024.0 * 1024.0 * 1024.0)); } } + } \ No newline at end of file diff --git a/backend/src/main/resources/application.yml b/backend/src/main/resources/application.yml index d006c7c..390b5ba 100644 --- a/backend/src/main/resources/application.yml +++ b/backend/src/main/resources/application.yml @@ -16,6 +16,8 @@ spring: active: ${SPRING_PROFILES_ACTIVE:dev} application: name: stdproject-backend + main: + allow-bean-definition-overriding: true datasource: url: ${DB_URL:jdbc:mysql://121.37.111.42:3306/gisbi-demodb?useUnicode=true&characterEncoding=utf-8&serverTimezone=Asia/Shanghai&useSSL=false&allowPublicKeyRetrieval=true} username: ${DB_USERNAME:root} @@ -56,7 +58,7 @@ spring: enabled: ${JWT_ENABLED:true} # controls whether JWT authentication is enabled secret: ${JWT_SECRET:YourJWTSecretKeyForStdProjectBackendApplicationWhichIsVeryLongAndSecure2024!@#$%^&*()} expiration-ms: ${JWT_EXPIRATION:1800000} # token expiration time (e.g. 24 hours) - refresh-expiration-ms: ${JWT_REFRESH_EXPIRATION:604800000} # refresh-token expiration time (e.g. 7 days) + refresh-expiration-ms: ${JWT_REFRESH_EXPIRATION:1800000} # refresh-token expiration time (e.g. 30 minutes) mybatis-plus: mapper-locations: classpath*:/mapper/**/*.xml # MyBatis mapper XML file locations @@ -147,7 +149,7 @@ spring: on-profile: dev security: jwt: - enabled: true + enabled: false logging: level: com.stdproject: DEBUG @@ -155,7 +157,7 @@ logging: org.hibernate.type.descriptor.sql.BasicBinder: TRACE mybatis-plus: configuration: - log-impl: org.apache.ibatis.logging.stdout.StdOutImpl #org.apache.ibatis.logging.nologging.NoLoggingImpl springdoc: swagger-ui: enabled: true