fix: datasource plugin

taojinlong 2022-04-17 18:19:54 +08:00
parent 74613decc7
commit 7f00646285
22 changed files with 36 additions and 68 deletions

View File

@@ -34,8 +34,8 @@ public class KettleController {
     @ApiIgnore
     @PostMapping("validate")
-    public ResultHolder validate(@RequestBody KettleDTO kettleDTO) throws Exception{
-        return kettleService.validate(kettleDTO);
+    public void validate(@RequestBody KettleDTO kettleDTO) throws Exception{
+        kettleService.validate(kettleDTO);
     }
     @RequiresPermissions("sysparam:read")

View File

@@ -9,7 +9,7 @@ import org.apache.commons.lang3.StringUtils;
 @Setter
 public class MysqlConfiguration extends JdbcConfiguration {
-    private String driver = "com.mysql.cj.jdbc.Driver";
+    private String driver = "com.mysql.jdbc.Driver";
     private String extraParams = "characterEncoding=UTF-8&connectTimeout=5000&useSSL=false&allowPublicKeyRetrieval=true&zeroDateTimeBehavior=convertToNull";
     public String getJdbc() {

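getJdbc() appears above only as trailing context, so its body is not part of this diff. As a rough sketch of what such a configuration typically assembles, assuming host/port/database accessors inherited from JdbcConfiguration (which this hunk does not show):

    // Hedged sketch, not the actual DataEase implementation: builds the URL
    // the driver above receives, appending the extraParams shown in the hunk.
    public String getJdbc() {
        return "jdbc:mysql://" + getHost() + ":" + getPort() + "/" + getDataBase()
                + "?" + getExtraParams();
    }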
View File

@@ -33,6 +33,10 @@ public class ProviderFactory implements ApplicationContextAware {
     public static Provider getProvider(String type) {
+        if(type.equalsIgnoreCase(DatasourceTypes.engine_doris.toString()) || type.equalsIgnoreCase(DatasourceTypes.engine_mysql.toString())){
+            return context.getBean("jdbc", Provider.class);
+        }
         Map<String, DataSourceType> dataSourceTypeMap = SpringContextUtil.getApplicationContext().getBeansOfType((DataSourceType.class));
         if(dataSourceTypeMap.keySet().contains(type)){
             DatasourceTypes datasourceType = DatasourceTypes.valueOf(type);
@@ -51,48 +55,17 @@ public class ProviderFactory implements ApplicationContextAware {
     }
     public static QueryProvider getQueryProvider(String type) {
         Map<String, DataSourceType> dataSourceTypeMap = SpringContextUtil.getApplicationContext().getBeansOfType((DataSourceType.class));
         if(dataSourceTypeMap.keySet().contains(type)){
-            DatasourceTypes datasourceType = DatasourceTypes.valueOf(type);
-            switch (datasourceType) {
-                case mysql:
-                case mariadb:
-                case ds_doris:
-                case TiDB:
-                case StarRocks:
-                    return context.getBean("mysqlQuery", QueryProvider.class);
-                case sqlServer:
-                    return context.getBean("sqlserverQuery", QueryProvider.class);
-                case pg:
-                    return context.getBean("pgQuery", QueryProvider.class);
-                case oracle:
-                    return context.getBean("oracleQuery", QueryProvider.class);
-                case es:
-                    return context.getBean("esQuery", QueryProvider.class);
-                case ck:
-                    return context.getBean("ckQuery", QueryProvider.class);
-                case mongo:
-                    return context.getBean("mongoQuery", QueryProvider.class);
-                case redshift:
-                    return context.getBean("redshiftQuery", QueryProvider.class);
-                case hive:
-                    return context.getBean("hiveQuery", QueryProvider.class);
-                case impala:
-                    return context.getBean("impalaQuery", QueryProvider.class);
-                case db2:
-                    return context.getBean("db2Query", QueryProvider.class);
-                case api:
-                    return context.getBean("apiQuery", QueryProvider.class);
-                case engine_doris:
-                    return context.getBean("dorisEngineQuery", QueryProvider.class);
-                case engine_mysql:
-                    return context.getBean("mysqlEngineQuery", QueryProvider.class);
-                default:
-                    return context.getBean("mysqlQuery", QueryProvider.class);
-            }
+            switch (type) {
+                case "mysql":
+                case "mariadb":
+                case "ds_doris":
+                case "TiDB":
+                case "StarRocks":
+                    return context.getBean("mysqlQueryProvider", QueryProvider.class);
+                default:
+                    return SpringContextUtil.getApplicationContext().getBean(type + "QueryProvider", QueryProvider.class);
+            }
         }
+        return SpringContextUtil.getApplicationContext().getBean(type + "QueryProvider", QueryProvider.class);
     }
     public static DDLProvider getDDLProvider(String type) {

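The net effect of this refactor: apart from the MySQL-family types that share one implementation, a QueryProvider is now resolved purely by the bean-name convention type + "QueryProvider", which the @Service renames in the rest of this commit establish (including engine_doris and engine_mysql). A small usage sketch under that convention; the surrounding Spring context setup is assumed:

    // Sketch of the new convention-based lookup (type values mirror the
    // renamed @Service beans below).
    QueryProvider oracle = ProviderFactory.getQueryProvider("oracle");        // bean "oracleQueryProvider"
    QueryProvider doris  = ProviderFactory.getQueryProvider("engine_doris");  // bean "engine_dorisQueryProvider"
    // MySQL-family types still share one bean, kept by the explicit switch:
    QueryProvider tidb   = ProviderFactory.getQueryProvider("TiDB");          // bean "mysqlQueryProvider"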
View File

@@ -290,7 +290,6 @@ public class EsProvider extends Provider {
         Request request = new Request();
         request.setQuery(sql);
         request.setFetch_size(datasourceRequest.getFetchSize());
-        System.out.println(new Gson().toJson(request));
         String url = esConfiguration.getUrl().endsWith("/") ? esConfiguration.getUrl() + uri : esConfiguration.getUrl() + "/" + uri;
         String response = HttpClientUtil.post(url, new Gson().toJson(request), httpClientConfig);
         return response;

View File

@@ -357,8 +357,6 @@ public class JdbcProvider extends DefaultJdbcProvider {
         }
         Driver driverClass = (Driver) extendedJdbcClassLoader.loadClass(driver).newInstance();
-        System.out.println(driverClass.getMajorVersion());
-        System.out.println(driverClass.getMinorVersion());
         if (StringUtils.isNotBlank(username)) {
             props.setProperty("user", username);
             if (StringUtils.isNotBlank(password)) {

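For context on the surviving lines: loading the Driver through extendedJdbcClassLoader and connecting via the instance, rather than DriverManager, is the usual pattern when drivers ship inside plugin jars, because DriverManager rejects drivers loaded by a child classloader. A self-contained sketch of that pattern; everything besides the technique itself is an assumption, not DataEase code:

    import java.sql.Connection;
    import java.sql.Driver;
    import java.util.Properties;

    class PluginJdbcSketch {
        // Opens a connection with a driver only the plugin classloader can see.
        Connection open(ClassLoader pluginLoader, String driverClassName,
                        String jdbcUrl, String user, String password) throws Exception {
            Driver driver = (Driver) pluginLoader.loadClass(driverClassName)
                    .getDeclaredConstructor().newInstance();
            Properties props = new Properties();
            if (user != null) props.setProperty("user", user);
            if (password != null) props.setProperty("password", password);
            return driver.connect(jdbcUrl, props); // bypasses DriverManager's classloader check
        }
    }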
View File

@@ -35,7 +35,7 @@ import static io.dataease.plugins.common.constants.SQLConstants.TABLE_ALIAS_PREFIX;
  * @Author gin
  * @Date 2021/5/17 4:11 PM
  */
-@Service("dorisEngineQuery")
+@Service("engine_dorisQueryProvider")
 public class DorisQueryProvider extends QueryProvider {
     @Resource
     private DatasetTableFieldMapper datasetTableFieldMapper;

View File

@@ -35,7 +35,7 @@ import static io.dataease.plugins.common.constants.SQLConstants.TABLE_ALIAS_PREFIX;
  * @Author gin
  * @Date 2021/5/17 4:11 PM
  */
-@Service("mysqlEngineQuery")
+@Service("engine_mysqlQueryProvider")
 public class MysqlQueryProvider extends QueryProvider {
     @Resource
     private DatasetTableFieldMapper datasetTableFieldMapper;

View File

@@ -11,7 +11,7 @@ import org.springframework.stereotype.Service;
 import java.util.List;
-@Service("apiQuery")
+@Service("apiQueryProvider")
 public class ApiProvider extends QueryProvider {
     @Override
     public Integer transFieldType(String field) {

View File

@@ -36,7 +36,7 @@ import static io.dataease.plugins.common.constants.SQLConstants.TABLE_ALIAS_PREFIX;
  * @Author gin
  * @Date 2021/5/17 2:43 PM
  */
-@Service("ckQuery")
+@Service("ckQueryProvider")
 public class CKQueryProvider extends QueryProvider {
     @Resource
     private DatasetTableFieldMapper datasetTableFieldMapper;

View File

@@ -34,7 +34,7 @@ import java.util.stream.Collectors;
 import static io.dataease.plugins.common.constants.SQLConstants.TABLE_ALIAS_PREFIX;
-@Service("db2Query")
+@Service("db2QueryProvider")
 public class Db2QueryProvider extends QueryProvider {
     @Resource
     private DatasetTableFieldMapper datasetTableFieldMapper;

View File

@@ -32,7 +32,7 @@ import java.util.stream.Collectors;
 import static io.dataease.plugins.common.constants.SQLConstants.TABLE_ALIAS_PREFIX;
-@Service("esQuery")
+@Service("esQueryProvider")
 public class EsQueryProvider extends QueryProvider {
     @Resource
     private DatasetTableFieldMapper datasetTableFieldMapper;

View File

@@ -36,7 +36,7 @@ import static io.dataease.plugins.common.constants.SQLConstants.TABLE_ALIAS_PREFIX;
  * @Author gin
  * @Date 2021/5/17 2:43 PM
  */
-@Service("hiveQuery")
+@Service("hiveQueryProvider")
 public class HiveQueryProvider extends QueryProvider {
     @Resource
     private DatasetTableFieldMapper datasetTableFieldMapper;

View File

@@ -34,7 +34,7 @@ import java.util.stream.Collectors;
 import static io.dataease.plugins.common.constants.SQLConstants.TABLE_ALIAS_PREFIX;
-@Service("impalaQuery")
+@Service("impalaQueryProvider")
 public class ImpalaQueryProvider extends QueryProvider {
     @Resource
     private DatasetTableFieldMapper datasetTableFieldMapper;

View File

@@ -37,7 +37,7 @@ import static io.dataease.plugins.common.constants.SQLConstants.TABLE_ALIAS_PREFIX;
  * @Author gin
  * @Date 2021/5/17 2:43 PM
  */
-@Service("mongoQuery")
+@Service("mongoQueryProvider")
 public class MongoQueryProvider extends QueryProvider {
     @Resource
     private DatasetTableFieldMapper datasetTableFieldMapper;

View File

@@ -36,7 +36,7 @@ import static io.dataease.plugins.common.constants.SQLConstants.TABLE_ALIAS_PREFIX;
  * @Author gin
  * @Date 2021/5/17 2:43 PM
  */
-@Service("mysqlQuery")
+@Service("mysqlQueryProvider")
 public class MysqlQueryProvider extends QueryProvider {
     @Resource
     private DatasetTableFieldMapper datasetTableFieldMapper;

View File

@@ -38,7 +38,7 @@ import static io.dataease.plugins.common.constants.SQLConstants.TABLE_ALIAS_PREFIX;
  * @Author gin
  * @Date 2021/5/17 2:43 PM
  */
-@Service("oracleQuery")
+@Service("oracleQueryProvider")
 public class OracleQueryProvider extends QueryProvider {
     private static final Integer STRING = 0;

View File

@@ -37,7 +37,7 @@ import java.util.stream.Collectors;
 import static io.dataease.plugins.common.constants.SQLConstants.TABLE_ALIAS_PREFIX;
-@Service("pgQuery")
+@Service("pgQueryProvider")
 public class PgQueryProvider extends QueryProvider {
     @Resource
     private DatasetTableFieldMapper datasetTableFieldMapper;

View File

@@ -41,7 +41,7 @@ import static io.dataease.plugins.common.constants.SQLConstants.TABLE_ALIAS_PREFIX;
  * @author: Jiantao Yan
  * @date: 2021/10/11 17:12
  **/
-@Service("redshiftQuery")
+@Service("redshiftQueryProvider")
 public class RedshiftQueryProvider extends QueryProvider {
     @Resource
     private DatasetTableFieldMapper datasetTableFieldMapper;

View File

@@ -34,7 +34,7 @@ import java.util.stream.Collectors;
 import static io.dataease.plugins.common.constants.SQLConstants.TABLE_ALIAS_PREFIX;
-@Service("sqlserverQuery")
+@Service("sqlserverQueryProvider")
 public class SqlserverQueryProvider extends QueryProvider {
     @Resource
     private DatasetTableFieldMapper datasetTableFieldMapper;

View File

@@ -153,7 +153,7 @@ public class EngineService {
         CacheUtils.remove("ENGINE", "SimpleKey []");
     }
-    @Cacheable(value = "ENGINE")
+    // @Cacheable(value = "ENGINE")
     public Datasource getDeEngine() throws Exception {
         Datasource datasource = new Datasource();

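A side note on the key string in the eviction call above: Spring keys a no-argument @Cacheable method with SimpleKey.EMPTY, whose toString() is exactly "SimpleKey []". An illustrative sketch, not DataEase code, of the pairing that the now-commented annotation provided:

    import org.springframework.cache.annotation.Cacheable;

    // With the annotation active, the first call populates the "ENGINE" cache
    // under "SimpleKey []" and CacheUtils.remove("ENGINE", "SimpleKey []")
    // evicts it; with it commented out, every call rebuilds the result.
    class EngineCacheSketch {
        @Cacheable(value = "ENGINE")
        public String getDeEngine() {
            return "built once, then served from the ENGINE cache";
        }
    }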
View File

@@ -54,17 +54,12 @@ public class KettleService {
         deEngineMapper.deleteByPrimaryKey(id);
     }
-    public ResultHolder validate(KettleDTO kettleDTO) throws Exception {
+    public void validate(KettleDTO kettleDTO) throws Exception {
         HttpClientConfig httpClientConfig = new HttpClientConfig();
         String authValue = "Basic " + Base64.getUrlEncoder().encodeToString((kettleDTO.getUser()
                 + ":" + kettleDTO.getPasswd()).getBytes());
         httpClientConfig.addHeader("Authorization", authValue);
-        try {
-            String response = HttpClientUtil.get("http://" + kettleDTO.getCarte() + ":" + kettleDTO.getPort() + "/kettle/status/", httpClientConfig);
-            return ResultHolder.success("Kettle is valid.");
-        }catch (Exception e){
-            return ResultHolder.error("Kettle is invalid: " + e.getMessage());
-        }
+        String response = HttpClientUtil.get("http://" + kettleDTO.getCarte() + ":" + kettleDTO.getPort() + "/kettle/status/", httpClientConfig);
     }
     public ResultHolder validate(String id) {

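After this change, validation failures surface as exceptions rather than a ResultHolder, matching the KettleController change at the top of this commit and leaving response shaping to the caller. A minimal sketch of the resulting calling pattern; the response strings are hypothetical:

    // The service now throws instead of returning ResultHolder, so success
    // is simply the absence of an exception.
    try {
        kettleService.validate(kettleDTO);
        // e.g. report "Kettle is valid."
    } catch (Exception e) {
        // e.g. report "Kettle is invalid: " + e.getMessage()
    }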
View File

@@ -11,3 +11,6 @@ COMMIT;
+INSERT INTO `system_parameter`(`param_key`, `param_value`, `type`, `sort`) VALUES ('ui.helpLink', NULL, 'text', 15);
+INSERT INTO `system_parameter`(`param_key`, `param_value`, `type`, `sort`) VALUES ('ui.homeLink', NULL, 'text', 16);
+ALTER TABLE `my_plugin` ADD COLUMN `ds_type` VARCHAR(45) NULL COMMENT 'datasource type' AFTER `icon`;