Merge pull request #1164 from dataease/hive

feat: support Hive data source
taojinlong authored 2021-11-10 18:45:15 +08:00, committed by GitHub
commit 99638187bb
12 changed files with 43 additions and 12 deletions


@ -3,6 +3,7 @@ package io.dataease.commons.constants;
public enum DatasourceTypes {
excel("excel", "excel", "", "", "", "", ""),
mysql("mysql", "mysql", "com.mysql.jdbc.Driver", "`", "`", "'", "'"),
hive("hive", "hive", "org.apache.hive.jdbc.HiveDriver", "`", "`", "'", "'"),
mariadb("mariadb", "mariadb", "com.mysql.jdbc.Driver", "`", "`", "'", "'"),
ds_doris("ds_doris", "ds_doris", "com.mysql.jdbc.Driver", "`", "`", "'", "'"),
pg("pg", "pg", "org.postgresql.Driver", "\"", "\"", "\"", "\""),
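
The new hive entry slots into the existing seven-argument pattern: a type key, a display name, the JDBC driver class, and two pairs of quoting characters. Note that it mirrors mysql's backtick quoting. A minimal sketch of the enum shape these entries imply — the field names are assumptions, since the constructor itself is outside the shown hunk:

// Sketch of the enum shape implied by the entries above; field names
// are assumed, not taken from this commit.
public enum DatasourceTypes {
    mysql("mysql", "mysql", "com.mysql.jdbc.Driver", "`", "`", "'", "'"),
    hive("hive", "hive", "org.apache.hive.jdbc.HiveDriver", "`", "`", "'", "'");

    private final String type;           // internal type key
    private final String name;           // display key
    private final String driver;         // JDBC driver class to load
    private final String keywordPrefix;  // identifier quoting, e.g. `col`
    private final String keywordSuffix;
    private final String aliasPrefix;    // alias/literal quoting, e.g. 'a'
    private final String aliasSuffix;

    DatasourceTypes(String type, String name, String driver,
                    String keywordPrefix, String keywordSuffix,
                    String aliasPrefix, String aliasSuffix) {
        this.type = type;
        this.name = name;
        this.driver = driver;
        this.keywordPrefix = keywordPrefix;
        this.keywordSuffix = keywordSuffix;
        this.aliasPrefix = aliasPrefix;
        this.aliasSuffix = aliasSuffix;
    }

    public String getDriver() { return driver; }
}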


@ -53,6 +53,8 @@ public class ProviderFactory implements ApplicationContextAware {
return context.getBean("mongoQuery", QueryProvider.class);
case redshift:
return context.getBean("redshiftQuery", QueryProvider.class);
case hive:
return context.getBean("hiveQuery", QueryProvider.class);
default:
return context.getBean("mysqlQuery", QueryProvider.class);
}
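
The new case resolves a Spring bean named "hiveQuery". The provider class behind it is among the 12 changed files but not shown on this page; a plausible minimal registration, assuming Hive can reuse the MySQL dialect (the enum above gives both the same backtick quoting):

import org.springframework.stereotype.Service;

// Hypothetical sketch of the bean ProviderFactory resolves; the real
// HiveQueryProvider in this commit is not shown in the diff above.
@Service("hiveQuery")
public class HiveQueryProvider extends MysqlQueryProvider {
    // Extending the MySQL provider is an assumption, suggested by Hive
    // sharing MySQL's backtick identifier quoting in DatasourceTypes.
}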


@ -194,10 +194,10 @@ public class EsProvider extends DatasourceProvider {
}
for (String[] row : esReponse.getRows()) {
if(row.length == 3 && row[1].equalsIgnoreCase("TABLE") && row[2].equalsIgnoreCase("INDEX")){
if(row.length == 3 && row[1].contains("TABLE") && row[2].equalsIgnoreCase("INDEX")){
tables.add(row[0]);
}
if(row.length == 2 && row[1].equalsIgnoreCase("BASE TABLE")){
if(row.length == 2 && row[1].contains("TABLE")){
tables.add(row[0]);
}
}


@ -170,6 +170,9 @@ public class JdbcProvider extends DatasourceProvider {
String f = metaData.getColumnName(j + 1);
String l = StringUtils.isNotEmpty(metaData.getColumnLabel(j + 1)) ? metaData.getColumnLabel(j + 1) : f;
String t = metaData.getColumnTypeName(j + 1);
if(datasourceRequest.getDatasource().getType().equalsIgnoreCase(DatasourceTypes.hive.name())){
l = l.split("\\.")[1];
}
TableFiled field = new TableFiled();
field.setFieldName(l);
field.setRemarks(l);
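
The split above exists because Hive's JDBC driver labels result columns as table.column. Splitting on the dot and taking index 1 assumes the dot is always present; a defensive variant of the same idea, offered as a sketch rather than what this commit ships:

// Sketch: strip the Hive "table." prefix only when it is present,
// avoiding an ArrayIndexOutOfBoundsException on unprefixed labels.
private static String stripTablePrefix(String label) {
    int dot = label.indexOf('.');
    return dot >= 0 ? label.substring(dot + 1) : label;
}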
@ -342,14 +345,25 @@ public class JdbcProvider extends DatasourceProvider {
password = redshiftConfigration.getPassword();
driver = redshiftConfigration.getDriver();
jdbcurl = redshiftConfigration.getJdbc();
break;
case hive:
HiveConfiguration hiveConfiguration = new Gson().fromJson(datasourceRequest.getDatasource().getConfiguration(), HiveConfiguration.class);
username = hiveConfiguration.getUsername();
password = hiveConfiguration.getPassword();
driver = hiveConfiguration.getDriver();
jdbcurl = hiveConfiguration.getJdbc();
break;
default:
break;
}
Driver driverClass = (Driver) extendedJdbcClassLoader.loadClass(driver).newInstance();
props.setProperty("user", username);
if (StringUtils.isNotBlank(password)) {
props.setProperty("password", password);
if (StringUtils.isNotBlank(username)) {
props.setProperty("user", username);
if (StringUtils.isNotBlank(password)) {
props.setProperty("password", password);
}
}
Connection conn = driverClass.connect(jdbcurl, props);
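
The reworked block only sends a user property when the username is non-blank, which matters for Hive: HiveServer2 commonly runs with no authentication. A standalone sketch of the same connection path — the URL and credentials are placeholders, not values from this commit:

import java.sql.Connection;
import java.sql.Driver;
import java.util.Properties;

// Standalone sketch of the connection path above; URL and credentials
// are placeholders.
public class HiveConnectSketch {
    public static void main(String[] args) throws Exception {
        String username = "";   // may legitimately be blank for Hive
        String password = "";
        Properties props = new Properties();
        if (username != null && !username.trim().isEmpty()) {
            props.setProperty("user", username);
            if (password != null && !password.trim().isEmpty()) {
                props.setProperty("password", password);
            }
        }
        Driver driver = (Driver) Class.forName("org.apache.hive.jdbc.HiveDriver")
                .getDeclaredConstructor().newInstance();
        try (Connection conn = driver.connect("jdbc:hive2://localhost:10000/default", props)) {
            System.out.println(conn.getMetaData().getDatabaseProductName());
        }
    }
}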
@ -362,7 +376,7 @@ public class JdbcProvider extends DatasourceProvider {
druidDataSource.setInitialSize(jdbcConfiguration.getInitialPoolSize()); // initial pool size
druidDataSource.setMinIdle(jdbcConfiguration.getMinPoolSize()); // minimum idle connections
druidDataSource.setMaxActive(jdbcConfiguration.getMaxPoolSize()); // maximum active connections
if(datasourceRequest.getDatasource().getType().equals(DatasourceTypes.mongo.name())){
if(datasourceRequest.getDatasource().getType().equals(DatasourceTypes.mongo.name()) || datasourceRequest.getDatasource().getType().equals(DatasourceTypes.hive.name())){
WallFilter wallFilter = new WallFilter();
wallFilter.setDbType(DatasourceTypes.mysql.name());
druidDataSource.setProxyFilters(Arrays.asList(new Filter[]{wallFilter}));
@ -424,6 +438,13 @@ public class JdbcProvider extends DatasourceProvider {
dataSource.setDriverClassName(redshiftConfigration.getDriver());
dataSource.setUrl(redshiftConfigration.getJdbc());
jdbcConfiguration = redshiftConfigration;
break;
case hive:
HiveConfiguration hiveConfiguration = new Gson().fromJson(datasourceRequest.getDatasource().getConfiguration(), HiveConfiguration.class);
dataSource.setPassword(hiveConfiguration.getPassword());
dataSource.setDriverClassName(hiveConfiguration.getDriver());
dataSource.setUrl(hiveConfiguration.getJdbc());
jdbcConfiguration = hiveConfiguration;
break;
default:
break;
}
@ -442,7 +463,8 @@ public class JdbcProvider extends DatasourceProvider {
case mariadb:
case de_doris:
case ds_doris:
return "show tables;";
case hive:
return "show tables";
case sqlServer:
SqlServerConfiguration sqlServerConfiguration = new Gson().fromJson(datasourceRequest.getDatasource().getConfiguration(), SqlServerConfiguration.class);
if(StringUtils.isEmpty(sqlServerConfiguration.getSchema())){
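
Several hunks above deserialize a HiveConfiguration with Gson and call getUsername(), getPassword(), getDriver(), and getJdbc() on it. That class is part of this commit but outside the shown hunks; a minimal sketch consistent with those call sites, assuming it extends the existing JdbcConfiguration base (taken here to supply the host, port, database, and credential accessors):

// Hypothetical sketch of the HiveConfiguration used above; the real class
// is among the 12 changed files but not shown on this page.
public class HiveConfiguration extends JdbcConfiguration {
    private String driver = "org.apache.hive.jdbc.HiveDriver";

    public String getDriver() {
        return driver;
    }

    // Standard HiveServer2 URL shape; getHost()/getPort()/getDataBase()
    // are assumed to come from the JdbcConfiguration base class.
    public String getJdbc() {
        return "jdbc:hive2://" + getHost().trim() + ":" + getPort()
                + "/" + getDataBase().trim();
    }
}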


@ -188,7 +188,7 @@ public class DataSetTableService {
if (datasetTable.getEditType() == 0) {
List<String> newFields = sheet.getFields().stream().map(TableFiled::getRemarks).collect(Collectors.toList());
if (!oldFields.equals(newFields)) {
DataEaseException.throwException(Translator.get("i18n_excel_colume_change"));
DataEaseException.throwException(Translator.get("i18n_excel_colume_inconsistent"));
}
oldFields = newFields;
}


@ -279,6 +279,7 @@ i18n_msg_type_dataset_sync_success=Dataset synchronization successful
i18n_msg_type_dataset_sync_faild=Dataset synchronization failed
i18n_data_not_sync=Please sync data first
i18n_excel_colume_change=The column name of Excel is inconsistent with the original data set
i18n_excel_colume_inconsistent=The column names of the selected sheet pages are inconsistent
i18n_timed_task=Timed Task
i18n_datasource_connect_error=Data source connection exception:
i18n_check_sql_error=Check incremental SQL exception,


@ -278,6 +278,7 @@ i18n_msg_type_dataset_sync_success=数据集同步成功
i18n_msg_type_dataset_sync_faild=数据集同步失败
i18n_data_not_sync=请先完成数据同步
i18n_excel_colume_change=Excel的列名与原数据集不一致
i18n_excel_colume_inconsistent=所选sheet页面的列名不一致
i18n_timed_task=定时任务
i18n_datasource_connect_error=数据源连接异常:
i18n_check_sql_error=校验增量 SQL 异常,


@ -281,6 +281,7 @@ i18n_msg_type_dataset_sync_success=數據集同步成功
i18n_msg_type_dataset_sync_faild=數據集同步失敗
i18n_data_not_sync=請先完成數據同步
i18n_excel_colume_change=Excel的列名與原數據集不一致
i18n_excel_colume_inconsistent=所選sheet頁面的列名不一致
i18n_timed_task=定時任務
i18n_datasource_connect_error=數據源連接異常:
i18n_check_sql_error=校驗增量SQL異常,


@ -29,7 +29,7 @@
<el-form-item class="form-item">
<el-select v-model="mode" filterable :placeholder="$t('dataset.connect_mode')" size="mini">
<el-option :label="$t('dataset.direct_connect')" value="0" />
<el-option :label="$t('dataset.sync_data')" value="1" :disabled="!kettleRunning || selectedDatasource.type==='es' || selectedDatasource.type==='ck' || selectedDatasource.type==='mongo' || selectedDatasource.type==='redshift'" />
<el-option :label="$t('dataset.sync_data')" value="1" :disabled="!kettleRunning || selectedDatasource.type==='es' || selectedDatasource.type==='ck' || selectedDatasource.type==='mongo' || selectedDatasource.type==='redshift' || selectedDatasource.type==='hive'" />
</el-select>
</el-form-item>


@ -33,7 +33,7 @@
<el-form-item v-if="!param.tableId" class="form-item">
<el-select v-model="mode" filterable :placeholder="$t('dataset.connect_mode')" size="mini">
<el-option :label="$t('dataset.direct_connect')" value="0" />
<el-option :label="$t('dataset.sync_data')" value="1" :disabled="!kettleRunning || selectedDatasource.type==='es' || selectedDatasource.type==='ck'|| selectedDatasource.type==='mongo'|| selectedDatasource.type==='redshift'" />
<el-option :label="$t('dataset.sync_data')" value="1" :disabled="!kettleRunning || selectedDatasource.type==='es' || selectedDatasource.type==='ck'|| selectedDatasource.type==='mongo'|| selectedDatasource.type==='redshift' || selectedDatasource.type==='hive'" />
</el-select>
</el-form-item>


@ -185,6 +185,8 @@ export default {
return 'MongoDB'
}else if (type === 'redshift') {
return 'AWS Redshift'
} else if (type === 'hive') {
return 'Apache Hive'
}
},


@ -39,10 +39,10 @@
<el-radio v-model="form.configuration.connectionType" label="serviceName">{{ $t('datasource.oracle_service_name') }}</el-radio>
</el-form-item>
<el-form-item v-if="form.configuration.dataSourceType=='jdbc'" :label="$t('datasource.user_name')" prop="configuration.username">
<el-form-item v-if="form.configuration.dataSourceType=='jdbc'" :label="$t('datasource.user_name')">
<el-input v-model="form.configuration.username" autocomplete="off" />
</el-form-item>
<el-form-item v-if="form.configuration.dataSourceType=='jdbc'" :label="$t('datasource.password')" prop="configuration.password">
<el-form-item v-if="form.configuration.dataSourceType=='jdbc'" :label="$t('datasource.password')">
<el-input v-model="form.configuration.password" autocomplete="off" show-password />
</el-form-item>
<el-form-item v-if="form.configuration.dataSourceType=='es'" :label="$t('datasource.user_name')" >
@ -152,6 +152,7 @@ export default {
},
allTypes: [
{ name: 'mysql', label: 'MySQL', type: 'jdbc', extraParams: 'characterEncoding=UTF-8&connectTimeout=5000&useSSL=false&allowPublicKeyRetrieval=true'},
{ name: 'hive', label: 'Apache Hive', type: 'jdbc', extraParams: ''},
{ name: 'oracle', label: 'Oracle', type: 'jdbc'},
{ name: 'sqlServer', label: 'SQL Server', type: 'jdbc', extraParams: ''},
{ name: 'pg', label: 'PostgreSQL', type: 'jdbc', extraParams: '' },