fix: data format issue when extracting Excel data;

taojinlong 2021-05-24 17:06:35 +08:00
parent f64122b7af
commit f9bb4b3142
10 changed files with 84 additions and 57 deletions

View File

@@ -7,6 +7,8 @@ import io.dataease.base.mapper.ext.ExtDataSourceMapper;
import io.dataease.base.mapper.ext.query.GridExample;
import io.dataease.commons.exception.DEException;
import io.dataease.commons.utils.AuthUtils;
import io.dataease.commons.utils.CommonThreadPool;
import io.dataease.commons.utils.LogUtil;
import io.dataease.controller.request.DatasourceUnionRequest;
import io.dataease.controller.sys.base.BaseGridRequest;
import io.dataease.controller.sys.base.ConditionEntity;
@@ -40,6 +42,8 @@ public class DatasourceService {
    private DatasetTableMapper datasetTableMapper;
    @Resource
    private DataSetGroupService dataSetGroupService;
    @Resource
    private CommonThreadPool commonThreadPool;

    public Datasource addDatasource(Datasource datasource) {
        DatasourceExample example = new DatasourceExample();
@@ -114,7 +118,6 @@ public class DatasourceService {
            DataTableInfoDTO dataTableInfoDTO = new Gson().fromJson(datasetTable.getInfo(), DataTableInfoDTO.class);
            if (StringUtils.equals(name, dataTableInfoDTO.getTable())) {
                dbTableDTO.setEnableCheck(false);
                List<DatasetGroup> parents = dataSetGroupService.getParents(datasetTable.getSceneId());
                StringBuilder stringBuilder = new StringBuilder();
                parents.forEach(ele -> stringBuilder.append(ele.getName()).append("/"));
@@ -131,4 +134,26 @@ public class DatasourceService {
    public Datasource get(String id) {
        return datasourceMapper.selectByPrimaryKey(id);
    }

    public void initAllDataSourceConnectionPool() {
        List<Datasource> datasources = datasourceMapper.selectByExampleWithBLOBs(new DatasourceExample());
        datasources.forEach(datasource -> {
            try {
                commonThreadPool.addTask(() -> {
                    try {
                        System.out.println(new Gson().toJson(datasource));
                        DatasourceProvider datasourceProvider = ProviderFactory.getProvider(datasource.getType());
                        DatasourceRequest datasourceRequest = new DatasourceRequest();
                        datasourceRequest.setDatasource(datasource);
                        datasourceProvider.initDataSource(datasourceRequest);
                        LogUtil.info("Success to init datasource connection pool: " + datasource.getName());
                    } catch (Exception e) {
                        LogUtil.error("Failed to init datasource connection pool: " + datasource.getName(), e);
                    }
                });
            } catch (Exception e) {
                e.printStackTrace();
            }
        });
    }
}
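
A note on the pattern above: each datasource is initialized in its own fire-and-forget task, and the inner try/catch ensures that one misconfigured datasource merely logs an error instead of aborting the rest. Below is a minimal, self-contained sketch of the same idea, with a plain ExecutorService standing in for CommonThreadPool; the datasource names and the initConnectionPool helper are hypothetical:

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class PoolInitSketch {

    public static void main(String[] args) {
        ExecutorService pool = Executors.newFixedThreadPool(4); // stand-in for CommonThreadPool
        List<String> datasources = Arrays.asList("mysql-prod", "doris-warehouse", "broken-ds");

        datasources.forEach(ds -> pool.submit(() -> {
            try {
                initConnectionPool(ds); // may throw for a bad configuration
                System.out.println("Success to init datasource connection pool: " + ds);
            } catch (Exception e) {
                // failure is contained: the remaining datasources still initialize
                System.err.println("Failed to init datasource connection pool: " + ds);
            }
        }));
        pool.shutdown();
    }

    // hypothetical stand-in for DatasourceProvider.initDataSource(...)
    private static void initConnectionPool(String name) throws Exception {
        if (name.startsWith("broken")) {
            throw new Exception("bad configuration");
        }
    }
}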

View File

@@ -1,47 +0,0 @@
//package io.dataease.listener;
//
//import io.dataease.base.mapper.DatasetTableMapper;
//import io.dataease.commons.utils.CommonThreadPool;
//import io.dataease.service.dataset.DataSetTableFieldsService;
//import org.springframework.boot.context.event.ApplicationReadyEvent;
//import org.springframework.context.ApplicationListener;
//import org.springframework.core.annotation.Order;
//import org.springframework.core.env.Environment;
//import org.springframework.stereotype.Component;
//
//import javax.annotation.Resource;
//
//@Component
//@Order(value = 2)
//public class AppStartReadHBaseListener implements ApplicationListener<ApplicationReadyEvent> {
// @Resource
// private CommonThreadPool commonThreadPool;
//// @Resource
//// private SparkCalc sparkCalc;
// @Resource
// private Environment env; // holds the configuration file settings
//
// @Resource
// private DatasetTableMapper datasetTableMapper;
// @Resource
// private DataSetTableFieldsService dataSetTableFieldsService;
//
// @Override
// public void onApplicationEvent(ApplicationReadyEvent applicationReadyEvent) {
//// System.out.println("================= Read HBase start =================");
//// // On startup, find the scheduled-extraction tables among the datasets, read them from HBase and put them into the cache
//// DatasetTableExample datasetTableExample = new DatasetTableExample();
//// datasetTableExample.createCriteria().andModeEqualTo(1);
//// List<DatasetTable> datasetTables = datasetTableMapper.selectByExampleWithBLOBs(datasetTableExample);
//// for (DatasetTable table : datasetTables) {
////// commonThreadPool.addTask(() -> {
//// try {
//// List<DatasetTableField> fields = dataSetTableFieldsService.getFieldsByTableId(table.getId());
//// sparkCalc.getHBaseDataAndCache(table.getId(), fields);
//// } catch (Exception e) {
//// e.printStackTrace();
//// }
////// });
//// }
// }
//}

View File

@@ -0,0 +1,25 @@
package io.dataease.listener;

import io.dataease.base.domain.DatasetTableTask;
import io.dataease.datasource.service.DatasourceService;
import io.dataease.service.ScheduleService;
import io.dataease.service.dataset.DataSetTableTaskService;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.List;

@Component
@Order(value = 1)
public class DataSourceInitStartListener implements ApplicationListener<ApplicationReadyEvent> {
    @Resource
    private DatasourceService datasourceService;

    @Override
    public void onApplicationEvent(ApplicationReadyEvent applicationReadyEvent) {
        datasourceService.initAllDataSourceConnectionPool();
    }
}
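
The @Order(value = 1) is what makes this listener run before other ApplicationReadyEvent listeners: Spring dispatches the event to listeners in ascending order of their @Order values. A minimal sketch of two ordered listeners (class names are illustrative, not from this repository):

import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;

@Component
@Order(1)
class InitPoolsListener implements ApplicationListener<ApplicationReadyEvent> {
    @Override
    public void onApplicationEvent(ApplicationReadyEvent event) {
        System.out.println("runs first: prepare connection pools");
    }
}

@Component
@Order(2)
class ScheduleTasksListener implements ApplicationListener<ApplicationReadyEvent> {
    @Override
    public void onApplicationEvent(ApplicationReadyEvent event) {
        System.out.println("runs second: can rely on the pools existing");
    }
}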

View File

@@ -14,6 +14,7 @@ import io.dataease.dto.dataset.DataSetTableDTO;
import io.dataease.i18n.Translator;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
@@ -32,6 +33,7 @@ public class DataSetGroupService {
    @Resource
    private DatasetGroupMapper datasetGroupMapper;
    @Resource
    @Lazy
    private DataSetTableService dataSetTableService;
    @Resource
    private ExtDataSetGroupMapper extDataSetGroupMapper;
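
The @Lazy added here (and mirrored in ExtractDataService below) is the usual way to break a circular bean dependency: the annotated field receives a lazy proxy, so the target bean does not have to be fully constructed first. It is likely needed because this same commit makes DataSetTableService transactional, i.e. a proxied bean, which can trip circular-reference resolution at startup. A minimal sketch of the pattern (bean names are illustrative):

import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;

@Service
class ServiceA {
    @Resource
    @Lazy // inject a proxy; ServiceB is only resolved on first use
    private ServiceB serviceB;
}

@Service
class ServiceB {
    @Resource
    private ServiceA serviceA; // the cycle: B also depends on A
}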

View File

@@ -35,6 +35,7 @@ import org.apache.poi.xssf.usermodel.XSSFSheet;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import javax.annotation.Resource;
@@ -50,6 +51,7 @@ import java.util.stream.Collectors;
 * @Date 2021/2/23 2:54 PM
 */
@Service
@Transactional(rollbackFor = Exception.class)
public class DataSetTableService {
    @Resource
    private DatasetTableMapper datasetTableMapper;
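
One detail worth spelling out about the new @Transactional: by default Spring rolls back only on unchecked exceptions (RuntimeException and Error), so rollbackFor = Exception.class is what makes checked exceptions abort the transaction as well. A small illustration with hypothetical service and method names:

import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

@Service
@Transactional(rollbackFor = Exception.class)
public class TransactionDemoService {

    // With a bare @Transactional, the checked exception below would NOT
    // trigger a rollback, and the first write would be committed anyway.
    public void saveTableAndFields() throws Exception {
        insertTableRow(); // hypothetical write #1
        throw new Exception("field parsing failed"); // checked: rolls back only with rollbackFor
    }

    private void insertTableRow() {
        // hypothetical database write
    }
}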

View File

@@ -63,6 +63,7 @@ import org.pentaho.di.trans.steps.userdefinedjavaclass.UserDefinedJavaClassDef;
import org.pentaho.di.trans.steps.userdefinedjavaclass.UserDefinedJavaClassMeta;
import org.pentaho.di.www.SlaveServerJobStatus;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
@@ -82,6 +83,7 @@ import java.util.stream.Collectors;
public class ExtractDataService {
    @Resource
    @Lazy
    private DataSetTableService dataSetTableService;
    @Resource
    private DataSetTableFieldsService dataSetTableFieldsService;
@@ -91,7 +93,6 @@ public class ExtractDataService {
    private DataSetTableTaskService dataSetTableTaskService;
    @Resource
    private DatasourceMapper datasourceMapper;
    private static ExecutorService pool = Executors.newScheduledThreadPool(50); // set up the connection pool
    private static String lastUpdateTime = "${__last_update_time__}";
    private static String currentUpdateTime = "${__current_update_time__}";
@@ -113,7 +114,6 @@ public class ExtractDataService {
"UNIQUE KEY(dataease_uuid)\n" +
"DISTRIBUTED BY HASH(dataease_uuid) BUCKETS 10\n" +
"PROPERTIES(\"replication_num\" = \"1\");";
private static String shellScript = "curl --location-trusted -u %s:%s -H \"label:%s\" -H \"column_separator:%s\" -H \"columns:%s\" -H \"merge_type: %s\" -T %s -XPUT http://%s:%s/api/%s/%s/_stream_load\n" +
"rm -rf %s\n";
@@ -448,6 +448,7 @@ public class ExtractDataService {
                    selectSQL = qp.createQuerySQL(tableName, datasetTableFields);
                }
                inputStep = inputStep(transMeta, selectSQL);
                udjcStep = udjc(datasetTableFields, false);
                break;
            case sqlServer:
                SqlServerConfigration sqlServerConfigration = new Gson().fromJson(datasource.getConfiguration(), SqlServerConfigration.class);
@@ -459,10 +460,12 @@
                    selectSQL = qp.createQuerySQL(tableName, datasetTableFields);
                }
                inputStep = inputStep(transMeta, selectSQL);
                udjcStep = udjc(datasetTableFields, false);
                break;
            case excel:
                String filePath = new Gson().fromJson(datasetTable.getInfo(), DataTableInfoDTO.class).getData();
                inputStep = excelInputStep(filePath, datasetTableFields);
                udjcStep = udjc(datasetTableFields, true);
            default:
                break;
        }
@@ -487,7 +490,7 @@
                break;
        }
        udjcStep = udjc(datasetTableFields);
        outputStep = outputStep(dorisOutputTable);
        hi1 = new TransHopMeta(inputStep, udjcStep);
        hi2 = new TransHopMeta(udjcStep, outputStep);
@@ -569,11 +572,11 @@
        return outputStep;
    }

    private StepMeta udjc(List<DatasetTableField> datasetTableFields) {
        String needToChangeolumnType = "";
    private StepMeta udjc(List<DatasetTableField> datasetTableFields, boolean isExcel) {
        String needToChangeColumnType = "";
        for (DatasetTableField datasetTableField : datasetTableFields) {
            if (datasetTableField.getDeExtractType() != null && datasetTableField.getDeExtractType() == 4) {
                needToChangeolumnType = needToChangeolumnType + alterColumnTypeCode.replace("FILED", datasetTableField.getOriginName());
                needToChangeColumnType = needToChangeColumnType + alterColumnTypeCode.replace("FILED", datasetTableField.getOriginName());
            }
        }
@@ -583,8 +586,13 @@
        fields.add(fieldInfo);
        userDefinedJavaClassMeta.setFieldInfo(fields);
        List<UserDefinedJavaClassDef> definitions = new ArrayList<UserDefinedJavaClassDef>();

        UserDefinedJavaClassDef userDefinedJavaClassDef = new UserDefinedJavaClassDef(UserDefinedJavaClassDef.ClassType.TRANSFORM_CLASS, "Processor",
                code.replace("alterColumnTypeCode", needToChangeolumnType).replace("Column_Fields", String.join(",", datasetTableFields.stream().map(DatasetTableField::getOriginName).collect(Collectors.toList()))));
        String tmp_code = code.replace("alterColumnTypeCode", needToChangeColumnType).replace("Column_Fields", String.join(",", datasetTableFields.stream().map(DatasetTableField::getOriginName).collect(Collectors.toList())));
        if (isExcel) {
            tmp_code = tmp_code.replace("handleExcelIntColumn", handleExcelIntColumn);
        } else {
            tmp_code = tmp_code.replace("handleExcelIntColumn", "");
        }
        UserDefinedJavaClassDef userDefinedJavaClassDef = new UserDefinedJavaClassDef(UserDefinedJavaClassDef.ClassType.TRANSFORM_CLASS, "Processor", tmp_code);
        userDefinedJavaClassDef.setActive(true);
        definitions.add(userDefinedJavaClassDef);
@@ -629,6 +637,14 @@
" }\n" +
" }\n";
private static String handleExcelIntColumn = " \t\tif(tmp != null && tmp.endsWith(\".0\")){\n" +
" try {\n" +
" Integer.valueOf(tmp.substring(0, tmp.length()-2));\n" +
" get(Fields.Out, filed).setValue(r, tmp.substring(0, tmp.length()-2));\n" +
" get(Fields.Out, filed).getValueMeta().setType(2);\n" +
" }catch (Exception e){}\n" +
" }";
private static String code = "import org.pentaho.di.core.row.ValueMetaInterface;\n" +
"import java.util.List;\n" +
"import java.io.File;\n" +
@@ -659,6 +675,7 @@
" for (String filed : fileds) {\n" +
" String tmp = get(Fields.In, filed).getString(r);\n" +
"alterColumnTypeCode \n" +
"handleExcelIntColumn \n" +
" str = str + tmp;\n" +
" }\n" +
"\n" +

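The heart of the Excel fix is the handleExcelIntColumn snippet spliced into the generated Kettle transform class above: numeric Excel cells arrive as strings like "123.0", and the snippet strips the trailing ".0" only when the remaining prefix parses as an Integer, then marks the output field as numeric. The same logic in isolation, as a runnable sketch outside Kettle:

public class ExcelIntColumnSketch {

    // mirrors the injected snippet: "123.0" -> "123" when the prefix is a valid int
    static String normalize(String tmp) {
        if (tmp != null && tmp.endsWith(".0")) {
            try {
                Integer.valueOf(tmp.substring(0, tmp.length() - 2)); // throws if not an int
                return tmp.substring(0, tmp.length() - 2);
            } catch (Exception e) {
                // not an integer (e.g. "abc.0" or beyond Integer range): keep the original
            }
        }
        return tmp;
    }

    public static void main(String[] args) {
        System.out.println(normalize("123.0"));        // 123
        System.out.println(normalize("1.50"));         // 1.50  (does not end with ".0")
        System.out.println(normalize("abc.0"));        // abc.0 (prefix not numeric)
        System.out.println(normalize("3000000000.0")); // unchanged: exceeds Integer range
    }
}
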
View File

@@ -160,7 +160,7 @@ export default {
    handleFileChange(e) {
      const file = e.target.files[0]
      if (!file.type.includes('image')) {
        toast('只能插入图片')
        toast(this.$t('panel.picture_limit'))
        return
      }

View File

@@ -815,6 +815,7 @@ export default {
    input_limit_0_50: '0-50 chars'
  },
  panel: {
    picture_limit: 'Only pictures can be inserted',
    drag_here: 'Please drag the left field here',
    copy_link_passwd: 'Copy link and password',
    copy_link: 'Copy link',

View File

@@ -814,6 +814,7 @@ export default {
    input_limit_0_50: '0-50字符'
  },
  panel: {
    picture_limit: '只能插入圖片',
    drag_here: '請將左側字段拖至此處',
    copy_link_passwd: '複製鏈接及密碼',
    copy_link: '複製鏈接',

View File

@@ -816,6 +816,7 @@ export default {
    input_limit_0_50: '0-50字符'
  },
  panel: {
    picture_limit: '只能插入图片',
    drag_here: '请将左侧字段拖至此处',
    copy_link_passwd: '复制链接及密码',
    copy_link: '复制链接',