Merge remote-tracking branch 'origin/main' into main
@@ -0,0 +1,99 @@
package io.dataease.commons.utils;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.util.concurrent.*;

/**
 * @Author gin
 * @Date 2021/4/13 4:08 PM
 */
public class CommonThreadPool {

    private int corePoolSize = 10;

    private int maxQueueSize = 10;

    private int keepAliveSeconds = 600;

    private ScheduledThreadPoolExecutor scheduledThreadPoolExecutor;

    @PostConstruct
    public void init() {
        scheduledThreadPoolExecutor = new ScheduledThreadPoolExecutor(corePoolSize);
        scheduledThreadPoolExecutor.setKeepAliveTime(keepAliveSeconds, TimeUnit.SECONDS);
    }

    @PreDestroy
    public void shutdown() {
        if (scheduledThreadPoolExecutor != null) {
            scheduledThreadPoolExecutor.shutdown();
        }
    }

    /**
     * Whether the thread pool is available (the actual queue size is within the maximum queue size)
     *
     * @return true if available, false otherwise
     */
    public boolean available() {
        return scheduledThreadPoolExecutor.getQueue().size() <= maxQueueSize;
    }

    /**
     * Add a task; the queue-size limit is not enforced
     *
     * @param task the task to run
     */
    public void addTask(Runnable task) {
        scheduledThreadPoolExecutor.execute(task);
    }

    /**
     * Add a task to run after a delay; the queue-size limit is not enforced
     *
     * @param task  the task to run
     * @param delay the delay
     * @param unit  the time unit of the delay
     */
    public void scheduleTask(Runnable task, long delay, TimeUnit unit) {
        scheduledThreadPoolExecutor.schedule(task, delay, unit);
    }

    /**
     * Add a task with a timeout (a task that has not finished within the timeout is terminated and removed,
     * so that a long-running task cannot occupy the pool)
     *
     * @param task     the task to run
     * @param timeOut  the timeout
     * @param timeUnit the time unit of the timeout
     */
    public void addTask(Runnable task, long timeOut, TimeUnit timeUnit) {
        scheduledThreadPoolExecutor.execute(() -> {
            ExecutorService executorService = Executors.newSingleThreadExecutor();
            try {
                Future future = executorService.submit(task);
                future.get(timeOut, timeUnit); // blocks until the task finishes or the timeout elapses
            } catch (TimeoutException timeoutException) {
                LogUtil.getLogger().error("timeout to execute task", timeoutException);
            } catch (Exception exception) {
                LogUtil.getLogger().error("failed to execute task", exception);
            } finally {
                if (!executorService.isShutdown()) {
                    executorService.shutdown();
                }
            }
        });
    }

    public void setCorePoolSize(int corePoolSize) {
        this.corePoolSize = corePoolSize;
    }

    public void setMaxQueueSize(int maxQueueSize) {
        this.maxQueueSize = maxQueueSize;
    }

    public void setKeepAliveSeconds(int keepAliveSeconds) {
        this.keepAliveSeconds = keepAliveSeconds;
    }
}
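A minimal usage sketch of this pool (illustrative only: the bean registration appears in CommonConfig below, and warmCache/reloadFromHBase are hypothetical names):

    @Resource
    private CommonThreadPool commonThreadPool;

    public void warmCache(String tableId) {
        if (commonThreadPool.available()) {
            // addTask with a timeout logs an error if the task runs longer than 5 minutes
            commonThreadPool.addTask(() -> reloadFromHBase(tableId), 5, TimeUnit.MINUTES);
        }
    }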
@@ -1,6 +1,7 @@
package io.dataease.config;

import com.fit2cloud.autoconfigure.QuartzAutoConfiguration;
import io.dataease.commons.utils.CommonThreadPool;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.SparkSession;
@@ -33,31 +34,20 @@ public class CommonConfig {
        return configuration;
    }

    @Bean
    @ConditionalOnMissingBean
    public JavaSparkContext javaSparkContext() {
    public SparkSession javaSparkSession() {
        SparkSession spark = SparkSession.builder()
                .appName(env.getProperty("spark.appName", "DataeaseJob"))
                .master(env.getProperty("spark.master", "local[*]"))
                .config("spark.scheduler.mode", "FAIR")
                .getOrCreate();
        JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
        return sc;
        return spark;
    }

    @Bean
    @ConditionalOnMissingBean
    public SQLContext sqlContext(JavaSparkContext javaSparkContext) {
        SQLContext sqlContext = new SQLContext(javaSparkContext);
        sqlContext.setConf("spark.sql.shuffle.partitions", env.getProperty("spark.sql.shuffle.partitions", "1"));
        sqlContext.setConf("spark.default.parallelism", env.getProperty("spark.default.parallelism", "1"));
        return sqlContext;
    }

    @Bean
    @ConditionalOnMissingBean
    public KettleFileRepository kettleFileRepository()throws Exception{
    public KettleFileRepository kettleFileRepository() throws Exception {
        KettleEnvironment.init();
        KettleFileRepository repository = new KettleFileRepository();
        KettleFileRepositoryMeta kettleDatabaseMeta = new KettleFileRepositoryMeta("KettleFileRepository", "repo",
@@ -65,4 +55,13 @@ public class CommonConfig {
        repository.init(kettleDatabaseMeta);
        return repository;
    }

    @Bean(destroyMethod = "shutdown")
    public CommonThreadPool resourcePoolThreadPool() {
        CommonThreadPool commonThreadPool = new CommonThreadPool();
        commonThreadPool.setCorePoolSize(20);
        commonThreadPool.setMaxQueueSize(100);
        commonThreadPool.setKeepAliveSeconds(3600);
        return commonThreadPool;
    }
}
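For reference, a minimal sketch of how other components obtain the shared SparkSession registered above (this mirrors the lookup used in SparkCalc later in this diff):

    // Sketch only: resolve the Spring-managed SparkSession through CommonBeanFactory.
    SparkSession spark = CommonBeanFactory.getBean(SparkSession.class);
    JavaSparkContext sparkContext = new JavaSparkContext(spark.sparkContext());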
@@ -6,12 +6,14 @@ import io.dataease.service.ScheduleService;
import io.dataease.service.dataset.DataSetTableTaskService;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.List;

@Component
@Order(value = 1)
public class AppStartListener implements ApplicationListener<ApplicationReadyEvent> {
    @Resource
    private ScheduleService scheduleService;
@@ -0,0 +1,58 @@
package io.dataease.listener;

import io.dataease.base.domain.DatasetTable;
import io.dataease.base.domain.DatasetTableExample;
import io.dataease.base.domain.DatasetTableField;
import io.dataease.base.domain.DatasetTableFieldExample;
import io.dataease.base.mapper.DatasetTableFieldMapper;
import io.dataease.base.mapper.DatasetTableMapper;
import io.dataease.commons.utils.CommonBeanFactory;
import io.dataease.commons.utils.CommonThreadPool;
import io.dataease.service.dataset.DataSetTableFieldsService;
import io.dataease.service.spark.SparkCalc;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.SparkSession;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.core.annotation.Order;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.List;

@Component
@Order(value = 2)
public class AppStartReadHBaseListener implements ApplicationListener<ApplicationReadyEvent> {
    @Resource
    private CommonThreadPool commonThreadPool;
    @Resource
    private SparkCalc sparkCalc;
    @Resource
    private Environment env; // holds the values from the configuration files

    @Resource
    private DatasetTableMapper datasetTableMapper;
    @Resource
    private DataSetTableFieldsService dataSetTableFieldsService;

    @Override
    public void onApplicationEvent(ApplicationReadyEvent applicationReadyEvent) {
        System.out.println("================= Read HBase start =================");
        // On application startup, find the dataset tables that are extracted on a schedule and load their data from HBase into the cache
        DatasetTableExample datasetTableExample = new DatasetTableExample();
        datasetTableExample.createCriteria().andModeEqualTo(1);
        List<DatasetTable> datasetTables = datasetTableMapper.selectByExampleWithBLOBs(datasetTableExample);
        for (DatasetTable table : datasetTables) {
            commonThreadPool.addTask(() -> {
                try {
                    List<DatasetTableField> fields = dataSetTableFieldsService.getFieldsByTableId(table.getId());
                    sparkCalc.getHBaseDataAndCache(table.getId(), fields);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            });
        }
    }
}
@@ -4,6 +4,7 @@ import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import io.dataease.base.domain.*;
import io.dataease.base.mapper.ChartViewMapper;
import io.dataease.base.mapper.DatasetTableFieldMapper;
import io.dataease.commons.utils.AuthUtils;
import io.dataease.commons.utils.BeanUtils;
import io.dataease.controller.request.chart.ChartViewRequest;
@@ -16,6 +17,7 @@ import io.dataease.dto.chart.ChartViewDTO;
import io.dataease.dto.chart.ChartViewFieldDTO;
import io.dataease.dto.chart.Series;
import io.dataease.dto.dataset.DataTableInfoDTO;
import io.dataease.service.dataset.DataSetTableFieldsService;
import io.dataease.service.dataset.DataSetTableService;
import io.dataease.service.spark.SparkCalc;
import org.apache.commons.collections4.CollectionUtils;
@@ -41,6 +43,8 @@ public class ChartViewService {
    private DatasourceService datasourceService;
    @Resource
    private SparkCalc sparkCalc;
    @Resource
    private DataSetTableFieldsService dataSetTableFieldsService;

    public ChartViewWithBLOBs save(ChartViewWithBLOBs chartView) {
        checkName(chartView);
@@ -121,9 +125,9 @@
            }
            data = datasourceProvider.getData(datasourceRequest);
        } else if (table.getMode() == 1) {// extracted (scheduled) mode
            // DataTableInfoDTO dataTableInfoDTO = new Gson().fromJson(table.getInfo(), DataTableInfoDTO.class);
            // String tableName = dataTableInfoDTO.getTable() + "-" + table.getDataSourceId();// todo hBase table name maybe change
            data = sparkCalc.getData(table.getId(), xAxis, yAxis, "tmp_" + view.getId().split("-")[0]);
            // fetch the dataset's fields
            List<DatasetTableField> fields = dataSetTableFieldsService.getFieldsByTableId(table.getId());
            data = sparkCalc.getData(table.getId(), fields, xAxis, yAxis, "tmp_" + view.getId().split("-")[0]);
        }

        // chart components can be extended further
@@ -60,4 +60,10 @@ public class DataSetTableFieldsService {
        datasetTableFieldExample.createCriteria().andIdIn(ids);
        return datasetTableFieldMapper.selectByExample(datasetTableFieldExample);
    }

    public List<DatasetTableField> getFieldsByTableId(String id) {
        DatasetTableFieldExample datasetTableFieldExample = new DatasetTableFieldExample();
        datasetTableFieldExample.createCriteria().andTableIdEqualTo(id);
        return datasetTableFieldMapper.selectByExample(datasetTableFieldExample);
    }
}
@@ -113,7 +113,7 @@ public class DataSetTableService {
        List<DatasetTableField> quota = new ArrayList<>();

        fields.forEach(field -> {
            if (field.getDeType() == 2) {
            if (field.getDeType() == 2 || field.getDeType() == 3) {
                quota.add(field);
            } else {
                dimension.add(field);
@@ -360,10 +360,11 @@
            case "MEDIUMINT":
            case "INTEGER":
            case "BIGINT":
                return 2;// integer
            case "FLOAT":
            case "DOUBLE":
            case "DECIMAL":
                return 2;// numeric
                return 3;// floating point
            default:
                return 0;
        }
@@ -13,6 +13,7 @@ import io.dataease.datasource.constants.DatasourceTypes;
import io.dataease.datasource.dto.MysqlConfigrationDTO;
import io.dataease.dto.dataset.DataSetTaskLogDTO;
import io.dataease.dto.dataset.DataTableInfoDTO;
import io.dataease.service.spark.SparkCalc;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
@@ -97,7 +98,7 @@ public class ExtractDataService {
    private DataSetTableTaskService dataSetTableTaskService;
    @Resource
    private DatasourceMapper datasourceMapper;
    private static ExecutorService pool = Executors.newScheduledThreadPool(50); // connection pool
    private Connection connection;

    private static String lastUpdateTime = "${__last_update_time__}";
@@ -120,6 +121,9 @@ public class ExtractDataService {
    @Value("${hbase.zookeeper.property.clientPort:2181}")
    private String zkPort;

    @Resource
    private SparkCalc sparkCalc;

    public void extractData(String datasetTableId, String taskId, String type) {
        DatasetTableTaskLog datasetTableTaskLog = new DatasetTableTaskLog();
@@ -131,60 +135,62 @@ public class ExtractDataService {
            List<DatasetTableField> datasetTableFields = dataSetTableFieldsService.list(DatasetTableField.builder().tableId(datasetTable.getId()).build());
            String table = new Gson().fromJson(datasetTable.getInfo(), DataTableInfoDTO.class).getTable();
            TableName hbaseTable = TableName.valueOf(datasetTableId);
            switch (updateType){
            switch (updateType) {
                // full refresh
                case all_scope:
                    writeDatasetTableTaskLog(datasetTableTaskLog, datasetTableId, taskId);

                    //check pentaho_mappings table
                    TableName pentaho_mappings = TableName.valueOf(this.pentaho_mappings);
                    if(!admin.tableExists(pentaho_mappings)){
                        creatHaseTable(pentaho_mappings, admin, Arrays.asList("columns","key"));
                    if (!admin.tableExists(pentaho_mappings)) {
                        creatHaseTable(pentaho_mappings, admin, Arrays.asList("columns", "key"));
                    }

                    //check pentaho files
                    if(!isExitFile("job_" + datasetTableId + ".kjb") || !isExitFile("trans_" + datasetTableId + ".ktr")){
                    if (!isExitFile("job_" + datasetTableId + ".kjb") || !isExitFile("trans_" + datasetTableId + ".ktr")) {
                        generateTransFile("all_scope", datasetTable, datasource, table, datasetTableFields, null);
                        generateJobFile("all_scope", datasetTable);
                    }

                    if(!admin.tableExists(hbaseTable)){
                    if (!admin.tableExists(hbaseTable)) {
                        creatHaseTable(hbaseTable, admin, Arrays.asList(dataease_column_family));
                    }
                    admin.disableTable(hbaseTable);
                    admin.truncateTable(hbaseTable, true);

                    extractData(datasetTable, "all_scope");
                    // after the sync completes, read the data from HBase into the cache
                    sparkCalc.getHBaseDataAndCache(datasetTableId, dataSetTableFieldsService.getFieldsByTableId(datasetTableId));
                    datasetTableTaskLog.setStatus(JobStatus.Completed.name());
                    datasetTableTaskLog.setEndTime(System.currentTimeMillis());
                    dataSetTableTaskLogService.save(datasetTableTaskLog);
                    break;
                case add_scope:
                    // incremental refresh
                    if(!admin.tableExists(hbaseTable)){
                    if (!admin.tableExists(hbaseTable)) {
                        LogUtil.error("TableName error, dataset: " + datasetTableId);
                        return;
                    }
                    DatasetTableIncrementalConfig datasetTableIncrementalConfig = dataSetTableService.incrementalConfig(datasetTableId);
                    if(datasetTableIncrementalConfig == null || StringUtils.isEmpty(datasetTableIncrementalConfig.getTableId())){
                    if (datasetTableIncrementalConfig == null || StringUtils.isEmpty(datasetTableIncrementalConfig.getTableId())) {
                        return;
                    }
                    DatasetTableTaskLog request = new DatasetTableTaskLog();
                    request.setTableId(datasetTableId);
                    request.setStatus(JobStatus.Completed.name());
                    List<DataSetTaskLogDTO> dataSetTaskLogDTOS = dataSetTableTaskLogService.list(request);
                    if(CollectionUtils.isEmpty(dataSetTaskLogDTOS)){
                    if (CollectionUtils.isEmpty(dataSetTaskLogDTOS)) {
                        return;
                    }
                    writeDatasetTableTaskLog(datasetTableTaskLog,datasetTableId, taskId);
                    writeDatasetTableTaskLog(datasetTableTaskLog, datasetTableId, taskId);

                    // incremental add
                    if(StringUtils.isNotEmpty(datasetTableIncrementalConfig.getIncrementalAdd().replace(" ", ""))){
                    if (StringUtils.isNotEmpty(datasetTableIncrementalConfig.getIncrementalAdd().replace(" ", ""))) {
                        System.out.println("datasetTableIncrementalConfig.getIncrementalAdd(): " + datasetTableIncrementalConfig.getIncrementalAdd());
                        String sql = datasetTableIncrementalConfig.getIncrementalAdd().replace(lastUpdateTime, dataSetTaskLogDTOS.get(0).getStartTime().toString()
                                .replace(currentUpdateTime, Long.valueOf(System.currentTimeMillis()).toString()));

                        if(!isExitFile("job_add_" + datasetTableId + ".kjb") || !isExitFile("trans_add_" + datasetTableId + ".ktr")){
                        if (!isExitFile("job_add_" + datasetTableId + ".kjb") || !isExitFile("trans_add_" + datasetTableId + ".ktr")) {
                            generateTransFile("incremental_add", datasetTable, datasource, table, datasetTableFields, sql);
                            generateJobFile("incremental_add", datasetTable);
                        }
@@ -193,39 +199,39 @@ public class ExtractDataService {
                    }

                    // incremental delete
                    if( StringUtils.isNotEmpty(datasetTableIncrementalConfig.getIncrementalDelete())){
                    if (StringUtils.isNotEmpty(datasetTableIncrementalConfig.getIncrementalDelete())) {
                        String sql = datasetTableIncrementalConfig.getIncrementalDelete().replace(lastUpdateTime, dataSetTaskLogDTOS.get(0).getStartTime().toString()
                                .replace(currentUpdateTime, Long.valueOf(System.currentTimeMillis()).toString()));
                        if(!isExitFile("job_delete_" + datasetTableId + ".kjb") || !isExitFile("trans_delete_" + datasetTableId + ".ktr")){
                        if (!isExitFile("job_delete_" + datasetTableId + ".kjb") || !isExitFile("trans_delete_" + datasetTableId + ".ktr")) {
                            generateTransFile("incremental_delete", datasetTable, datasource, table, datasetTableFields, sql);
                            generateJobFile("incremental_delete", datasetTable);
                        }
                        extractData(datasetTable, "incremental_delete");
                    }

                    // after the sync completes, read the data from HBase into the cache
                    sparkCalc.getHBaseDataAndCache(datasetTableId, dataSetTableFieldsService.getFieldsByTableId(datasetTableId));
                    datasetTableTaskLog.setStatus(JobStatus.Completed.name());
                    datasetTableTaskLog.setEndTime(System.currentTimeMillis());
                    dataSetTableTaskLogService.save(datasetTableTaskLog);
                    break;
            }
        }catch (Exception e){
        } catch (Exception e) {
            e.printStackTrace();
            LogUtil.error("ExtractData error, dataset: " + datasetTableId);
            LogUtil.error(e.getMessage(), e);
            datasetTableTaskLog.setStatus(JobStatus.Error.name());
            datasetTableTaskLog.setEndTime(System.currentTimeMillis());
            dataSetTableTaskLogService.save(datasetTableTaskLog);
        }
        finally {
        } finally {
            DatasetTableTask datasetTableTask = dataSetTableTaskService.get(taskId);
            if (datasetTableTask != null && datasetTableTask.getRate().equalsIgnoreCase(ScheduleType.SIMPLE.toString())){
            if (datasetTableTask != null && datasetTableTask.getRate().equalsIgnoreCase(ScheduleType.SIMPLE.toString())) {
                datasetTableTask.setRate(ScheduleType.SIMPLE_COMPLETE.toString());
                dataSetTableTaskService.update(datasetTableTask);
            }
        }
    }

    private void writeDatasetTableTaskLog(DatasetTableTaskLog datasetTableTaskLog, String datasetTableId, String taskId){
    private void writeDatasetTableTaskLog(DatasetTableTaskLog datasetTableTaskLog, String datasetTableId, String taskId) {
        datasetTableTaskLog.setTableId(datasetTableId);
        datasetTableTaskLog.setTaskId(taskId);
        datasetTableTaskLog.setStatus(JobStatus.Underway.name());
@@ -233,7 +239,7 @@ public class ExtractDataService {
        dataSetTableTaskLogService.save(datasetTableTaskLog);
    }

    private void creatHaseTable(TableName tableName, Admin admin, List<String> columnFamily)throws Exception{
    private void creatHaseTable(TableName tableName, Admin admin, List<String> columnFamily) throws Exception {
        TableDescriptorBuilder descBuilder = TableDescriptorBuilder.newBuilder(tableName);
        Collection<ColumnFamilyDescriptor> families = new ArrayList<>();
        for (String s : columnFamily) {
@@ -245,11 +251,11 @@ public class ExtractDataService {
        admin.createTable(desc);
    }

    private void extractData(DatasetTable datasetTable, String extractType)throws Exception{
    private void extractData(DatasetTable datasetTable, String extractType) throws Exception {
        KettleFileRepository repository = CommonBeanFactory.getBean(KettleFileRepository.class);
        RepositoryDirectoryInterface repositoryDirectoryInterface = repository.loadRepositoryDirectoryTree();
        JobMeta jobMeta = null;
        switch (extractType){
        switch (extractType) {
            case "all_scope":
                jobMeta = repository.loadJob("job_" + datasetTable.getId(), repositoryDirectoryInterface, null, null);
                break;
@@ -272,27 +278,27 @@ public class ExtractDataService {
            do {
                jobStatus = remoteSlaveServer.getJobStatus(jobMeta.getName(), lastCarteObjectId, 0);
            } while (jobStatus != null && jobStatus.isRunning());
            if(jobStatus.getStatusDescription().equals("Finished")){
            if (jobStatus.getStatusDescription().equals("Finished")) {
                return;
            }else {
            } else {
                throw new Exception(jobStatus.getLoggingString());
            }
        }
    }

    private synchronized Connection getConnection() throws Exception{
        if(connection == null || connection.isClosed()){
    private synchronized Connection getConnection() throws Exception {
        if (connection == null || connection.isClosed()) {
            Configuration cfg = CommonBeanFactory.getBean(Configuration.class);
            connection = ConnectionFactory.createConnection(cfg, pool);
        }
        return connection;
    }

    private boolean isExitFile(String fileName){
        File file=new File(root_path + fileName);
    private boolean isExitFile(String fileName) {
        File file = new File(root_path + fileName);
        return file.exists();
    }

    private SlaveServer getSlaveServer(){
    private SlaveServer getSlaveServer() {
        SlaveServer remoteSlaveServer = new SlaveServer();
        remoteSlaveServer.setHostname(carte);// remote host IP
        remoteSlaveServer.setPort(port);// port
@@ -301,14 +307,14 @@ public class ExtractDataService {
        return remoteSlaveServer;
    }

    private void generateJobFile(String extractType, DatasetTable datasetTable) throws Exception{
    private void generateJobFile(String extractType, DatasetTable datasetTable) throws Exception {
        String jobName = null;
        switch (extractType) {
            case "all_scope":
                jobName = "job_" + datasetTable.getId();
                break;
            case "incremental_add":
                jobName = "job_add_" + datasetTable.getId();
                break;
            case "incremental_delete":
                jobName = "job_delete_" + datasetTable.getId();
@@ -323,7 +329,7 @@ public class ExtractDataService {
                transName = "trans_" + datasetTable.getId();
                break;
            case "incremental_add":
                transName = "trans_add_" + datasetTable.getId();
                break;
            case "incremental_delete":
                transName = "trans_delete_" + datasetTable.getId();
@@ -364,11 +370,11 @@ public class ExtractDataService {
        jobMeta.addJobHop(greenHop);

        String jobXml = jobMeta.getXML();
        File file = new File( root_path + jobName + ".kjb");
        File file = new File(root_path + jobName + ".kjb");
        FileUtils.writeStringToFile(file, jobXml, "UTF-8");
    }

    private void generateTransFile(String extractType, DatasetTable datasetTable, Datasource datasource, String table, List<DatasetTableField> datasetTableFields, String selectSQL) throws Exception{
    private void generateTransFile(String extractType, DatasetTable datasetTable, Datasource datasource, String table, List<DatasetTableField> datasetTableFields, String selectSQL) throws Exception {
        TransMeta transMeta = new TransMeta();
        String transName = null;
        switch (extractType) {
@@ -377,7 +383,7 @@ public class ExtractDataService {
                selectSQL = dataSetTableService.createQuerySQL(datasource.getType(), table, datasetTableFields.stream().map(DatasetTableField::getOriginName).toArray(String[]::new));
                break;
            case "incremental_add":
                transName = "trans_add_" + datasetTable.getId();
                break;
            case "incremental_delete":
                transName = "trans_delete_" + datasetTable.getId();
@@ -450,11 +456,11 @@ public class ExtractDataService {
        RuntimeTestActionHandler defaultHandler = null;

        RuntimeTestActionService runtimeTestActionService = new RuntimeTestActionServiceImpl(runtimeTestActionHandlers, defaultHandler);
        RuntimeTester runtimeTester = new RuntimeTesterImpl(new ArrayList<>( Arrays.asList( mock( RuntimeTest.class ) ) ), mock( ExecutorService.class ), "modules");
        RuntimeTester runtimeTester = new RuntimeTesterImpl(new ArrayList<>(Arrays.asList(mock(RuntimeTest.class))), mock(ExecutorService.class), "modules");

        Put put = new Put((datasetTable.getId() + "," + "target_mapping").getBytes());
        for (DatasetTableField datasetTableField : datasetTableFields) {
            put.addColumn("columns".getBytes(), (dataease_column_family + "," + datasetTableField.getOriginName() + "," + datasetTableField.getOriginName()).getBytes(), transToColumnType(datasetTableField.getDeType()).getBytes());
        }
        put.addColumn("key".getBytes(), "uuid".getBytes(), "String".getBytes());
        TableName pentaho_mappings = TableName.valueOf(this.pentaho_mappings);
@@ -466,7 +472,7 @@ public class ExtractDataService {
        hBaseOutputMeta.setTargetMappingName("target_mapping");
        hBaseOutputMeta.setNamedCluster(clusterTemplate);
        hBaseOutputMeta.setCoreConfigURL(hbase_conf_file);
        if(extractType.equalsIgnoreCase("incremental_delete")){
        if (extractType.equalsIgnoreCase("incremental_delete")) {
            hBaseOutputMeta.setDeleteRowKey(true);
        }
        StepMeta tostep = new StepMeta("HBaseOutput", "HBaseOutput", hBaseOutputMeta);
@@ -0,0 +1,53 @@
package io.dataease.service.spark;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;

import java.util.HashMap;
import java.util.Map;

/**
 * @Author gin
 * @Date 2021/4/13 12:32 PM
 */
public class CacheUtil {
    private static CacheUtil cacheUtil;
    private static Map<String, Dataset<Row>> cacheMap;

    private CacheUtil(){
        cacheMap = new HashMap<String, Dataset<Row>>();
    }

    public static CacheUtil getInstance(){
        if (cacheUtil == null){
            cacheUtil = new CacheUtil();
        }
        return cacheUtil;
    }

    /**
     * Add an entry to the cache
     * @param key
     * @param obj
     */
    public void addCacheData(String key,Dataset<Row> obj){
        cacheMap.put(key,obj);
    }

    /**
     * Get an entry from the cache
     * @param key
     * @return
     */
    public Dataset<Row> getCacheData(String key){
        return cacheMap.get(key);
    }

    /**
     * Remove an entry from the cache
     * @param key
     */
    public void removeCacheData(String key){
        cacheMap.remove(key);
    }
}
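A brief usage sketch (illustrative; it mirrors how SparkCalc and the HBase warm-up path below use this cache):

    // Cache the computed Dataset under the dataset table id, then reuse it on the next query.
    CacheUtil.getInstance().addCacheData(hTable, dataFrame);
    Dataset<Row> cached = CacheUtil.getInstance().getCacheData(hTable);
    if (ObjectUtils.isEmpty(cached)) {
        // fall back to reloading from HBase, as SparkCalc.getData() does
    }

Note that the backing map is a plain HashMap shared through a static field, so concurrent warm-up tasks submitted through CommonThreadPool write to it without synchronization.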
@@ -1,8 +1,10 @@
package io.dataease.service.spark;

import io.dataease.base.domain.DatasetTableField;
import io.dataease.commons.utils.CommonBeanFactory;
import io.dataease.dto.chart.ChartViewFieldDTO;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Result;
@@ -42,21 +44,56 @@ public class SparkCalc {
    @Resource
    private Environment env; // holds the values from the configuration files

    public List<String[]> getData(String hTable, List<ChartViewFieldDTO> xAxis, List<ChartViewFieldDTO> yAxis, String tmpTable) throws Exception {
    public List<String[]> getData(String hTable, List<DatasetTableField> fields, List<ChartViewFieldDTO> xAxis, List<ChartViewFieldDTO> yAxis, String tmpTable) throws Exception {
        // Spark Context
        SparkSession spark = CommonBeanFactory.getBean(SparkSession.class);
        JavaSparkContext sparkContext = new JavaSparkContext(spark.sparkContext());

        // Spark SQL Context
        // SQLContext sqlContext = CommonBeanFactory.getBean(SQLContext.class);
        SQLContext sqlContext = new SQLContext(sparkContext);
        sqlContext.setConf("spark.sql.shuffle.partitions", env.getProperty("spark.sql.shuffle.partitions", "1"));
        sqlContext.setConf("spark.default.parallelism", env.getProperty("spark.default.parallelism", "1"));

        Dataset<Row> dataFrame = CacheUtil.getInstance().getCacheData(hTable);
        if (ObjectUtils.isEmpty(dataFrame)) {
            dataFrame = getHBaseDataAndCache(sparkContext, sqlContext, hTable, fields);
        }

        dataFrame.createOrReplaceTempView(tmpTable);
        Dataset<Row> sql = sqlContext.sql(getSQL(xAxis, yAxis, tmpTable));
        // transform
        List<String[]> data = new ArrayList<>();
        List<Row> list = sql.collectAsList();
        for (Row row : list) {
            String[] r = new String[row.length()];
            for (int i = 0; i < row.length(); i++) {
                r[i] = row.get(i) == null ? "null" : row.get(i).toString();
            }
            data.add(r);
        }
        return data;
    }

    public Dataset<Row> getHBaseDataAndCache(String hTable, List<DatasetTableField> fields) throws Exception {
        // Spark Context
        SparkSession spark = CommonBeanFactory.getBean(SparkSession.class);
        JavaSparkContext sparkContext = new JavaSparkContext(spark.sparkContext());

        // Spark SQL Context
        // SQLContext sqlContext = CommonBeanFactory.getBean(SQLContext.class);
        SQLContext sqlContext = new SQLContext(sparkContext);
        sqlContext.setConf("spark.sql.shuffle.partitions", env.getProperty("spark.sql.shuffle.partitions", "1"));
        sqlContext.setConf("spark.default.parallelism", env.getProperty("spark.default.parallelism", "1"));
        return getHBaseDataAndCache(sparkContext, sqlContext, hTable, fields);
    }

    public Dataset<Row> getHBaseDataAndCache(JavaSparkContext sparkContext, SQLContext sqlContext, String hTable, List<DatasetTableField> fields) throws Exception {
        Scan scan = new Scan();
        scan.addFamily(column_family.getBytes());
        ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
        String scanToString = new String(Base64.getEncoder().encode(proto.toByteArray()));

        // Spark Context
        // JavaSparkContext sparkContext = CommonBeanFactory.getBean(JavaSparkContext.class);
        SparkSession spark = SparkSession.builder()
                .appName(env.getProperty("spark.appName", "DataeaseJob"))
                .master(env.getProperty("spark.master", "local[*]"))
                .config("spark.scheduler.mode", "FAIR")
                .getOrCreate();
        JavaSparkContext sparkContext = new JavaSparkContext(spark.sparkContext());

        // HBase config
        // Configuration conf = CommonBeanFactory.getBean(Configuration.class);
        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
@@ -73,26 +110,20 @@
        while (tuple2Iterator.hasNext()) {
            Result result = tuple2Iterator.next()._2;
            List<Object> list = new ArrayList<>();
            xAxis.forEach(x -> {
            fields.forEach(x -> {
                String l = Bytes.toString(result.getValue(column_family.getBytes(), x.getOriginName().getBytes()));
                if (x.getDeType() == 0 || x.getDeType() == 1) {
                    list.add(Bytes.toString(result.getValue(column_family.getBytes(), x.getOriginName().getBytes())));
                    list.add(l);
                } else if (x.getDeType() == 2) {
                    String l = Bytes.toString(result.getValue(column_family.getBytes(), x.getOriginName().getBytes()));
                    if (StringUtils.isEmpty(l)) {
                        l = "0";
                    }
                    list.add(l.contains(".") ? Double.parseDouble(l) : Long.parseLong(l));
                }
            });
            yAxis.forEach(y -> {
                if (y.getDeType() == 0 || y.getDeType() == 1) {
                    list.add(Bytes.toString(result.getValue(column_family.getBytes(), y.getOriginName().getBytes())));
                } else if (y.getDeType() == 2) {
                    String l = Bytes.toString(result.getValue(column_family.getBytes(), y.getOriginName().getBytes()));
                    list.add(Long.valueOf(l));
                } else if (x.getDeType() == 3) {
                    if (StringUtils.isEmpty(l)) {
                        l = "0";
                        l = "0.0";
                    }
                    list.add(l.contains(".") ? Double.parseDouble(l) : Long.parseLong(l));
                    list.add(Double.valueOf(l));
                }
            });
            iterator.add(RowFactory.create(list.toArray()));
@@ -102,45 +133,24 @@

        List<StructField> structFields = new ArrayList<>();
        // the struct field order must match the RDD column order
        xAxis.forEach(x -> {
        fields.forEach(x -> {
            if (x.getDeType() == 0 || x.getDeType() == 1) {
                structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.StringType, true));
            } else if (x.getDeType() == 2) {
                structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.LongType, true));
            }
        });
        yAxis.forEach(y -> {
            if (y.getDeType() == 0 || y.getDeType() == 1) {
                structFields.add(DataTypes.createStructField(y.getOriginName(), DataTypes.StringType, true));
            } else if (y.getDeType() == 2) {
                structFields.add(DataTypes.createStructField(y.getOriginName(), DataTypes.LongType, true));
            } else if (x.getDeType() == 3) {
                structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.DoubleType, true));
            }
        });
        StructType structType = DataTypes.createStructType(structFields);

        // Spark SQL Context
        // SQLContext sqlContext = CommonBeanFactory.getBean(SQLContext.class);
        SQLContext sqlContext = new SQLContext(sparkContext);
        sqlContext.setConf("spark.sql.shuffle.partitions", env.getProperty("spark.sql.shuffle.partitions", "1"));
        sqlContext.setConf("spark.default.parallelism", env.getProperty("spark.default.parallelism", "1"));

        Dataset<Row> dataFrame = sqlContext.createDataFrame(rdd, structType);
        dataFrame.createOrReplaceTempView(tmpTable);
        Dataset<Row> sql = sqlContext.sql(getSQL(xAxis, yAxis, tmpTable));
        // transform
        List<String[]> data = new ArrayList<>();
        List<Row> list = sql.collectAsList();
        for (Row row : list) {
            String[] r = new String[row.length()];
            for (int i = 0; i < row.length(); i++) {
                r[i] = row.get(i).toString();
            }
            data.add(r);
        }
        return data;
        Dataset<Row> dataFrame = sqlContext.createDataFrame(rdd, structType).persist();
        CacheUtil.getInstance().addCacheData(hTable, dataFrame);
        dataFrame.count();
        return dataFrame;
    }

    private String getSQL(List<ChartViewFieldDTO> xAxis, List<ChartViewFieldDTO> yAxis, String table) {
    public String getSQL(List<ChartViewFieldDTO> xAxis, List<ChartViewFieldDTO> yAxis, String table) {
        // field aggregation, ordering, etc.
        String[] field = yAxis.stream().map(y -> "CAST(" + y.getSummary() + "(" + y.getOriginName() + ") AS DECIMAL(20,2)) AS _" + y.getSummary() + "_" + y.getOriginName()).toArray(String[]::new);
        String[] group = xAxis.stream().map(ChartViewFieldDTO::getOriginName).toArray(String[]::new);
@@ -22,7 +22,7 @@ CREATE TABLE IF NOT EXISTS `dataset_table_field`
    `origin_name` varchar(255) NOT NULL COMMENT 'original name',
    `name` varchar(255) NOT NULL COMMENT 'field name',
    `type` varchar(50) NOT NULL COMMENT 'original field type',
    `de_type` int(10) NOT NULL COMMENT 'DataEase field type: 0-text, 1-time, 2-numeric...',
    `de_type` int(10) NOT NULL COMMENT 'DataEase field type: 0-text, 1-time, 2-integer, 3-float...',
    `checked` tinyint(1) NOT NULL DEFAULT true COMMENT 'whether the field is selected',
    `column_index` int(10) NOT NULL COMMENT 'column position',
    `last_sync_time` bigint(13) COMMENT 'last sync time',
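For context, the widened comment matches the type mapping introduced in DataSetTableService earlier in this diff: the integer column types shown there (MEDIUMINT, INTEGER, BIGINT) map to de_type 2, FLOAT/DOUBLE/DECIMAL now map to de_type 3 instead of 2, anything unmatched falls back to 0 (text), and fields with de_type 2 or 3 are grouped as quota (measure) fields while the rest are treated as dimensions.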
@@ -1 +0,0 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1617776907542" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="9028" xmlns:xlink="http://www.w3.org/1999/xlink" width="200" height="200"><defs><style type="text/css"></style></defs><path d="M925.0304 82.3808c-25.4464-16.6912-61.0816-31.488-105.9328-43.9296-89.1904-24.7808-207.36-38.4-332.6976-38.4s-243.5072 13.6704-332.6976 38.4c-44.9024 12.4416-80.5376 27.2384-105.9328 43.9296C16.0768 103.2192 0 127.1296 0 153.6v614.4c0 26.4704 16.0768 50.432 47.7696 71.2192 25.4464 16.6912 61.0816 31.488 105.9328 43.9296 89.1904 24.7808 207.36 38.4 332.6976 38.4s243.5072-13.6704 332.6976-38.4512c44.9024-12.4416 80.5376-27.2384 105.9328-43.9296 31.6928-20.7872 47.7696-44.7488 47.7696-71.2192v-614.4c0-26.4704-16.0768-50.432-47.7696-71.2192zM167.424 87.7568C252.3136 64.2048 365.568 51.2 486.4 51.2s234.1376 13.0048 318.976 36.5568C897.28 113.3056 921.6 141.9776 921.6 153.6s-24.2688 40.2944-116.224 65.8432C720.4864 242.9952 607.232 256 486.4 256s-234.1376-13.0048-318.976-36.5568C75.52 193.8944 51.2 165.2224 51.2 153.6s24.2688-40.2944 116.224-65.8432z m637.952 746.0864c-84.8896 23.552-198.144 36.5568-318.976 36.5568s-234.1376-13.0048-318.976-36.5568C75.52 808.2944 51.2 779.6224 51.2 768v-131.3792c25.1904 15.8208 59.5968 29.8496 102.5024 41.7792 89.1904 24.7808 207.36 38.4 332.6976 38.4s243.5072-13.6704 332.6976-38.4512c42.9056-11.9296 77.3632-25.9584 102.5024-41.7792v131.3792c0 11.6224-24.2688 40.2944-116.224 65.8432z m0-204.8c-84.8896 23.552-198.144 36.5568-318.976 36.5568s-234.1376-13.0048-318.976-36.5568C75.52 603.4944 51.2 574.8224 51.2 563.2V431.8208c25.1904 15.8208 59.5968 29.8496 102.5024 41.7792 89.1904 24.7808 207.36 38.4 332.6976 38.4s243.5072-13.6704 332.6976-38.4c42.9056-11.9296 77.3632-25.9584 102.5024-41.7792V563.2c0 11.6224-24.2688 40.2944-116.224 65.8432z m0-204.8C720.4864 447.7952 607.232 460.8 486.4 460.8s-234.1376-13.0048-318.976-36.5568C75.52 398.6944 51.2 370.0224 51.2 358.4V227.0208c25.1904 15.8208 59.5968 29.8496 102.5024 41.7792C242.8928 293.5808 361.0624 307.2 486.4 307.2s243.5072-13.6704 332.6976-38.4c42.9056-11.9296 77.3632-25.9584 102.5024-41.7792V358.4c0 11.6224-24.2688 40.2944-116.224 65.8432z" p-id="9029"></path></svg>
frontend/src/icons/svg/ds-db.svg (new file)
@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1618222670482" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="3856" xmlns:xlink="http://www.w3.org/1999/xlink" width="200" height="200"><defs><style type="text/css"></style></defs><path d="M512 384c-229.8 0-416-57.3-416-128v256c0 70.7 186.2 128 416 128s416-57.3 416-128V256c0 70.7-186.2 128-416 128z" p-id="3857"></path><path d="M512 704c-229.8 0-416-57.3-416-128v256c0 70.7 186.2 128 416 128s416-57.3 416-128V576c0 70.7-186.2 128-416 128zM512 320c229.8 0 416-57.3 416-128S741.8 64 512 64 96 121.3 96 192s186.2 128 416 128z" p-id="3858"></path></svg>
frontend/src/icons/svg/field_text.svg (new file)
@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1618217035616" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="6354" xmlns:xlink="http://www.w3.org/1999/xlink" width="200" height="200"><defs><style type="text/css"></style></defs><path d="M554.666667 256v640h-85.333334V256H213.333333V170.666667h597.333334v85.333333z" p-id="6355"></path></svg>
frontend/src/icons/svg/field_time.svg (new file)
@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1618217742324" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="7205" xmlns:xlink="http://www.w3.org/1999/xlink" width="200" height="200"><defs><style type="text/css"></style></defs><path d="M512 64C264.96 64 64 264.96 64 512s200.96 448 448 448 448-200.96 448-448S759.04 64 512 64z m0 831.712c-211.584 0-383.712-172.16-383.712-383.712 0-211.584 172.128-383.712 383.712-383.712 211.552 0 383.712 172.128 383.712 383.712 0 211.552-172.16 383.712-383.712 383.712z" p-id="7206"></path><path d="M671.968 512H512V288.064c0-17.76-14.24-32.128-32-32.128s-32 14.4-32 32.128V544c0 17.76 14.272 32 32 32h191.968c17.76 0 32.128-14.24 32.128-32s-14.368-32-32.128-32z" p-id="7207"></path></svg>
frontend/src/icons/svg/field_value.svg (new file)
@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1618218878362" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="9183" xmlns:xlink="http://www.w3.org/1999/xlink" width="200" height="200"><defs><style type="text/css"></style></defs><path d="M159.926857 689.426286h130.724572l-39.003429 193.718857c-0.859429 4.278857-1.28 9.435429-1.28 13.714286 0 20.992 14.555429 32.566857 34.706286 32.566857 20.571429 0 35.565714-11.154286 39.862857-32.146286l41.984-207.853714h202.715428L530.651429 883.145143c-1.28 4.278857-1.700571 9.435429-1.700572 13.714286 0 20.992 14.573714 32.566857 35.145143 32.566857s35.565714-11.154286 39.862857-32.146286L645.485714 689.426286h152.996572c23.570286 0 39.862857-17.133714 39.862857-40.283429 0-18.852571-12.854857-34.285714-32.146286-34.285714h-145.298286L706.377143 388.571429h149.997714c23.588571 0 39.862857-17.152 39.862857-40.283429 0-18.852571-12.854857-34.285714-32.146285-34.285714h-142.72l35.145142-172.726857c0.420571-2.56 1.28-8.137143 1.28-13.714286 0-20.992-14.994286-32.987429-35.565714-32.987429-23.990857 0-34.706286 13.275429-39.003428 33.426286l-37.705143 186.002286H442.788571l35.145143-172.726857c0.420571-2.56 1.28-8.137143 1.28-13.714286 0-20.992-15.433143-32.987429-35.565714-32.987429-24.429714 0-35.584 13.275429-39.442286 33.426286l-37.705143 186.002286h-140.580571c-23.570286 0-39.862857 17.993143-39.862857 41.563428 0 19.291429 12.873143 33.005714 32.164571 33.005715h133.284572l-45.44 226.285714h-138.422857c-23.588571 0-39.862857 17.993143-39.862858 41.563428 0 19.291429 12.854857 33.005714 32.146286 33.005715z m221.988572-74.569143l45.878857-226.285714H630.491429l-45.842286 226.285714z" p-id="9184"></path></svg>
frontend/src/icons/svg/scene.svg (new file)
@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1618223542845" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="6672" xmlns:xlink="http://www.w3.org/1999/xlink" width="200" height="200"><defs><style type="text/css"></style></defs><path d="M592 336H176c-52.928 0-96 43.072-96 96v416c0 52.928 43.072 96 96 96h416c52.928 0 96-43.072 96-96V432c0-52.928-43.072-96-96-96z m32 512a32 32 0 0 1-32 32H176c-17.632 0-32-14.336-32-32V432c0-17.632 14.368-32 32-32h416c17.664 0 32 14.368 32 32v416z" p-id="6673"></path><path d="M720 208H304a32 32 0 0 0 0 64h416c17.664 0 32 14.368 32 32v416a32 32 0 1 0 64 0V304c0-52.928-43.072-96-96-96zM528 752H240a32 32 0 1 0 0 64h288a32 32 0 1 0 0-64z" p-id="6674"></path><path d="M848 80H432a32 32 0 0 0 0 64h416c17.664 0 32 14.368 32 32v416a32 32 0 1 0 64 0V176c0-52.928-43.072-96-96-96z" p-id="6675"></path></svg>
@@ -171,3 +171,49 @@ div:focus {
    width: 100% !important;
  }
}

%field-icon{
  font-size: 13px;
  margin: 0 3px 0 0;
}
.field-icon-text{
  @extend %field-icon;
  color: #688fd8;
}
.field-icon-time{
  @extend %field-icon;
  color: #faaa39;
}
.field-icon-value{
  @extend %field-icon;
  color: #37b4aa;
}
.ds-icon-scene{
  width: 14px;
  height: 14px;
  color: #faaa39;
}
.ds-icon-db{
  width: 14px;
  height: 14px;
  color: #3685f2;
  margin: 0 2px 0 0;
}
.ds-icon-sql{
  width: 14px;
  height: 14px;
  color: #faaa39;
  margin: 0 2px 0 0;
}
.ds-icon-excel{
  width: 14px;
  height: 14px;
  color: #13cd66;
  margin: 0 2px 0 0;
}
.ds-icon-custom{
  width: 14px;
  height: 14px;
  color: #23beef;
  margin: 0 2px 0 0;
}

@@ -44,11 +44,12 @@
      <span slot-scope="{ node, data }" class="custom-tree-node">
        <span>
          <span v-if="data.type === 'scene'">
            <el-button
              icon="el-icon-folder"
              type="text"
              size="mini"
            />
            <!-- <el-button-->
            <!-- icon="el-icon-folder-opened"-->
            <!-- type="text"-->
            <!-- size="mini"-->
            <!-- />-->
            <svg-icon icon-class="scene" class="ds-icon-scene" />
          </span>
          <span style="margin-left: 6px">{{ data.name }}</span>
        </span>
@@ -117,8 +118,8 @@
        <span class="title-text">
          {{ currGroup.name }}
        </span>
        <el-button icon="el-icon-back" size="mini" style="float: right" @click="back">
          {{ $t('chart.back') }}
        <el-button icon="el-icon-back" size="mini" style="float: right" circle @click="back">
          <!-- {{ $t('chart.back') }}-->
        </el-button>
      </el-row>
      <el-divider />
@@ -28,7 +28,12 @@
        @start="start1"
      >
        <transition-group>
          <span v-for="item in dimension" :key="item.id" class="item">{{ item.name }}</span>
          <span v-for="item in dimension" :key="item.id" class="item">
            <svg-icon v-if="item.deType === 0" icon-class="field_text" class="field-icon-text" />
            <svg-icon v-if="item.deType === 1" icon-class="field_time" class="field-icon-time" />
            <svg-icon v-if="item.deType === 2 || item.deType === 3" icon-class="field_value" class="field-icon-value" />
            {{ item.name }}
          </span>
        </transition-group>
      </draggable>
    </div>
@@ -44,7 +49,12 @@
        @start="start1"
      >
        <transition-group>
          <span v-for="item in quota" :key="item.id" class="item">{{ item.name }}</span>
          <span v-for="item in quota" :key="item.id" class="item">
            <svg-icon v-if="item.deType === 0" icon-class="field_text" class="field-icon-text" />
            <svg-icon v-if="item.deType === 1" icon-class="field_time" class="field-icon-time" />
            <svg-icon v-if="item.deType === 2 || item.deType === 3" icon-class="field_value" class="field-icon-value" />
            {{ item.name }}
          </span>
        </transition-group>
      </draggable>
    </div>
@@ -35,11 +35,12 @@
      <span slot-scope="{ node, data }" class="custom-tree-node">
        <span>
          <span v-if="data.type === 'scene'">
            <el-button
              icon="el-icon-folder"
              type="text"
              size="mini"
            />
            <!-- <el-button-->
            <!-- icon="el-icon-folder-opened"-->
            <!-- type="text"-->
            <!-- size="mini"-->
            <!-- />-->
            <svg-icon icon-class="scene" class="ds-icon-scene" />
          </span>
          <span style="margin-left: 6px">{{ data.name }}</span>
        </span>
@@ -55,8 +56,8 @@
        <span class="title-text">
          {{ currGroup.name }}
        </span>
        <el-button icon="el-icon-back" size="mini" style="float: right" @click="back">
          {{ $t('dataset.back') }}
        <el-button icon="el-icon-back" size="mini" style="float: right" circle @click="back">
          <!-- {{ $t('dataset.back') }}-->
        </el-button>
      </el-row>
      <el-divider />
@@ -83,11 +84,14 @@
        <span slot-scope="{ node, data }" class="custom-tree-node-list">
          <span>
            <span>
              <svg-icon :icon-class="data.type" class="icon" />
              <svg-icon v-if="data.type === 'db'" icon-class="ds-db" class="ds-icon-db" />
              <svg-icon v-if="data.type === 'sql'" icon-class="ds-sql" class="ds-icon-sql" />
              <svg-icon v-if="data.type === 'excel'" icon-class="ds-excel" class="ds-icon-excel" />
              <svg-icon v-if="data.type === 'custom'" icon-class="ds-custom" class="ds-icon-custom" />
            </span>
            <span>
              <span v-if="data.mode === 0" style="margin-left: 6px"><i class="el-icon-s-operation" /></span>
              <span v-if="data.mode === 1" style="margin-left: 6px"><i class="el-icon-time" /></span>
              <span v-if="data.mode === 1" style="margin-left: 6px"><i class="el-icon-alarm-clock" /></span>
            </span>
            <span style="margin-left: 6px">{{ data.name }}</span>
          </span>
@@ -24,9 +24,15 @@
        :key="field.originName"
        min-width="200px"
        :field="field.originName"
        :title="field.name"
        :resizable="true"
      />
      >
        <template slot="header">
          <svg-icon v-if="field.deType === 0" icon-class="field_text" class="field-icon-text" />
          <svg-icon v-if="field.deType === 1" icon-class="field_time" class="field-icon-time" />
          <svg-icon v-if="field.deType === 2 || field.deType === 3" icon-class="field_value" class="field-icon-value" />
          <span>{{ field.name }}</span>
        </template>
      </ux-table-column>
    </ux-grid>
  </el-col>
</template>
@@ -27,7 +27,7 @@
      <el-tab-pane :label="$t('dataset.join_view')" name="joinView">
        Linked views TODO
      </el-tab-pane>
      <el-tab-pane :label="$t('dataset.update_info')" name="updateInfo">
      <el-tab-pane v-if="table.mode === 1" :label="$t('dataset.update_info')" name="updateInfo">
        <update-info :table="table" />
      </el-tab-pane>
    </el-tabs>
@@ -36,9 +36,18 @@
      <el-table :data="tableFields" size="mini" max-height="600px">
        <el-table-column property="type" :label="$t('dataset.field_type')" width="100">
          <template slot-scope="scope">
            <span v-if="scope.row.deType === 0">{{ $t('dataset.text') }}</span>
            <span v-if="scope.row.deType === 1">{{ $t('dataset.time') }}</span>
            <span v-if="scope.row.deType === 2">{{ $t('dataset.value') }}</span>
            <span v-if="scope.row.deType === 0">
              <svg-icon v-if="scope.row.deType === 0" icon-class="field_text" class="field-icon-text" />
              {{ $t('dataset.text') }}
            </span>
            <span v-if="scope.row.deType === 1">
              <svg-icon v-if="scope.row.deType === 1" icon-class="field_time" class="field-icon-time" />
              {{ $t('dataset.time') }}
            </span>
            <span v-if="scope.row.deType === 2 || scope.row.deType === 3">
              <svg-icon v-if="scope.row.deType === 2 || scope.row.deType === 3" icon-class="field_value" class="field-icon-value" />
              {{ $t('dataset.value') }}
            </span>
          </template>
        </el-table-column>
        <el-table-column property="name" :label="$t('dataset.field_name')" width="180">
@@ -105,6 +114,7 @@ export default {
  methods: {
    initTable(id) {
      console.log(id)
      this.tabActive = 'dataPreview'
      this.tableViewRowForm.row = 1000
      if (id !== null) {
        this.fields = []
@@ -44,11 +44,12 @@
      <span slot-scope="{ node, data }" class="custom-tree-node">
        <span>
          <span v-if="data.type === 'scene'">
            <el-button
              icon="el-icon-folder"
              type="text"
              size="mini"
            />
            <!-- <el-button-->
            <!-- icon="el-icon-folder-opened"-->
            <!-- type="text"-->
            <!-- size="mini"-->
            <!-- />-->
            <svg-icon icon-class="scene" class="ds-icon-scene" />
          </span>
          <span style="margin-left: 6px">{{ data.name }}</span>
        </span>
@@ -120,8 +121,8 @@
        <span class="title-text">
          {{ currGroup.name }}
        </span>
        <el-button icon="el-icon-back" size="mini" style="float: right" @click="back">
          {{ $t('dataset.back') }}
        <el-button icon="el-icon-back" size="mini" style="float: right" circle @click="back">
          <!-- {{ $t('dataset.back') }}-->
        </el-button>
      </el-row>
      <el-divider />
@@ -132,19 +133,19 @@
        </el-button>
        <el-dropdown-menu slot="dropdown">
          <el-dropdown-item :command="beforeClickAddData('db')">
            <svg-icon icon-class="db" class="icon" />
            <svg-icon icon-class="ds-db" class="ds-icon-db" />
            {{ $t('dataset.db_data') }}
          </el-dropdown-item>
          <el-dropdown-item :command="beforeClickAddData('sql')">
            <svg-icon icon-class="sql" class="icon" />
            <svg-icon icon-class="ds-sql" class="ds-icon-sql" />
            {{ $t('dataset.sql_data') }}
          </el-dropdown-item>
          <el-dropdown-item :command="beforeClickAddData('excel')">
            <svg-icon icon-class="excel" class="icon" />
            <svg-icon icon-class="ds-excel" class="ds-icon-excel" />
            {{ $t('dataset.excel_data') }}
          </el-dropdown-item>
          <el-dropdown-item :command="beforeClickAddData('custom')">
            <svg-icon icon-class="custom" class="icon" />
            <svg-icon icon-class="ds-custom" class="ds-icon-custom" />
            {{ $t('dataset.custom_data') }}
          </el-dropdown-item>
        </el-dropdown-menu>
@@ -180,11 +181,14 @@
        <span slot-scope="{ node, data }" class="custom-tree-node-list">
          <span>
            <span>
              <svg-icon :icon-class="data.type" class="icon" />
              <svg-icon v-if="data.type === 'db'" icon-class="ds-db" class="ds-icon-db" />
              <svg-icon v-if="data.type === 'sql'" icon-class="ds-sql" class="ds-icon-sql" />
              <svg-icon v-if="data.type === 'excel'" icon-class="ds-excel" class="ds-icon-excel" />
              <svg-icon v-if="data.type === 'custom'" icon-class="ds-custom" class="ds-icon-custom" />
            </span>
            <span>
              <span v-if="data.mode === 0" style="margin-left: 6px"><i class="el-icon-s-operation" /></span>
              <span v-if="data.mode === 1" style="margin-left: 6px"><i class="el-icon-time" /></span>
              <span v-if="data.mode === 1" style="margin-left: 6px"><i class="el-icon-alarm-clock" /></span>
            </span>
            <span style="margin-left: 6px">{{ data.name }}</span>
          </span>
@@ -582,11 +586,6 @@ export default {
</script>

<style scoped>
.icon{
  width: 14px;
  height: 14px;
}

.el-divider--horizontal {
  margin: 12px 0
}