From ec4dcf93a9df1e783a8df28dd1318be1368c946f Mon Sep 17 00:00:00 2001
From: taojinlong
Date: Wed, 16 Jun 2021 18:02:56 +0800
Subject: [PATCH 1/2] =?UTF-8?q?feat:=20=E6=B8=85=E7=90=86=E4=B8=B4?=
 =?UTF-8?q?=E6=97=B6=E6=96=87=E4=BB=B6?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 Dockerfile                                     |  2 +-
 .../io/dataease/base/domain/DatasetTable.java  |  2 +
 .../base/domain/DatasetTableExample.java       | 60 +++++++++++++
 .../base/mapper/DatasetTableMapper.xml         | 29 ++++--
 .../service/dataset/DataSetTableService.java   |  5 ++
 .../service/dataset/ExtractDataService.java    | 89 +++++++++++--------
 .../db/migration/V7__alter_dataset_table.sql   |  1 +
 7 files changed, 143 insertions(+), 45 deletions(-)
 create mode 100644 backend/src/main/resources/db/migration/V7__alter_dataset_table.sql

diff --git a/Dockerfile b/Dockerfile
index da05358bd4..592ef71ff5 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM registry.fit2cloud.com/fit2cloud3/fabric8-java-alpine-openjdk8-jre
+FROM registry.cn-qingdao.aliyuncs.com/fit2cloud3/fabric8-java-alpine-openjdk8-jre
 
 ARG IMAGE_TAG
 
diff --git a/backend/src/main/java/io/dataease/base/domain/DatasetTable.java b/backend/src/main/java/io/dataease/base/domain/DatasetTable.java
index e88c0febff..c45f77a26e 100644
--- a/backend/src/main/java/io/dataease/base/domain/DatasetTable.java
+++ b/backend/src/main/java/io/dataease/base/domain/DatasetTable.java
@@ -25,6 +25,8 @@ public class DatasetTable implements Serializable {
 
     private String syncStatus;
 
+    private Long lastUpdateTime;
+
     private String info;
 
     private static final long serialVersionUID = 1L;
diff --git a/backend/src/main/java/io/dataease/base/domain/DatasetTableExample.java b/backend/src/main/java/io/dataease/base/domain/DatasetTableExample.java
index b0dad0931d..539348e6eb 100644
--- a/backend/src/main/java/io/dataease/base/domain/DatasetTableExample.java
+++ b/backend/src/main/java/io/dataease/base/domain/DatasetTableExample.java
@@ -783,6 +783,66 @@ public class DatasetTableExample {
             addCriterion("sync_status not between", value1, value2, "syncStatus");
             return (Criteria) this;
         }
+
+        public Criteria andLastUpdateTimeIsNull() {
+            addCriterion("last_update_time is null");
+            return (Criteria) this;
+        }
+
+        public Criteria andLastUpdateTimeIsNotNull() {
+            addCriterion("last_update_time is not null");
+            return (Criteria) this;
+        }
+
+        public Criteria andLastUpdateTimeEqualTo(Long value) {
+            addCriterion("last_update_time =", value, "lastUpdateTime");
+            return (Criteria) this;
+        }
+
+        public Criteria andLastUpdateTimeNotEqualTo(Long value) {
+            addCriterion("last_update_time <>", value, "lastUpdateTime");
+            return (Criteria) this;
+        }
+
+        public Criteria andLastUpdateTimeGreaterThan(Long value) {
+            addCriterion("last_update_time >", value, "lastUpdateTime");
+            return (Criteria) this;
+        }
+
+        public Criteria andLastUpdateTimeGreaterThanOrEqualTo(Long value) {
+            addCriterion("last_update_time >=", value, "lastUpdateTime");
+            return (Criteria) this;
+        }
+
+        public Criteria andLastUpdateTimeLessThan(Long value) {
+            addCriterion("last_update_time <", value, "lastUpdateTime");
+            return (Criteria) this;
+        }
+
+        public Criteria andLastUpdateTimeLessThanOrEqualTo(Long value) {
+            addCriterion("last_update_time <=", value, "lastUpdateTime");
+            return (Criteria) this;
+        }
+
+        public Criteria andLastUpdateTimeIn(List<Long> values) {
+            addCriterion("last_update_time in", values, "lastUpdateTime");
+            return (Criteria) this;
+        }
+
+        public Criteria andLastUpdateTimeNotIn(List<Long> values) {
addCriterion("last_update_time not in", values, "lastUpdateTime"); + return (Criteria) this; + } + + public Criteria andLastUpdateTimeBetween(Long value1, Long value2) { + addCriterion("last_update_time between", value1, value2, "lastUpdateTime"); + return (Criteria) this; + } + + public Criteria andLastUpdateTimeNotBetween(Long value1, Long value2) { + addCriterion("last_update_time not between", value1, value2, "lastUpdateTime"); + return (Criteria) this; + } } public static class Criteria extends GeneratedCriteria { diff --git a/backend/src/main/java/io/dataease/base/mapper/DatasetTableMapper.xml b/backend/src/main/java/io/dataease/base/mapper/DatasetTableMapper.xml index 35c7c22c34..4c72b88ecc 100644 --- a/backend/src/main/java/io/dataease/base/mapper/DatasetTableMapper.xml +++ b/backend/src/main/java/io/dataease/base/mapper/DatasetTableMapper.xml @@ -12,6 +12,7 @@ + @@ -76,7 +77,7 @@ id, `name`, scene_id, data_source_id, `type`, `mode`, create_by, create_time, qrtz_instance, - sync_status + sync_status, last_update_time info @@ -133,11 +134,13 @@ insert into dataset_table (id, `name`, scene_id, data_source_id, `type`, `mode`, create_by, create_time, qrtz_instance, - sync_status, info) + sync_status, last_update_time, info + ) values (#{id,jdbcType=VARCHAR}, #{name,jdbcType=VARCHAR}, #{sceneId,jdbcType=VARCHAR}, #{dataSourceId,jdbcType=VARCHAR}, #{type,jdbcType=VARCHAR}, #{mode,jdbcType=INTEGER}, #{createBy,jdbcType=VARCHAR}, #{createTime,jdbcType=BIGINT}, #{qrtzInstance,jdbcType=VARCHAR}, - #{syncStatus,jdbcType=VARCHAR}, #{info,jdbcType=LONGVARCHAR}) + #{syncStatus,jdbcType=VARCHAR}, #{lastUpdateTime,jdbcType=BIGINT}, #{info,jdbcType=LONGVARCHAR} + ) insert into dataset_table @@ -172,6 +175,9 @@ sync_status, + + last_update_time, + info, @@ -207,6 +213,9 @@ #{syncStatus,jdbcType=VARCHAR}, + + #{lastUpdateTime,jdbcType=BIGINT}, + #{info,jdbcType=LONGVARCHAR}, @@ -251,6 +260,9 @@ sync_status = #{record.syncStatus,jdbcType=VARCHAR}, + + last_update_time = #{record.lastUpdateTime,jdbcType=BIGINT}, + info = #{record.info,jdbcType=LONGVARCHAR}, @@ -271,6 +283,7 @@ create_time = #{record.createTime,jdbcType=BIGINT}, qrtz_instance = #{record.qrtzInstance,jdbcType=VARCHAR}, sync_status = #{record.syncStatus,jdbcType=VARCHAR}, + last_update_time = #{record.lastUpdateTime,jdbcType=BIGINT}, info = #{record.info,jdbcType=LONGVARCHAR} @@ -287,7 +300,8 @@ create_by = #{record.createBy,jdbcType=VARCHAR}, create_time = #{record.createTime,jdbcType=BIGINT}, qrtz_instance = #{record.qrtzInstance,jdbcType=VARCHAR}, - sync_status = #{record.syncStatus,jdbcType=VARCHAR} + sync_status = #{record.syncStatus,jdbcType=VARCHAR}, + last_update_time = #{record.lastUpdateTime,jdbcType=BIGINT} @@ -322,6 +336,9 @@ sync_status = #{syncStatus,jdbcType=VARCHAR}, + + last_update_time = #{lastUpdateTime,jdbcType=BIGINT}, + info = #{info,jdbcType=LONGVARCHAR}, @@ -339,6 +356,7 @@ create_time = #{createTime,jdbcType=BIGINT}, qrtz_instance = #{qrtzInstance,jdbcType=VARCHAR}, sync_status = #{syncStatus,jdbcType=VARCHAR}, + last_update_time = #{lastUpdateTime,jdbcType=BIGINT}, info = #{info,jdbcType=LONGVARCHAR} where id = #{id,jdbcType=VARCHAR} @@ -352,7 +370,8 @@ create_by = #{createBy,jdbcType=VARCHAR}, create_time = #{createTime,jdbcType=BIGINT}, qrtz_instance = #{qrtzInstance,jdbcType=VARCHAR}, - sync_status = #{syncStatus,jdbcType=VARCHAR} + sync_status = #{syncStatus,jdbcType=VARCHAR}, + last_update_time = #{lastUpdateTime,jdbcType=BIGINT} where id = #{id,jdbcType=VARCHAR} \ No newline at end of file diff --git 
a/backend/src/main/java/io/dataease/service/dataset/DataSetTableService.java b/backend/src/main/java/io/dataease/service/dataset/DataSetTableService.java index 4014d37c96..fe696ff646 100644 --- a/backend/src/main/java/io/dataease/service/dataset/DataSetTableService.java +++ b/backend/src/main/java/io/dataease/service/dataset/DataSetTableService.java @@ -1055,6 +1055,11 @@ public class DataSetTableService { DatasetTableTaskLogExample datasetTableTaskLogExample = new DatasetTableTaskLogExample(); datasetTableTaskLogExample.createCriteria().andStatusEqualTo(JobStatus.Underway.name()).andTableIdIn(jobStoppeddDatasetTables.stream().map(DatasetTable::getId).collect(Collectors.toList())); datasetTableTaskLogMapper.updateByExampleSelective(datasetTableTaskLog, datasetTableTaskLogExample); + for (DatasetTable jobStoppeddDatasetTable : jobStoppeddDatasetTables) { + extractDataService.deleteFile("all_scope", jobStoppeddDatasetTable.getId()); + extractDataService.deleteFile("incremental_add", jobStoppeddDatasetTable.getId()); + extractDataService.deleteFile("incremental_delete", jobStoppeddDatasetTable.getId()); + } } } diff --git a/backend/src/main/java/io/dataease/service/dataset/ExtractDataService.java b/backend/src/main/java/io/dataease/service/dataset/ExtractDataService.java index 9a1a083cfd..c93236d857 100644 --- a/backend/src/main/java/io/dataease/service/dataset/ExtractDataService.java +++ b/backend/src/main/java/io/dataease/service/dataset/ExtractDataService.java @@ -196,14 +196,15 @@ public class ExtractDataService { }else { generateJobFile("all_scope", datasetTable, String.join(",", datasetTableFields.stream().map(DatasetTableField::getDataeaseName).collect(Collectors.toList()))); } + Long execTime = System.currentTimeMillis(); extractData(datasetTable, "all_scope"); replaceTable(DorisTableUtils.dorisName(datasetTableId)); saveSucessLog(datasetTableTaskLog); deleteFile("all_scope", datasetTableId); - updateTableStatus(datasetTableId, datasetTable, JobStatus.Completed); + updateTableStatus(datasetTableId, datasetTable, JobStatus.Completed, execTime); }catch (Exception e){ saveErrorLog(datasetTableId, taskId, e); - updateTableStatus(datasetTableId, datasetTable, JobStatus.Error); + updateTableStatus(datasetTableId, datasetTable, JobStatus.Error, null); dropDorisTable(DorisTableUtils.dorisTmpName(DorisTableUtils.dorisName(datasetTableId))); deleteFile("all_scope", datasetTableId); }finally { @@ -220,21 +221,19 @@ public class ExtractDataService { datasetTableTaskLog = writeDatasetTableTaskLog(datasetTableTaskLog, datasetTableId, null); generateTransFile("incremental_add", datasetTable, datasource, datasetTableFields, null); generateJobFile("incremental_add", datasetTable, String.join(",", datasetTableFields.stream().map(DatasetTableField::getDataeaseName).collect(Collectors.toList()))); + Long execTime = System.currentTimeMillis(); extractData(datasetTable, "incremental_add"); saveSucessLog(datasetTableTaskLog); - updateTableStatus(datasetTableId, datasetTable, JobStatus.Completed); + updateTableStatus(datasetTableId, datasetTable, JobStatus.Completed, execTime); }else { DatasetTableIncrementalConfig datasetTableIncrementalConfig = dataSetTableService.incrementalConfig(datasetTableId); if (datasetTableIncrementalConfig == null || StringUtils.isEmpty(datasetTableIncrementalConfig.getTableId())) { - updateTableStatus(datasetTableId, datasetTable, JobStatus.Completed); + updateTableStatus(datasetTableId, datasetTable, JobStatus.Completed, null); return; } - DatasetTableTaskLog request = new 
DatasetTableTaskLog(); - request.setTableId(datasetTableId); - request.setStatus(JobStatus.Completed.name()); - List datasetTableTaskLogs = dataSetTableTaskLogService.select(request); - if (CollectionUtils.isEmpty(datasetTableTaskLogs)) { - updateTableStatus(datasetTableId, datasetTable, JobStatus.Completed); + + if (datasetTable.getLastUpdateTime() == 0 || datasetTable.getLastUpdateTime() == null) { + updateTableStatus(datasetTableId, datasetTable, JobStatus.Completed, null); return; } @@ -245,8 +244,9 @@ public class ExtractDataService { datasetTableTaskLog = getDatasetTableTaskLog(datasetTableTaskLog, datasetTableId, taskId); } + Long execTime = System.currentTimeMillis(); if (StringUtils.isNotEmpty(datasetTableIncrementalConfig.getIncrementalAdd()) && StringUtils.isNotEmpty(datasetTableIncrementalConfig.getIncrementalAdd().replace(" ", ""))) {// 增量添加 - String sql = datasetTableIncrementalConfig.getIncrementalAdd().replace(lastUpdateTime, datasetTableTaskLogs.get(0).getStartTime().toString()) + String sql = datasetTableIncrementalConfig.getIncrementalAdd().replace(lastUpdateTime, datasetTable.getLastUpdateTime().toString()) .replace(currentUpdateTime, Long.valueOf(System.currentTimeMillis()).toString()); generateTransFile("incremental_add", datasetTable, datasource, datasetTableFields, sql); generateJobFile("incremental_add", datasetTable, fetchSqlField(sql, datasource)); @@ -254,7 +254,7 @@ public class ExtractDataService { } if (StringUtils.isNotEmpty(datasetTableIncrementalConfig.getIncrementalDelete()) && StringUtils.isNotEmpty(datasetTableIncrementalConfig.getIncrementalDelete().replace(" ", ""))) {// 增量删除 - String sql = datasetTableIncrementalConfig.getIncrementalDelete().replace(lastUpdateTime, datasetTableTaskLogs.get(0).getStartTime().toString()) + String sql = datasetTableIncrementalConfig.getIncrementalDelete().replace(lastUpdateTime, datasetTable.getLastUpdateTime().toString()) .replace(currentUpdateTime, Long.valueOf(System.currentTimeMillis()).toString()); generateTransFile("incremental_delete", datasetTable, datasource, datasetTableFields, sql); generateJobFile("incremental_delete", datasetTable, fetchSqlField(sql, datasource)); @@ -263,11 +263,11 @@ public class ExtractDataService { saveSucessLog(datasetTableTaskLog); deleteFile("incremental_add", datasetTableId); deleteFile("incremental_delete", datasetTableId); - updateTableStatus(datasetTableId, datasetTable, JobStatus.Completed); + updateTableStatus(datasetTableId, datasetTable, JobStatus.Completed, execTime); } }catch (Exception e){ saveErrorLog(datasetTableId, taskId, e); - updateTableStatus(datasetTableId, datasetTable, JobStatus.Error); + updateTableStatus(datasetTableId, datasetTable, JobStatus.Error, null); deleteFile("incremental_add", datasetTableId); deleteFile("incremental_delete", datasetTableId); }finally { @@ -280,8 +280,11 @@ public class ExtractDataService { } } - private void updateTableStatus(String datasetTableId, DatasetTable datasetTable, JobStatus completed) { + private void updateTableStatus(String datasetTableId, DatasetTable datasetTable, JobStatus completed, Long execTime) { datasetTable.setSyncStatus(completed.name()); + if(execTime != null){ + datasetTable.setLastUpdateTime(execTime); + } DatasetTableExample example = new DatasetTableExample(); example.createCriteria().andIdEqualTo(datasetTableId); datasetTableMapper.updateByExampleSelective(datasetTable, example); @@ -429,13 +432,13 @@ public class ExtractDataService { JobMeta jobMeta = null; switch (extractType) { case "all_scope": - 
jobMeta = repository.loadJob("job_" + datasetTable.getId(), repositoryDirectoryInterface, null, null); + jobMeta = repository.loadJob("job_" + DorisTableUtils.dorisName(datasetTable.getId()), repositoryDirectoryInterface, null, null); break; case "incremental_add": - jobMeta = repository.loadJob("job_add_" + datasetTable.getId(), repositoryDirectoryInterface, null, null); + jobMeta = repository.loadJob("job_add_" + DorisTableUtils.dorisName(datasetTable.getId()), repositoryDirectoryInterface, null, null); break; case "incremental_delete": - jobMeta = repository.loadJob("job_delete_" + datasetTable.getId(), repositoryDirectoryInterface, null, null); + jobMeta = repository.loadJob("job_delete_" + DorisTableUtils.dorisName(datasetTable.getId()), repositoryDirectoryInterface, null, null); break; default: break; @@ -477,7 +480,7 @@ public class ExtractDataService { } private void generateJobFile(String extractType, DatasetTable datasetTable, String columnFeilds) throws Exception { - String dorisOutputTable = null; + String outFile = null; String jobName = null; String script = null; Datasource dorisDatasource = (Datasource) CommonBeanFactory.getBean("DorisDatasource"); @@ -486,22 +489,22 @@ public class ExtractDataService { String transName = null; switch (extractType) { case "all_scope": - transName = "trans_" + datasetTable.getId(); - dorisOutputTable = DorisTableUtils.dorisTmpName(DorisTableUtils.dorisName(datasetTable.getId())); + transName = "trans_" + DorisTableUtils.dorisName(datasetTable.getId()); + outFile = DorisTableUtils.dorisTmpName(DorisTableUtils.dorisName(datasetTable.getId())); jobName = "job_" + datasetTable.getId(); - script = String.format(shellScript, dorisConfigration.getUsername(), dorisConfigration.getPassword(), String.valueOf(System.currentTimeMillis()), separator, columns, "APPEND", root_path + dorisOutputTable + "." + extention, dorisConfigration.getHost(), dorisConfigration.getHttpPort(), dorisConfigration.getDataBase(), dorisOutputTable, root_path + dorisOutputTable + "." + extention); + script = String.format(shellScript, dorisConfigration.getUsername(), dorisConfigration.getPassword(), String.valueOf(System.currentTimeMillis()), separator, columns, "APPEND", root_path + outFile + "." + extention, dorisConfigration.getHost(), dorisConfigration.getHttpPort(), dorisConfigration.getDataBase(), DorisTableUtils.dorisName(datasetTable.getId()), root_path + outFile + "." + extention); break; case "incremental_add": - transName = "trans_add_" + datasetTable.getId(); - dorisOutputTable = DorisTableUtils.dorisName(datasetTable.getId()); - jobName = "job_add_" + datasetTable.getId(); - script = String.format(shellScript, dorisConfigration.getUsername(), dorisConfigration.getPassword(), String.valueOf(System.currentTimeMillis()), separator, columns, "APPEND", root_path + dorisOutputTable + "." + extention, dorisConfigration.getHost(), dorisConfigration.getHttpPort(), dorisConfigration.getDataBase(), dorisOutputTable, root_path + dorisOutputTable + "." + extention); + transName = "trans_add_" + DorisTableUtils.dorisName(datasetTable.getId()); + outFile = DorisTableUtils.dorisName(datasetTable.getId()); + jobName = "job_add_" + DorisTableUtils.dorisName(datasetTable.getId()); + script = String.format(shellScript, dorisConfigration.getUsername(), dorisConfigration.getPassword(), String.valueOf(System.currentTimeMillis()), separator, columns, "APPEND", root_path + outFile + "." 
+ extention, dorisConfigration.getHost(), dorisConfigration.getHttpPort(), dorisConfigration.getDataBase(), DorisTableUtils.dorisName(datasetTable.getId()), root_path + outFile + "." + extention); break; case "incremental_delete": - transName = "trans_delete_" + datasetTable.getId(); - dorisOutputTable = DorisTableUtils.dorisDeleteName(DorisTableUtils.dorisName(datasetTable.getId())); - script = String.format(shellScript, dorisConfigration.getUsername(), dorisConfigration.getPassword(), String.valueOf(System.currentTimeMillis()), separator, columns, "DELETE", root_path + dorisOutputTable + "." + extention, dorisConfigration.getHost(), dorisConfigration.getHttpPort(), dorisConfigration.getDataBase(), DorisTableUtils.dorisName(datasetTable.getId()), root_path + dorisOutputTable + "." + extention); - jobName = "job_delete_" + datasetTable.getId(); + transName = "trans_delete_" + DorisTableUtils.dorisName(datasetTable.getId()); + outFile = DorisTableUtils.dorisDeleteName(DorisTableUtils.dorisName(datasetTable.getId())); + script = String.format(shellScript, dorisConfigration.getUsername(), dorisConfigration.getPassword(), String.valueOf(System.currentTimeMillis()), separator, columns, "DELETE", root_path + outFile + "." + extention, dorisConfigration.getHost(), dorisConfigration.getHttpPort(), dorisConfigration.getDataBase(), DorisTableUtils.dorisName(datasetTable.getId()), root_path + outFile + "." + extention); + jobName = "job_delete_" + DorisTableUtils.dorisName(datasetTable.getId()); break; default: break; @@ -586,7 +589,7 @@ public class ExtractDataService { private void generateTransFile(String extractType, DatasetTable datasetTable, Datasource datasource, List datasetTableFields, String selectSQL) throws Exception { TransMeta transMeta = new TransMeta(); - String dorisOutputTable = null; + String outFile = null; DatasourceTypes datasourceType = DatasourceTypes.valueOf(datasource.getType()); DatabaseMeta dataMeta = null; StepMeta inputStep = null; @@ -640,18 +643,18 @@ public class ExtractDataService { switch (extractType) { case "all_scope": - transName = "trans_" + datasetTable.getId(); - dorisOutputTable = DorisTableUtils.dorisTmpName(DorisTableUtils.dorisName(datasetTable.getId())); + transName = "trans_" + DorisTableUtils.dorisName(datasetTable.getId()); + outFile = DorisTableUtils.dorisTmpName(DorisTableUtils.dorisName(datasetTable.getId())); transMeta.setName(transName); break; case "incremental_add": - transName = "trans_add_" + datasetTable.getId(); - dorisOutputTable = DorisTableUtils.dorisName(datasetTable.getId()); + transName = "trans_add_" + DorisTableUtils.dorisName(datasetTable.getId()); + outFile = DorisTableUtils.dorisName(datasetTable.getId()); transMeta.setName(transName); break; case "incremental_delete": - dorisOutputTable = DorisTableUtils.dorisDeleteName(DorisTableUtils.dorisName(datasetTable.getId())); - transName = "trans_delete_" + datasetTable.getId(); + transName = "trans_delete_" + DorisTableUtils.dorisName(datasetTable.getId()); + outFile = DorisTableUtils.dorisDeleteName(DorisTableUtils.dorisName(datasetTable.getId())); transMeta.setName(transName); break; default: @@ -659,7 +662,7 @@ public class ExtractDataService { } - outputStep = outputStep(dorisOutputTable); + outputStep = outputStep(outFile); hi1 = new TransHopMeta(inputStep, udjcStep); hi2 = new TransHopMeta(udjcStep, outputStep); transMeta.addTransHop(hi1); @@ -779,26 +782,34 @@ public class ExtractDataService { return userDefinedJavaClassStep; } - private void deleteFile(String type, String 
dataSetTableId){ + public void deleteFile(String type, String dataSetTableId){ String transName = null; String jobName = null; + String fileName = null; switch (type) { case "all_scope": transName = "trans_" + dataSetTableId; jobName = "job_" + dataSetTableId; + fileName = DorisTableUtils.dorisTmpName(dataSetTableId); break; case "incremental_add": transName = "trans_add_" + dataSetTableId; jobName = "job_add_" + dataSetTableId; + fileName = DorisTableUtils.dorisName(dataSetTableId); break; case "incremental_delete": transName = "trans_delete_" + dataSetTableId; jobName = "job_delete_" + dataSetTableId; + fileName = DorisTableUtils.dorisDeleteName(dataSetTableId); break; default: break; } + try{ + File file = new File(root_path + fileName + "." + extention); + FileUtils.forceDelete(file); + }catch (Exception e){} try{ File file = new File(root_path + jobName + ".kjb"); FileUtils.forceDelete(file); diff --git a/backend/src/main/resources/db/migration/V7__alter_dataset_table.sql b/backend/src/main/resources/db/migration/V7__alter_dataset_table.sql new file mode 100644 index 0000000000..f93e569d48 --- /dev/null +++ b/backend/src/main/resources/db/migration/V7__alter_dataset_table.sql @@ -0,0 +1 @@ +ALTER TABLE `dataset_table` ADD COLUMN `last_update_time` BIGINT(13) NULL DEFAULT 0 AFTER `sync_status`; From a8b9a4fbb2248da28ded7a0278d42f869ee1d87d Mon Sep 17 00:00:00 2001 From: fit2cloud-chenyw Date: Wed, 16 Jun 2021 18:03:59 +0800 Subject: [PATCH 2/2] =?UTF-8?q?feat:=20=E5=88=A0=E9=99=A4=E4=B8=8D?= =?UTF-8?q?=E7=AD=89=E4=BA=8E=20=E4=B8=8D=E5=8C=85=E5=90=AB?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../io/dataease/auth/config/F2CRealm.java | 14 ++++- .../condition-table/DeComplexInput.vue | 53 ++++++++++++++++ .../condition-table/DeComplexOperator.vue | 62 +++++++++++++++++++ frontend/src/main.js | 2 + frontend/src/views/system/user/index.vue | 3 +- 5 files changed, 129 insertions(+), 5 deletions(-) create mode 100644 frontend/src/components/business/condition-table/DeComplexInput.vue create mode 100644 frontend/src/components/business/condition-table/DeComplexOperator.vue diff --git a/backend/src/main/java/io/dataease/auth/config/F2CRealm.java b/backend/src/main/java/io/dataease/auth/config/F2CRealm.java index 29c269f10f..1f0ee7e60d 100644 --- a/backend/src/main/java/io/dataease/auth/config/F2CRealm.java +++ b/backend/src/main/java/io/dataease/auth/config/F2CRealm.java @@ -60,10 +60,18 @@ public class F2CRealm extends AuthorizingRealm { CacheUtils.get("lic_info", "lic"); }catch (Exception e) { LogUtil.error(e); + throw new AuthenticationException("lic error"); } - String token = (String) auth.getCredentials(); - // 解密获得username,用于和数据库进行对比 - TokenInfo tokenInfo = JWTUtils.tokenInfoByToken(token); + TokenInfo tokenInfo = null; + String token = null; + try { + token = (String) auth.getCredentials(); + // 解密获得username,用于和数据库进行对比 + tokenInfo = JWTUtils.tokenInfoByToken(token); + }catch (Exception e) { + throw new AuthenticationException(e); + } + Long userId = tokenInfo.getUserId(); String username = tokenInfo.getUsername(); if (username == null) { diff --git a/frontend/src/components/business/condition-table/DeComplexInput.vue b/frontend/src/components/business/condition-table/DeComplexInput.vue new file mode 100644 index 0000000000..9ff39ac3bf --- /dev/null +++ b/frontend/src/components/business/condition-table/DeComplexInput.vue @@ -0,0 +1,53 @@ + + + diff --git a/frontend/src/components/business/condition-table/DeComplexOperator.vue 
b/frontend/src/components/business/condition-table/DeComplexOperator.vue new file mode 100644 index 0000000000..1feec41b1d --- /dev/null +++ b/frontend/src/components/business/condition-table/DeComplexOperator.vue @@ -0,0 +1,62 @@ + + + + + diff --git a/frontend/src/main.js b/frontend/src/main.js index ada2849a15..2dc827327e 100644 --- a/frontend/src/main.js +++ b/frontend/src/main.js @@ -20,6 +20,7 @@ import widgets from '@/components/widget' import Treeselect from '@riophae/vue-treeselect' import '@riophae/vue-treeselect/dist/vue-treeselect.css' import './utils/dialog' +import DeComplexInput from '@/components/business/condition-table/DeComplexInput' import '@/components/canvas/custom-component' // 注册自定义组件 Vue.config.productionTip = false @@ -69,6 +70,7 @@ Vue.use(filter) Vue.use(directives) Vue.use(message) Vue.component('Treeselect', Treeselect) +Vue.component('DeComplexInput', DeComplexInput) Vue.config.productionTip = false Vue.prototype.hasDataPermission = function(pTarget, pSource) { diff --git a/frontend/src/views/system/user/index.vue b/frontend/src/views/system/user/index.vue index 2cfa1a189c..fe93048dd3 100644 --- a/frontend/src/views/system/user/index.vue +++ b/frontend/src/views/system/user/index.vue @@ -162,7 +162,6 @@