forked from github/dataease
fix: detect synchronization tasks
This commit is contained in:
parent 8f88821be4
commit 92549c13d4
@@ -21,6 +21,8 @@ public class DatasetTable implements Serializable {
 
     private Long createTime;
 
+    private String qrtzInstance;
+
     private String syncStatus;
 
     private String info;
@@ -644,6 +644,76 @@ public class DatasetTableExample {
             return (Criteria) this;
         }
+
+        public Criteria andQrtzInstanceIsNull() {
+            addCriterion("qrtz_instance is null");
+            return (Criteria) this;
+        }
+
+        public Criteria andQrtzInstanceIsNotNull() {
+            addCriterion("qrtz_instance is not null");
+            return (Criteria) this;
+        }
+
+        public Criteria andQrtzInstanceEqualTo(String value) {
+            addCriterion("qrtz_instance =", value, "qrtzInstance");
+            return (Criteria) this;
+        }
+
+        public Criteria andQrtzInstanceNotEqualTo(String value) {
+            addCriterion("qrtz_instance <>", value, "qrtzInstance");
+            return (Criteria) this;
+        }
+
+        public Criteria andQrtzInstanceGreaterThan(String value) {
+            addCriterion("qrtz_instance >", value, "qrtzInstance");
+            return (Criteria) this;
+        }
+
+        public Criteria andQrtzInstanceGreaterThanOrEqualTo(String value) {
+            addCriterion("qrtz_instance >=", value, "qrtzInstance");
+            return (Criteria) this;
+        }
+
+        public Criteria andQrtzInstanceLessThan(String value) {
+            addCriterion("qrtz_instance <", value, "qrtzInstance");
+            return (Criteria) this;
+        }
+
+        public Criteria andQrtzInstanceLessThanOrEqualTo(String value) {
+            addCriterion("qrtz_instance <=", value, "qrtzInstance");
+            return (Criteria) this;
+        }
+
+        public Criteria andQrtzInstanceLike(String value) {
+            addCriterion("qrtz_instance like", value, "qrtzInstance");
+            return (Criteria) this;
+        }
+
+        public Criteria andQrtzInstanceNotLike(String value) {
+            addCriterion("qrtz_instance not like", value, "qrtzInstance");
+            return (Criteria) this;
+        }
+
+        public Criteria andQrtzInstanceIn(List<String> values) {
+            addCriterion("qrtz_instance in", values, "qrtzInstance");
+            return (Criteria) this;
+        }
+
+        public Criteria andQrtzInstanceNotIn(List<String> values) {
+            addCriterion("qrtz_instance not in", values, "qrtzInstance");
+            return (Criteria) this;
+        }
+
+        public Criteria andQrtzInstanceBetween(String value1, String value2) {
+            addCriterion("qrtz_instance between", value1, value2, "qrtzInstance");
+            return (Criteria) this;
+        }
+
+        public Criteria andQrtzInstanceNotBetween(String value1, String value2) {
+            addCriterion("qrtz_instance not between", value1, value2, "qrtzInstance");
+            return (Criteria) this;
+        }
 
         public Criteria andSyncStatusIsNull() {
             addCriterion("sync_status is null");
             return (Criteria) this;
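Note: the methods above are standard MyBatis Generator criteria for the new qrtz_instance column. A hypothetical usage sketch, not part of this commit (the "node01%" value and the injected datasetTableMapper are assumptions for illustration; the mapper itself appears in the service code later in this diff):

    // Find dataset tables whose last sync was fired by a given Quartz scheduler instance.
    DatasetTableExample example = new DatasetTableExample();
    example.createCriteria().andQrtzInstanceLike("node01%");
    List<DatasetTable> tables = datasetTableMapper.selectByExample(example);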
@@ -10,6 +10,7 @@
     <result column="mode" jdbcType="INTEGER" property="mode" />
     <result column="create_by" jdbcType="VARCHAR" property="createBy" />
     <result column="create_time" jdbcType="BIGINT" property="createTime" />
+    <result column="qrtz_instance" jdbcType="VARCHAR" property="qrtzInstance" />
     <result column="sync_status" jdbcType="VARCHAR" property="syncStatus" />
   </resultMap>
   <resultMap extends="BaseResultMap" id="ResultMapWithBLOBs" type="io.dataease.base.domain.DatasetTable">
@@ -74,7 +75,8 @@
     </where>
   </sql>
   <sql id="Base_Column_List">
-    id, `name`, scene_id, data_source_id, `type`, `mode`, create_by, create_time, sync_status
+    id, `name`, scene_id, data_source_id, `type`, `mode`, create_by, create_time, qrtz_instance,
+    sync_status
   </sql>
   <sql id="Blob_Column_List">
     info
@@ -130,12 +132,12 @@
   <insert id="insert" parameterType="io.dataease.base.domain.DatasetTable">
     insert into dataset_table (id, `name`, scene_id,
       data_source_id, `type`, `mode`,
-      create_by, create_time, sync_status,
-      info)
+      create_by, create_time, qrtz_instance,
+      sync_status, info)
     values (#{id,jdbcType=VARCHAR}, #{name,jdbcType=VARCHAR}, #{sceneId,jdbcType=VARCHAR},
       #{dataSourceId,jdbcType=VARCHAR}, #{type,jdbcType=VARCHAR}, #{mode,jdbcType=INTEGER},
-      #{createBy,jdbcType=VARCHAR}, #{createTime,jdbcType=BIGINT}, #{syncStatus,jdbcType=VARCHAR},
-      #{info,jdbcType=LONGVARCHAR})
+      #{createBy,jdbcType=VARCHAR}, #{createTime,jdbcType=BIGINT}, #{qrtzInstance,jdbcType=VARCHAR},
+      #{syncStatus,jdbcType=VARCHAR}, #{info,jdbcType=LONGVARCHAR})
   </insert>
   <insert id="insertSelective" parameterType="io.dataease.base.domain.DatasetTable">
     insert into dataset_table
@@ -164,6 +166,9 @@
       <if test="createTime != null">
         create_time,
       </if>
+      <if test="qrtzInstance != null">
+        qrtz_instance,
+      </if>
       <if test="syncStatus != null">
         sync_status,
       </if>
@@ -196,6 +201,9 @@
       <if test="createTime != null">
         #{createTime,jdbcType=BIGINT},
       </if>
+      <if test="qrtzInstance != null">
+        #{qrtzInstance,jdbcType=VARCHAR},
+      </if>
       <if test="syncStatus != null">
         #{syncStatus,jdbcType=VARCHAR},
       </if>
@@ -237,6 +245,9 @@
       <if test="record.createTime != null">
         create_time = #{record.createTime,jdbcType=BIGINT},
       </if>
+      <if test="record.qrtzInstance != null">
+        qrtz_instance = #{record.qrtzInstance,jdbcType=VARCHAR},
+      </if>
       <if test="record.syncStatus != null">
         sync_status = #{record.syncStatus,jdbcType=VARCHAR},
       </if>
@@ -258,6 +269,7 @@
       `mode` = #{record.mode,jdbcType=INTEGER},
       create_by = #{record.createBy,jdbcType=VARCHAR},
       create_time = #{record.createTime,jdbcType=BIGINT},
+      qrtz_instance = #{record.qrtzInstance,jdbcType=VARCHAR},
       sync_status = #{record.syncStatus,jdbcType=VARCHAR},
       info = #{record.info,jdbcType=LONGVARCHAR}
     <if test="_parameter != null">
@@ -274,6 +286,7 @@
       `mode` = #{record.mode,jdbcType=INTEGER},
       create_by = #{record.createBy,jdbcType=VARCHAR},
       create_time = #{record.createTime,jdbcType=BIGINT},
+      qrtz_instance = #{record.qrtzInstance,jdbcType=VARCHAR},
       sync_status = #{record.syncStatus,jdbcType=VARCHAR}
     <if test="_parameter != null">
       <include refid="Update_By_Example_Where_Clause" />
@@ -303,6 +316,9 @@
       <if test="createTime != null">
         create_time = #{createTime,jdbcType=BIGINT},
       </if>
+      <if test="qrtzInstance != null">
+        qrtz_instance = #{qrtzInstance,jdbcType=VARCHAR},
+      </if>
       <if test="syncStatus != null">
         sync_status = #{syncStatus,jdbcType=VARCHAR},
       </if>
@@ -321,6 +337,7 @@
       `mode` = #{mode,jdbcType=INTEGER},
       create_by = #{createBy,jdbcType=VARCHAR},
       create_time = #{createTime,jdbcType=BIGINT},
+      qrtz_instance = #{qrtzInstance,jdbcType=VARCHAR},
       sync_status = #{syncStatus,jdbcType=VARCHAR},
       info = #{info,jdbcType=LONGVARCHAR}
     where id = #{id,jdbcType=VARCHAR}
@@ -334,6 +351,7 @@
       `mode` = #{mode,jdbcType=INTEGER},
       create_by = #{createBy,jdbcType=VARCHAR},
       create_time = #{createTime,jdbcType=BIGINT},
+      qrtz_instance = #{qrtzInstance,jdbcType=VARCHAR},
       sync_status = #{syncStatus,jdbcType=VARCHAR}
     where id = #{id,jdbcType=VARCHAR}
   </update>
@@ -145,7 +145,7 @@ public class DatasourceService {
             DatasourceRequest datasourceRequest = new DatasourceRequest();
             datasourceRequest.setDatasource(datasource);
             datasourceProvider.initDataSource(datasourceRequest);
-            LogUtil.error("Succsss to init datasource connection pool: " + datasource.getName());
+            LogUtil.info("Succsss to init datasource connection pool: " + datasource.getName());
         }catch (Exception e){
             LogUtil.error("Failed to init datasource connection pool: " + datasource.getName(), e);
         }
@@ -1,5 +1,6 @@
 package io.dataease.job.sechedule;
 
+import com.google.gson.Gson;
 import io.dataease.commons.utils.LogUtil;
 import org.quartz.*;
 
@@ -15,7 +15,7 @@ public class ExtractDataJob extends DeScheduleJob{
 
     @Override
     void businessExecute(JobExecutionContext context) {
-        extractDataService.extractData(datasetTableId, taskId, updateType);
+        extractDataService.extractData(datasetTableId, taskId, updateType, context);
     }
 
 }
@@ -1,11 +1,10 @@
 package io.dataease.service.dataset;
 
 
+import com.fit2cloud.quartz.anno.QuartzScheduled;
 import com.google.gson.Gson;
 import io.dataease.base.domain.*;
-import io.dataease.base.mapper.DatasetTableIncrementalConfigMapper;
-import io.dataease.base.mapper.DatasetTableMapper;
-import io.dataease.base.mapper.DatasourceMapper;
+import io.dataease.base.mapper.*;
 import io.dataease.base.mapper.ext.ExtDataSetTableMapper;
 import io.dataease.commons.constants.JobStatus;
 import io.dataease.commons.utils.*;
@@ -75,6 +74,10 @@ public class DataSetTableService {
     private DataSetTableUnionService dataSetTableUnionService;
     @Resource
     private DataSetTableTaskLogService dataSetTableTaskLogService;
+    @Resource
+    private QrtzSchedulerStateMapper qrtzSchedulerStateMapper;
+    @Resource
+    private DatasetTableTaskLogMapper datasetTableTaskLogMapper;
     @Value("${upload.file.path}")
     private String path;
 
@@ -106,7 +109,7 @@
         saveTableField(datasetTable);
         if (StringUtils.equalsIgnoreCase(datasetTable.getType(), "excel")) {
             commonThreadPool.addTask(() -> {
-                extractDataService.extractData(datasetTable.getId(), null, "all_scope");
+                extractDataService.extractData(datasetTable.getId(), null, "all_scope", null);
             });
         }
     }
@@ -867,22 +870,48 @@ public class DataSetTableService {
     }
 
     public Boolean checkDorisTableIsExists(String id) throws Exception {
-//        Datasource dorisDatasource = (Datasource) CommonBeanFactory.getBean("DorisDatasource");
-//        JdbcProvider jdbcProvider = CommonBeanFactory.getBean(JdbcProvider.class);
-//        DatasourceRequest datasourceRequest = new DatasourceRequest();
-//        datasourceRequest.setDatasource(dorisDatasource);
-//        QueryProvider qp = ProviderFactory.getQueryProvider(dorisDatasource.getType());
-//        datasourceRequest.setQuery(qp.searchTable(DorisTableUtils.dorisName(id)));
-//        List<String[]> data = jdbcProvider.getData(datasourceRequest);
-//        return CollectionUtils.isNotEmpty(data);
-        return true;
+        Datasource dorisDatasource = (Datasource) CommonBeanFactory.getBean("DorisDatasource");
+        JdbcProvider jdbcProvider = CommonBeanFactory.getBean(JdbcProvider.class);
+        DatasourceRequest datasourceRequest = new DatasourceRequest();
+        datasourceRequest.setDatasource(dorisDatasource);
+        QueryProvider qp = ProviderFactory.getQueryProvider(dorisDatasource.getType());
+        datasourceRequest.setQuery(qp.searchTable(DorisTableUtils.dorisName(id)));
+        List<String[]> data = jdbcProvider.getData(datasourceRequest);
+        return CollectionUtils.isNotEmpty(data);
     }
 
+    @QuartzScheduled(cron = "0 0/3 * * * ?")
     public void updateDatasetTableStatus(){
-        DatasetTable record = new DatasetTable();
-        record.setSyncStatus(JobStatus.Completed.name());
+        List<QrtzSchedulerState> qrtzSchedulerStates = qrtzSchedulerStateMapper.selectByExample(null);
+        List<String> activeQrtzInstances = qrtzSchedulerStates.stream().filter(qrtzSchedulerState -> qrtzSchedulerState.getLastCheckinTime() + qrtzSchedulerState.getCheckinInterval() + 1000 > System.currentTimeMillis()).map(QrtzSchedulerStateKey::getInstanceName).collect(Collectors.toList());
+        List<DatasetTable> jobStoppeddDatasetTables = new ArrayList<>();
+
         DatasetTableExample example = new DatasetTableExample();
         example.createCriteria().andSyncStatusEqualTo(JobStatus.Underway.name());
+
+        datasetTableMapper.selectByExample(example).forEach(datasetTable -> {
+            if(StringUtils.isEmpty(datasetTable.getQrtzInstance()) || !activeQrtzInstances.contains(datasetTable.getQrtzInstance().substring(0, datasetTable.getQrtzInstance().length() - 13))){
+                jobStoppeddDatasetTables.add(datasetTable);
+            }
+        });
+
+        if(CollectionUtils.isEmpty(jobStoppeddDatasetTables)){
+            return;
+        }
+
+        DatasetTable record = new DatasetTable();
+        record.setSyncStatus(JobStatus.Completed.name());
+        example.clear();
+        example.createCriteria().andSyncStatusEqualTo(JobStatus.Underway.name()).andIdIn(jobStoppeddDatasetTables.stream().map(DatasetTable::getId).collect(Collectors.toList()));
         datasetTableMapper.updateByExampleSelective(record, example);
+
+        DatasetTableTaskLog datasetTableTaskLog = new DatasetTableTaskLog();
+        datasetTableTaskLog.setStatus(JobStatus.Error.name());
+        datasetTableTaskLog.setInfo("Job stopped due to system error.");
+
+        DatasetTableTaskLogExample datasetTableTaskLogExample = new DatasetTableTaskLogExample();
+        datasetTableTaskLogExample.createCriteria().andStatusEqualTo(JobStatus.Underway.name()).andTableIdIn(jobStoppeddDatasetTables.stream().map(DatasetTable::getId).collect(Collectors.toList()));
+        datasetTableTaskLogMapper.updateByExampleSelective(datasetTableTaskLog, datasetTableTaskLogExample);
+
     }
 }
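Note: the new updateDatasetTableStatus job above runs every three minutes, lists the Quartz scheduler instances that are still checking in, and treats any dataset table stuck in Underway whose recorded qrtz_instance does not map back to a live scheduler as orphaned: the table is flipped to Completed and its Underway task logs are marked Error. The liveness test, restated as a minimal sketch (the 1000 ms grace period comes straight from the code above; QrtzSchedulerState is the generated model for the QRTZ_SCHEDULER_STATE table):

    // A scheduler instance counts as alive if its next expected check-in
    // is no more than one second overdue.
    boolean isAlive(QrtzSchedulerState state) {
        return state.getLastCheckinTime() + state.getCheckinInterval() + 1000 > System.currentTimeMillis();
    }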
@@ -63,6 +63,7 @@ import org.pentaho.di.trans.steps.textfileoutput.TextFileOutputMeta;
 import org.pentaho.di.trans.steps.userdefinedjavaclass.UserDefinedJavaClassDef;
 import org.pentaho.di.trans.steps.userdefinedjavaclass.UserDefinedJavaClassMeta;
 import org.pentaho.di.www.SlaveServerJobStatus;
+import org.quartz.JobExecutionContext;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Lazy;
 import org.springframework.stereotype.Service;
@@ -183,7 +184,7 @@ public class ExtractDataService {
         return datasetTableMapper.updateByExampleSelective(datasetTable, example) == 0;
     }
 
-    public void extractData(String datasetTableId, String taskId, String type) {
+    public void extractData(String datasetTableId, String taskId, String type, JobExecutionContext context) {
         DatasetTable datasetTable = dataSetTableService.get(datasetTableId);
         if(updateSyncStatus(datasetTable)){
             LogUtil.info("Skip synchronization task for table : " + datasetTableId);
@@ -193,6 +194,10 @@
         UpdateType updateType = UpdateType.valueOf(type);
         Datasource datasource = new Datasource();
         try {
+            if(context != null){
+                datasetTable.setQrtzInstance(context.getFireInstanceId());
+                datasetTableMapper.updateByPrimaryKeySelective(datasetTable);
+            }
             if (StringUtils.isNotEmpty(datasetTable.getDataSourceId())) {
                 datasource = datasourceMapper.selectByPrimaryKey(datasetTable.getDataSourceId());
             } else {
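Note on why the watchdog strips the last 13 characters of qrtz_instance: the value stored here is JobExecutionContext.getFireInstanceId(). With Quartz's JDBC job store this id appears to be the scheduler instance id followed by a numeric counter seeded from System.currentTimeMillis(), i.e. a 13-digit suffix — an assumption about Quartz internals, not something stated in this commit. A hypothetical illustration with made-up values:

    // Assumed format: <schedulerInstanceId> + <13-digit counter>
    String fireInstanceId = "node01" + "1620000000000";   // "node011620000000000"
    String instanceName = fireInstanceId.substring(0, fireInstanceId.length() - 13);
    // instanceName == "node01", matching QRTZ_SCHEDULER_STATE.INSTANCE_NAME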
@@ -1 +1,2 @@
-ALTER TABLE `dataease`.`dataset_table` ADD COLUMN `sync_status` VARCHAR(45) NULL AFTER `create_time`;
+ALTER TABLE `dataset_table` ADD COLUMN `sync_status` VARCHAR(45) NULL AFTER `create_time`;
+ALTER TABLE `dataset_table` ADD COLUMN `qrtz_instance` VARCHAR(1024) NULL AFTER `create_time`;