From eaf2b13882ce7cccfcd255cb03bfbb064eef717b Mon Sep 17 00:00:00 2001
From: junjie
Date: Wed, 14 Apr 2021 16:06:08 +0800
Subject: [PATCH] feat(backend): tidy up comments and imports
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 backend/src/main/java/io/dataease/config/CommonConfig.java | 2 --
 .../main/java/io/dataease/listener/AppStartListener.java   | 1 -
 .../io/dataease/listener/AppStartReadHBaseListener.java    | 6 ------
 .../java/io/dataease/service/chart/ChartViewService.java   | 1 -
 .../src/main/java/io/dataease/service/spark/SparkCalc.java | 5 -----
 5 files changed, 15 deletions(-)

diff --git a/backend/src/main/java/io/dataease/config/CommonConfig.java b/backend/src/main/java/io/dataease/config/CommonConfig.java
index 0728b2a831..6e98754712 100644
--- a/backend/src/main/java/io/dataease/config/CommonConfig.java
+++ b/backend/src/main/java/io/dataease/config/CommonConfig.java
@@ -2,8 +2,6 @@ package io.dataease.config;
 
 import com.fit2cloud.autoconfigure.QuartzAutoConfiguration;
 import io.dataease.commons.utils.CommonThreadPool;
-import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.sql.SQLContext;
 import org.apache.spark.sql.SparkSession;
 import org.pentaho.di.core.KettleEnvironment;
 import org.pentaho.di.repository.filerep.KettleFileRepository;
diff --git a/backend/src/main/java/io/dataease/listener/AppStartListener.java b/backend/src/main/java/io/dataease/listener/AppStartListener.java
index 6193506241..a2224eaaea 100644
--- a/backend/src/main/java/io/dataease/listener/AppStartListener.java
+++ b/backend/src/main/java/io/dataease/listener/AppStartListener.java
@@ -1,7 +1,6 @@
 package io.dataease.listener;
 
 import io.dataease.base.domain.DatasetTableTask;
-import io.dataease.job.sechedule.ScheduleManager;
 import io.dataease.service.ScheduleService;
 import io.dataease.service.dataset.DataSetTableTaskService;
 import org.springframework.boot.context.event.ApplicationReadyEvent;
diff --git a/backend/src/main/java/io/dataease/listener/AppStartReadHBaseListener.java b/backend/src/main/java/io/dataease/listener/AppStartReadHBaseListener.java
index d73e298f78..d8dcc2e3e6 100644
--- a/backend/src/main/java/io/dataease/listener/AppStartReadHBaseListener.java
+++ b/backend/src/main/java/io/dataease/listener/AppStartReadHBaseListener.java
@@ -3,16 +3,10 @@ package io.dataease.listener;
 import io.dataease.base.domain.DatasetTable;
 import io.dataease.base.domain.DatasetTableExample;
 import io.dataease.base.domain.DatasetTableField;
-import io.dataease.base.domain.DatasetTableFieldExample;
-import io.dataease.base.mapper.DatasetTableFieldMapper;
 import io.dataease.base.mapper.DatasetTableMapper;
-import io.dataease.commons.utils.CommonBeanFactory;
 import io.dataease.commons.utils.CommonThreadPool;
 import io.dataease.service.dataset.DataSetTableFieldsService;
 import io.dataease.service.spark.SparkCalc;
-import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.sql.SQLContext;
-import org.apache.spark.sql.SparkSession;
 import org.springframework.boot.context.event.ApplicationReadyEvent;
 import org.springframework.context.ApplicationListener;
 import org.springframework.core.annotation.Order;
diff --git a/backend/src/main/java/io/dataease/service/chart/ChartViewService.java b/backend/src/main/java/io/dataease/service/chart/ChartViewService.java
index 2100b7ada1..28f0e7b4d6 100644
--- a/backend/src/main/java/io/dataease/service/chart/ChartViewService.java
+++ b/backend/src/main/java/io/dataease/service/chart/ChartViewService.java
@@ -4,7 +4,6 @@ import com.google.gson.Gson;
 import com.google.gson.reflect.TypeToken;
 import io.dataease.base.domain.*;
 import io.dataease.base.mapper.ChartViewMapper;
-import io.dataease.base.mapper.DatasetTableFieldMapper;
 import io.dataease.commons.utils.AuthUtils;
 import io.dataease.commons.utils.BeanUtils;
 import io.dataease.controller.request.chart.ChartViewRequest;
diff --git a/backend/src/main/java/io/dataease/service/spark/SparkCalc.java b/backend/src/main/java/io/dataease/service/spark/SparkCalc.java
index b3c8cf71a2..b3e2010513 100644
--- a/backend/src/main/java/io/dataease/service/spark/SparkCalc.java
+++ b/backend/src/main/java/io/dataease/service/spark/SparkCalc.java
@@ -6,7 +6,6 @@ import io.dataease.dto.chart.ChartViewFieldDTO;
 import org.apache.commons.collections4.CollectionUtils;
 import org.apache.commons.lang3.ObjectUtils;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -28,7 +27,6 @@ import org.springframework.stereotype.Service;
 import scala.Tuple2;
 
 import javax.annotation.Resource;
-import java.math.BigDecimal;
 import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.Base64;
@@ -51,7 +49,6 @@ public class SparkCalc {
         JavaSparkContext sparkContext = new JavaSparkContext(spark.sparkContext());
 
         // Spark SQL Context
-//        SQLContext sqlContext = CommonBeanFactory.getBean(SQLContext.class);
         SQLContext sqlContext = new SQLContext(sparkContext);
         sqlContext.setConf("spark.sql.shuffle.partitions", env.getProperty("spark.sql.shuffle.partitions", "1"));
         sqlContext.setConf("spark.default.parallelism", env.getProperty("spark.default.parallelism", "1"));
@@ -82,7 +79,6 @@ public class SparkCalc {
         JavaSparkContext sparkContext = new JavaSparkContext(spark.sparkContext());
 
         // Spark SQL Context
-//        SQLContext sqlContext = CommonBeanFactory.getBean(SQLContext.class);
         SQLContext sqlContext = new SQLContext(sparkContext);
         sqlContext.setConf("spark.sql.shuffle.partitions", env.getProperty("spark.sql.shuffle.partitions", "1"));
         sqlContext.setConf("spark.default.parallelism", env.getProperty("spark.default.parallelism", "1"));
@@ -99,7 +95,6 @@ public class SparkCalc {
         String scanToString = new String(Base64.getEncoder().encode(proto.toByteArray()));
 
         // HBase config
-//        Configuration conf = CommonBeanFactory.getBean(Configuration.class);
         org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
         conf.set("hbase.zookeeper.quorum", env.getProperty("hbase.zookeeper.quorum"));
         conf.set("hbase.zookeeper.property.clientPort", env.getProperty("hbase.zookeeper.property.clientPort"));
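For context, the commented-out lines this patch deletes in SparkCalc.java were an earlier approach that resolved the SQLContext and the HBase Configuration through CommonBeanFactory; the surviving code builds them directly per call. The sketch below restates that surviving pattern in isolation so it can be read without the rest of the class. It is a minimal illustration, not the actual DataEase class: the class name SparkCalcSketch, the method names, and the constructor injection of Spring's Environment are invented for the example; only the SQLContext and Configuration setup lines mirror the diff context above.

import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.SparkSession;
import org.springframework.core.env.Environment;

// Hypothetical stand-in for SparkCalc; the class name and wiring are assumptions.
public class SparkCalcSketch {

    private final Environment env; // assumed to be supplied by Spring

    public SparkCalcSketch(Environment env) {
        this.env = env;
    }

    // Build a SQLContext on top of the shared SparkSession, as the patched code does,
    // instead of looking one up via CommonBeanFactory (the deleted commented-out line).
    public SQLContext buildSqlContext(SparkSession spark) {
        JavaSparkContext sparkContext = new JavaSparkContext(spark.sparkContext());

        SQLContext sqlContext = new SQLContext(sparkContext);
        // Both settings fall back to "1" when the property is absent.
        sqlContext.setConf("spark.sql.shuffle.partitions", env.getProperty("spark.sql.shuffle.partitions", "1"));
        sqlContext.setConf("spark.default.parallelism", env.getProperty("spark.default.parallelism", "1"));
        return sqlContext;
    }

    // Build the HBase client configuration inline from application properties,
    // again replacing the former CommonBeanFactory.getBean(Configuration.class) lookup.
    public org.apache.hadoop.conf.Configuration buildHBaseConf() {
        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        // These two properties have no default in the diff, so they are expected to be set.
        conf.set("hbase.zookeeper.quorum", env.getProperty("hbase.zookeeper.quorum"));
        conf.set("hbase.zookeeper.property.clientPort", env.getProperty("hbase.zookeeper.property.clientPort"));
        return conf;
    }
}

In the diff, the Base64-encoded Scan protobuf (scanToString) is prepared just before the HBase config block; presumably it is handed to Spark's Hadoop-input API together with this Configuration, but that code lies outside the hunks shown here.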