diff --git a/backend/src/main/java/io/dataease/service/spark/SparkCalc.java b/backend/src/main/java/io/dataease/service/spark/SparkCalc.java
index 1006d898bf..df200f7691 100644
--- a/backend/src/main/java/io/dataease/service/spark/SparkCalc.java
+++ b/backend/src/main/java/io/dataease/service/spark/SparkCalc.java
@@ -16,7 +16,10 @@ import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.api.java.function.Function;
-import org.apache.spark.sql.*;
+import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.Row;
+import org.apache.spark.sql.RowFactory;
+import org.apache.spark.sql.SQLContext;
 import org.apache.spark.sql.types.DataTypes;
 import org.apache.spark.sql.types.StructField;
 import org.apache.spark.sql.types.StructType;
@@ -28,8 +31,6 @@ import java.util.ArrayList;
 import java.util.Base64;
 import java.util.List;
 
-import static org.reflections8.Reflections.collect;
-
 /**
  * @Author gin
  * @Date 2021/3/26 3:49 下午
@@ -98,8 +99,7 @@ public class SparkCalc {
         List<String[]> data = new ArrayList<>();
 
         // transform
-        JavaRDD<Row> rowJavaRDD = sql.javaRDD();
-        List<Row> list = rowJavaRDD.collect();
+        List<Row> list = sql.javaRDD().collect();
         for (Row row : list) {
             String[] r = new String[row.length()];
             for (int i = 0; i < row.length(); i++) {
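
For context, the change replaces the wildcard org.apache.spark.sql import with explicit imports, drops an unused static import, and collapses the intermediate JavaRDD<Row> local into a single chained call. Below is a minimal, self-contained sketch of what the patched block does, assuming a SQLContext and a query string as inputs; the class name CollectRowsSketch, the method collectAsStrings, the query parameter, and the null-safe toString() conversion are illustrative assumptions, not code taken from SparkCalc.

import java.util.ArrayList;
import java.util.List;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;

public class CollectRowsSketch {

    // Runs a query and flattens each Row into a String[], mirroring the
    // shape of the patched loop in SparkCalc (names here are illustrative).
    public static List<String[]> collectAsStrings(SQLContext sqlContext, String query) {
        Dataset<Row> sql = sqlContext.sql(query);

        // Patched form: chain javaRDD().collect() instead of keeping an
        // intermediate JavaRDD<Row> variable. Note that collect() pulls all
        // rows to the driver, so this only suits result sets that fit in memory.
        List<Row> list = sql.javaRDD().collect();

        List<String[]> data = new ArrayList<>();
        for (Row row : list) {
            String[] r = new String[row.length()];
            for (int i = 0; i < row.length(); i++) {
                Object value = row.get(i);   // raw column value, may be null
                r[i] = value == null ? null : value.toString();
            }
            data.add(r);
        }
        return data;
    }
}

The chained form is purely a readability cleanup: it produces the same List<Row> as the two-statement version it replaces.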