From cd65f762e5c443f993c19889dcb7c56c4e35f0ec Mon Sep 17 00:00:00 2001
From: junjie
Date: Tue, 30 Mar 2021 11:37:52 +0800
Subject: [PATCH] feat(backend): spark

---
 .../java/io/dataease/config/CommonConfig.java |  7 +++---
 .../io/dataease/service/spark/SparkCalc.java  | 25 ++++++++++++++++---
 2 files changed, 26 insertions(+), 6 deletions(-)

diff --git a/backend/src/main/java/io/dataease/config/CommonConfig.java b/backend/src/main/java/io/dataease/config/CommonConfig.java
index 09bc21dad6..6cc3ed2bf5 100644
--- a/backend/src/main/java/io/dataease/config/CommonConfig.java
+++ b/backend/src/main/java/io/dataease/config/CommonConfig.java
@@ -1,7 +1,6 @@
 package io.dataease.config;
 
 import com.fit2cloud.autoconfigure.QuartzAutoConfiguration;
-import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.SQLContext;
 import org.apache.spark.sql.SparkSession;
@@ -35,8 +34,10 @@ public class CommonConfig {
     @Bean
     @ConditionalOnMissingBean
     public JavaSparkContext javaSparkContext() {
-        SparkConf conf = new SparkConf().setAppName(env.getProperty("spark.appName", "DataeaseJob")).setMaster(env.getProperty("spark.master", "local[*]"));
-        SparkSession spark = SparkSession.builder().config(conf).getOrCreate();
+        SparkSession spark = SparkSession.builder()
+                .appName(env.getProperty("spark.appName", "DataeaseJob"))
+                .master(env.getProperty("spark.master", "local[*]"))
+                .getOrCreate();
         JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
         return sc;
     }
diff --git a/backend/src/main/java/io/dataease/service/spark/SparkCalc.java b/backend/src/main/java/io/dataease/service/spark/SparkCalc.java
index df200f7691..96e76f5497 100644
--- a/backend/src/main/java/io/dataease/service/spark/SparkCalc.java
+++ b/backend/src/main/java/io/dataease/service/spark/SparkCalc.java
@@ -59,14 +59,22 @@ public class SparkCalc {
             if (x.getDeType() == 0 || x.getDeType() == 1) {
                 list.add(Bytes.toString(result.getValue(column_family.getBytes(), x.getOriginName().getBytes())));
             } else if (x.getDeType() == 2) {
-                list.add(Long.valueOf(Bytes.toString(result.getValue(column_family.getBytes(), x.getOriginName().getBytes()))));
+                String l = Bytes.toString(result.getValue(column_family.getBytes(), x.getOriginName().getBytes()));
+                if (StringUtils.isEmpty(l)) {
+                    l = "0";
+                }
+                list.add(Long.valueOf(l));
             }
         });
         yAxis.forEach(y -> {
             if (y.getDeType() == 0 || y.getDeType() == 1) {
                 list.add(Bytes.toString(result.getValue(column_family.getBytes(), y.getOriginName().getBytes())));
             } else if (y.getDeType() == 2) {
-                list.add(Long.valueOf(Bytes.toString(result.getValue(column_family.getBytes(), y.getOriginName().getBytes()))));
+                String l = Bytes.toString(result.getValue(column_family.getBytes(), y.getOriginName().getBytes()));
+                if (StringUtils.isEmpty(l)) {
+                    l = "0";
+                }
+                list.add(Long.valueOf(l));
             }
         });
         return RowFactory.create(list.toArray());
@@ -99,7 +107,8 @@ public class SparkCalc {
         List<String[]> data = new ArrayList<>();
 
         // transform
-        List<Row> list = sql.javaRDD().collect();
+//        List<Row> list = sql.javaRDD().collect();
+        List<Row> list = sql.collectAsList();
         for (Row row : list) {
             String[] r = new String[row.length()];
             for (int i = 0; i < row.length(); i++) {
@@ -108,6 +117,16 @@ public class SparkCalc {
             data.add(r);
         }
 
+//        Iterator<Row> rowIterator = sql.toLocalIterator();
+//        while (rowIterator.hasNext()){
+//            Row row = rowIterator.next();
+//            String[] r = new String[row.length()];
+//            for (int i = 0; i < row.length(); i++) {
+//                r[i] = row.get(i).toString();
+//            }
+//            data.add(r);
+//        }
+
         return data;
     }
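
Note on the guard added in SparkCalc: Long.valueOf throws NumberFormatException
for a null or empty string, which is why an empty HBase cell is now mapped to
"0" before parsing. A minimal standalone sketch of the same pattern, assuming
StringUtils here is org.apache.commons.lang3.StringUtils; the helper name
parseLongOrZero is hypothetical, not part of the patch:

    import org.apache.commons.lang3.StringUtils;

    public class SafeLongParse {
        // Map a missing/empty cell value to 0L instead of letting
        // Long.valueOf(null) or Long.valueOf("") throw NumberFormatException.
        public static Long parseLongOrZero(String value) {
            return StringUtils.isEmpty(value) ? 0L : Long.valueOf(value);
        }

        public static void main(String[] args) {
            System.out.println(parseLongOrZero(null)); // 0
            System.out.println(parseLongOrZero(""));   // 0
            System.out.println(parseLongOrZero("42")); // 42
        }
    }

A helper like this could also fold the duplicated if/else blocks in the xAxis
and yAxis lambdas into one call, though the patch keeps the checks inline.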
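
Note on the commented-out block left in the transform step: sql.toLocalIterator()
is a lower-memory alternative to collectAsList(), because the driver only needs
to hold roughly one partition of rows at a time instead of the whole result set.
A compilable sketch of that variant, with the generics written out; the wrapper
class RowTransform is hypothetical, added only to make the example self-contained:

    import java.util.ArrayList;
    import java.util.Iterator;
    import java.util.List;

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;

    public class RowTransform {
        // Stream rows through the driver one partition at a time rather than
        // materializing the full result set, as collectAsList() would.
        public static List<String[]> toStringArrays(Dataset<Row> sql) {
            List<String[]> data = new ArrayList<>();
            Iterator<Row> rowIterator = sql.toLocalIterator();
            while (rowIterator.hasNext()) {
                Row row = rowIterator.next();
                String[] r = new String[row.length()];
                for (int i = 0; i < row.length(); i++) {
                    r[i] = row.get(i).toString();
                }
                data.add(r);
            }
            return data;
        }
    }

The trade-off is extra Spark jobs (roughly one per partition) in exchange for a
smaller driver heap, which is presumably why the patch keeps collectAsList() as
the active path and leaves the iterator version commented out.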