feat(dataset): upload Excel and parse for preview

junjie 2021-04-19 18:33:53 +08:00
parent 7cdc0c220f
commit 109ce64fdd
4 changed files with 168 additions and 4 deletions

View File

@@ -7,6 +7,7 @@ import io.dataease.controller.request.dataset.DataSetTableRequest;
import io.dataease.datasource.dto.TableFiled;
import io.dataease.service.dataset.DataSetTableService;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import javax.annotation.Resource;
import java.util.List;
@@ -86,4 +87,9 @@ public class DataSetTableController {
public Map<String, Object> datasetDetail(@PathVariable String id) {
return dataSetTableService.getDatasetDetail(id);
}
@PostMapping("excel/upload")
public Map<String, Object> excelUpload(@RequestParam("file") MultipartFile file) throws Exception {
return dataSetTableService.excelSaveAndParse(file);
}
}
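For reference, a minimal sketch of how the new endpoint could be exercised with spring-test's MockMvc. This is not part of the commit: the test class name and CSV payload are illustrative, the request path and the "file" part name come from the controller above and the frontend change below, and spring-boot-starter-test (JUnit 5) plus whatever authentication setup the project requires are assumed.

import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.mock.web.MockMultipartFile;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
import org.springframework.test.web.servlet.result.MockMvcResultMatchers;

@SpringBootTest
@AutoConfigureMockMvc
class ExcelUploadEndpointTest { // illustrative name, not in the commit

    @Autowired
    private MockMvc mockMvc;

    @Test
    void uploadCsvForPreview() throws Exception {
        // The part name must be "file" to match @RequestParam("file") in the controller.
        MockMultipartFile file = new MockMultipartFile(
                "file", "demo.csv", "text/csv",
                "name,age\nTom,20\nJerry,21\n".getBytes());

        mockMvc.perform(MockMvcRequestBuilders.multipart("/dataset/table/excel/upload").file(file))
                .andExpect(MockMvcResultMatchers.status().isOk());
    }
}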

View File

@@ -18,10 +18,26 @@ import io.dataease.dto.dataset.DataTableInfoDTO;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.poi.hssf.usermodel.HSSFCell;
import org.apache.poi.hssf.usermodel.HSSFRow;
import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellType;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.xssf.usermodel.XSSFRow;
import org.apache.poi.xssf.usermodel.XSSFSheet;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import javax.annotation.Resource;
import java.io.*;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.nio.charset.StandardCharsets;
import java.sql.ResultSet;
import java.text.DecimalFormat;
import java.text.MessageFormat;
import java.util.*;
import java.util.stream.Collectors;
@@ -42,6 +58,8 @@ public class DataSetTableService {
private DataSetTableTaskService dataSetTableTaskService;
@Resource
private DatasetTableIncrementalConfigMapper datasetTableIncrementalConfigMapper;
@Value("${upload.file.path}")
private String path;
public void batchInsert(List<DatasetTable> datasetTable) throws Exception {
for (DatasetTable table : datasetTable) {
@@ -434,4 +452,120 @@ public class DataSetTableService {
}
return map;
}
public Map<String, Object> excelSaveAndParse(MultipartFile file) throws Exception {
String filename = file.getOriginalFilename();
String suffix = filename.substring(filename.lastIndexOf(".") + 1);
List<TableFiled> fields = new ArrayList<>();
List<String[]> data = new ArrayList<>();
List<Map<String, Object>> jsonArray = new ArrayList<>();
InputStream inputStream = file.getInputStream();
if (StringUtils.equalsIgnoreCase(suffix, "xls")) {
HSSFWorkbook workbook = new HSSFWorkbook(inputStream);
HSSFSheet sheet0 = workbook.getSheetAt(0);
if (sheet0.getNumMergedRegions() > 0) {
throw new RuntimeException("Sheet has merged regions.");
}
int rows = Math.min(sheet0.getPhysicalNumberOfRows(), 100);
for (int i = 0; i < rows; i++) {
HSSFRow row = sheet0.getRow(i);
String[] r = new String[row.getPhysicalNumberOfCells()];
for (int j = 0; j < row.getPhysicalNumberOfCells(); j++) {
if (i == 0) {
TableFiled tableFiled = new TableFiled();
tableFiled.setFieldName(readCell(row.getCell(j)));
tableFiled.setRemarks(readCell(row.getCell(j)));
fields.add(tableFiled);
} else {
r[j] = readCell(row.getCell(j));
}
}
if (i > 0) {
data.add(r);
}
}
} else if (StringUtils.equalsIgnoreCase(suffix, "xlsx")) {
XSSFWorkbook xssfWorkbook = new XSSFWorkbook(inputStream);
XSSFSheet sheet0 = xssfWorkbook.getSheetAt(0);
if (sheet0.getNumMergedRegions() > 0) {
throw new RuntimeException("Sheet has merged regions.");
}
int rows = Math.min(sheet0.getPhysicalNumberOfRows(), 100);
for (int i = 0; i < rows; i++) {
XSSFRow row = sheet0.getRow(i);
String[] r = new String[row.getPhysicalNumberOfCells()];
for (int j = 0; j < row.getPhysicalNumberOfCells(); j++) {
if (i == 0) {
TableFiled tableFiled = new TableFiled();
tableFiled.setFieldName(readCell(row.getCell(j)));
tableFiled.setRemarks(readCell(row.getCell(j)));
fields.add(tableFiled);
} else {
r[j] = readCell(row.getCell(j));
}
}
if (i > 0) {
data.add(r);
}
}
} else if (StringUtils.equalsIgnoreCase(suffix, "csv")) {
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
String s = reader.readLine(); // first line holds the field names
String[] split = s.split(",");
for (String s1 : split) {
TableFiled tableFiled = new TableFiled();
tableFiled.setFieldName(s1);
tableFiled.setRemarks(s1);
fields.add(tableFiled);
}
int num = 1;
String line = null;
while ((line = reader.readLine()) != null) {
if (num > 100) {
break;
}
data.add(line.split(","));
num++;
}
}
String[] fieldArray = fields.stream().map(TableFiled::getFieldName).toArray(String[]::new);
if (CollectionUtils.isNotEmpty(data)) {
jsonArray = data.stream().map(ele -> {
Map<String, Object> map = new HashMap<>();
for (int i = 0; i < ele.length; i++) {
map.put(fieldArray[i], ele[i]);
}
return map;
}).collect(Collectors.toList());
}
inputStream.close();
Map<String, Object> map = new HashMap<>();
map.put("fields", fields);
map.put("data", jsonArray);
return map;
}
private String readCell(Cell cell) {
if (cell == null) {
return "";
}
CellType cellTypeEnum = cell.getCellTypeEnum();
if (cellTypeEnum.equals(CellType.STRING)) {
return cell.getStringCellValue();
} else if (cellTypeEnum.equals(CellType.NUMERIC)) {
double d = cell.getNumericCellValue();
// integral values are shown without a decimal point; others are rounded to 2 places
if (d == Math.floor(d) && !Double.isInfinite(d)) {
return String.valueOf((long) d);
}
return BigDecimal.valueOf(d).setScale(2, RoundingMode.HALF_UP).toPlainString();
} else if (cellTypeEnum.equals(CellType.BOOLEAN)) {
return cell.getBooleanCellValue() ? "1" : "0";
} else {
return "";
}
}
}
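And as a usage sketch of the parse contract itself (again not part of the commit: MockMultipartFile comes from spring-test, the demo class name is illustrative, and in the application the service is injected by Spring rather than constructed directly):

import java.nio.charset.StandardCharsets;
import java.util.Map;
import org.springframework.mock.web.MockMultipartFile;
import io.dataease.service.dataset.DataSetTableService;

public class ExcelParsePreviewDemo { // illustrative class name

    public static void main(String[] args) throws Exception {
        String csv = "name,age\nTom,20\nJerry,21\n";
        MockMultipartFile file = new MockMultipartFile(
                "file", "demo.csv", "text/csv", csv.getBytes(StandardCharsets.UTF_8));

        // In the application this service is injected; constructing it here only works
        // because the CSV branch of excelSaveAndParse touches none of the wired mappers.
        DataSetTableService service = new DataSetTableService();
        Map<String, Object> result = service.excelSaveAndParse(file);

        System.out.println(result.get("fields")); // one TableFiled per header cell (name, age)
        System.out.println(result.get("data"));   // at most 100 rows, each keyed by the header names
    }
}

The returned "fields" and "data" entries are exactly what the Vue component below binds to its preview grid after a successful upload.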

View File

@@ -783,7 +783,8 @@ export default {
detail: '详情',
type: '类型',
create_by: '创建者',
create_time: '创建时间'
create_time: '创建时间',
preview_100_data: '预览前100行数据'
},
datasource: {
datasource: '数据源',

View File

@@ -9,7 +9,7 @@
<el-button size="mini" @click="cancel">
{{ $t('dataset.cancel') }}
</el-button>
<el-button size="mini" type="primary" @click="save">
<el-button :disabled="!form.name || fileList.length === 0" size="mini" type="primary" @click="save">
{{ $t('dataset.confirm') }}
</el-button>
</el-row>
@@ -24,11 +24,14 @@
</el-form-item>
<el-form-item>
<el-upload
action="/posts/"
:action="baseUrl+'dataset/table/excel/upload'"
:multiple="false"
:show-file-list="false"
:file-list="fileList"
accept=".xls,.xlsx,.csv"
:on-success="uploadSuccess"
name="file"
:headers="headers"
>
<el-button size="mini" type="primary">{{ $t('dataset.upload_file') }}</el-button>
</el-upload>
@@ -42,6 +45,7 @@
<el-card class="box-card dataPreview" shadow="never">
<div slot="header" class="clearfix">
<span>{{ $t('dataset.data_preview') }}</span>
<span style="font-size: 12px;color: #3d4d66;">{{ $t('dataset.preview_100_data') }}</span>
</div>
<div class="text item">
<ux-grid
@@ -70,6 +74,9 @@
<script>
import { post } from '@/api/dataset/dataset'
import { getToken } from '@/utils/auth'
const token = getToken()
export default {
name: 'AddExcel',
@@ -85,8 +92,12 @@ export default {
name: ''
},
fields: [],
data: [],
mode: '1',
height: 600
height: 600,
fileList: [],
headers: { Authorization: token },
baseUrl: process.env.VUE_APP_BASE_API
}
},
watch: {
@@ -120,6 +131,18 @@
// },
uploadSuccess(response, file, fileList) {
this.fields = response.data.fields
this.data = response.data.data
const datas = this.data
this.$refs.plxTable.reloadData(datas)
if (file.name.lastIndexOf('.') > 0) {
this.form.name = file.name.substring(0, file.name.lastIndexOf('.'))
}
this.fileList = fileList
},
save() {