diff --git a/core/core-backend/src/main/java/io/dataease/config/DeMvcConfig.java b/core/core-backend/src/main/java/io/dataease/config/DeMvcConfig.java index 3d77809d14..a21c988dc5 100644 --- a/core/core-backend/src/main/java/io/dataease/config/DeMvcConfig.java +++ b/core/core-backend/src/main/java/io/dataease/config/DeMvcConfig.java @@ -21,12 +21,16 @@ public class DeMvcConfig implements WebMvcConfigurer { public void addResourceHandlers(ResourceHandlerRegistry registry) { String workDir = FILE_PROTOCOL + ensureSuffix(WORK_DIR, FILE_SEPARATOR); String uploadUrlPattern = ensureBoth(URL_SEPARATOR + UPLOAD_URL_PREFIX, AuthConstant.DE_API_PREFIX, URL_SEPARATOR) + "**"; - registry.addResourceHandler(uploadUrlPattern) - .addResourceLocations(workDir); + registry.addResourceHandler(uploadUrlPattern).addResourceLocations(workDir); + // map String mapDir = FILE_PROTOCOL + ensureSuffix(MAP_DIR, FILE_SEPARATOR); String mapUrlPattern = ensureBoth(MAP_URL, AuthConstant.DE_API_PREFIX, URL_SEPARATOR) + "**"; - registry.addResourceHandler(mapUrlPattern) - .addResourceLocations(mapDir); + registry.addResourceHandler(mapUrlPattern).addResourceLocations(mapDir); + + String geoDir = FILE_PROTOCOL + ensureSuffix(CUSTOM_MAP_DIR, FILE_SEPARATOR); + String geoUrlPattern = ensureBoth(GEO_URL, AuthConstant.DE_API_PREFIX, URL_SEPARATOR) + "**"; + registry.addResourceHandler(geoUrlPattern).addResourceLocations(geoDir); + } } diff --git a/core/core-backend/src/main/java/io/dataease/datasource/provider/CalciteProvider.java b/core/core-backend/src/main/java/io/dataease/datasource/provider/CalciteProvider.java index 15c1e4a30d..43438cb658 100644 --- a/core/core-backend/src/main/java/io/dataease/datasource/provider/CalciteProvider.java +++ b/core/core-backend/src/main/java/io/dataease/datasource/provider/CalciteProvider.java @@ -16,7 +16,6 @@ import io.dataease.datasource.request.DatasourceRequest; import io.dataease.datasource.server.EngineServer; import io.dataease.datasource.type.*; import io.dataease.engine.constant.SQLConstants; -import io.dataease.engine.func.scalar.ScalarFunctions; import io.dataease.exception.DEException; import io.dataease.i18n.Translator; import io.dataease.utils.BeanUtils; @@ -26,6 +25,7 @@ import io.dataease.utils.LogUtil; import jakarta.annotation.PostConstruct; import jakarta.annotation.Resource; import org.apache.calcite.adapter.jdbc.JdbcSchema; +import org.apache.calcite.func.scalar.ScalarFunctions; import org.apache.calcite.jdbc.CalciteConnection; import org.apache.calcite.schema.Schema; import org.apache.calcite.schema.SchemaPlus; diff --git a/core/core-backend/src/main/java/io/dataease/engine/func/scalar/ScalarFunctions.java b/core/core-backend/src/main/java/io/dataease/engine/func/scalar/ScalarFunctions.java deleted file mode 100644 index 684cd2baec..0000000000 --- a/core/core-backend/src/main/java/io/dataease/engine/func/scalar/ScalarFunctions.java +++ /dev/null @@ -1,194 +0,0 @@ -package io.dataease.engine.func.scalar; - -import io.dataease.engine.utils.Utils; -import org.apache.commons.lang3.ObjectUtils; -import org.apache.commons.lang3.StringUtils; - -import java.text.SimpleDateFormat; -import java.util.Date; - -public class ScalarFunctions { - public static String format = "yyyy-MM-dd HH:mm:ss"; - public static String minuteFormat = "yyyy-MM-dd HH:mm"; - public static String hourFormat = "yyyy-MM-dd HH"; - public static String dateOnly = "yyyy-MM-dd"; - public static String monthOnly = "yyyy-MM"; - public static String yearOnly = "yyyy"; - public static String timeOnly = 
"HH:mm:ss"; - - public static String date_format(String date, String format) { - try { - if (StringUtils.isEmpty(date)) { - return null; - } - format = get_date_format(date); - SimpleDateFormat simpleDateFormat = new SimpleDateFormat(format); - Date parse = simpleDateFormat.parse(date); - return simpleDateFormat.format(parse); - } catch (Exception e) { - return null; - } - } - - public static String de_date_format(String date, String format) { - try { - if (StringUtils.isEmpty(date)) { - return null; - } - SimpleDateFormat simpleDateFormat = new SimpleDateFormat(format); - Date parse = simpleDateFormat.parse(date); - return simpleDateFormat.format(parse); - } catch (Exception e) { - return null; - } - } - - public static String str_to_date(String date, String format) { - try { - if (StringUtils.isEmpty(date)) { - return null; - } - format = get_date_format(date); - SimpleDateFormat simpleDateFormat = new SimpleDateFormat(format); - Date parse = simpleDateFormat.parse(date); - return simpleDateFormat.format(parse); - } catch (Exception e) { - return null; - } - } - - public static String de_str_to_date(String date, String format) { - try { - if (StringUtils.isEmpty(date)) { - return null; - } - SimpleDateFormat simpleDateFormat = new SimpleDateFormat(format); - Date parse = simpleDateFormat.parse(date); - return simpleDateFormat.format(parse); - } catch (Exception e) { - return null; - } - } - - public static String cast_date_format(String date, String sourceFormat, String targetFormat) { - try { - if (StringUtils.isEmpty(date)) { - return null; - } - sourceFormat = get_date_format(date); - SimpleDateFormat simpleDateFormat = new SimpleDateFormat(sourceFormat); - Date parse = simpleDateFormat.parse(date); - - SimpleDateFormat s = new SimpleDateFormat(targetFormat); - return s.format(parse); - } catch (Exception e) { - return null; - } - } - - public static String de_cast_date_format(String date, String sourceFormat, String targetFormat) { - try { - if (StringUtils.isEmpty(date)) { - return null; - } - SimpleDateFormat simpleDateFormat = new SimpleDateFormat(sourceFormat); - Date parse = simpleDateFormat.parse(date); - - SimpleDateFormat s = new SimpleDateFormat(targetFormat); - return s.format(parse); - } catch (Exception e) { - return null; - } - } - - public static Long unix_timestamp(String date) { - try { - if (StringUtils.isEmpty(date)) { - return null; - } - return Utils.allDateFormat2Long(date); - } catch (Exception e) { - return null; - } - } - - public static String get_date_format(String date) { - // check date split '-' or '/' - String format1 = format; - String minuteFormat1 = minuteFormat; - String hourFormat1 = hourFormat; - String timeOnly1 = timeOnly; - String dateOnly1 = dateOnly; - String monthOnly1 = monthOnly; - String yearOnly1 = yearOnly; - if (date != null && date.contains("/")) { - format1 = format1.replaceAll("-", "/"); - minuteFormat1 = minuteFormat1.replaceAll("-", "/"); - hourFormat1 = hourFormat1.replaceAll("-", "/"); - timeOnly1 = timeOnly1.replaceAll("-", "/"); - dateOnly1 = dateOnly1.replaceAll("-", "/"); - monthOnly1 = monthOnly1.replaceAll("-", "/"); - yearOnly1 = yearOnly1.replaceAll("-", "/"); - } - try { - SimpleDateFormat simpleDateFormat = new SimpleDateFormat(format1); - simpleDateFormat.parse(date); - return format1; - } catch (Exception e) { - } - try { - SimpleDateFormat simpleDateFormat = new SimpleDateFormat(minuteFormat1); - simpleDateFormat.parse(date); - return minuteFormat1; - } catch (Exception e) { - } - try { - SimpleDateFormat 
simpleDateFormat = new SimpleDateFormat(hourFormat1); - simpleDateFormat.parse(date); - return hourFormat1; - } catch (Exception e) { - } - try { - SimpleDateFormat simpleDateFormat = new SimpleDateFormat(timeOnly1); - simpleDateFormat.parse(date); - return timeOnly1; - } catch (Exception e) { - } - try { - SimpleDateFormat simpleDateFormat = new SimpleDateFormat(dateOnly1); - simpleDateFormat.parse(date); - return dateOnly1; - } catch (Exception e) { - } - try { - SimpleDateFormat simpleDateFormat = new SimpleDateFormat(monthOnly1); - simpleDateFormat.parse(date); - return monthOnly1; - } catch (Exception e) { - } - try { - SimpleDateFormat simpleDateFormat = new SimpleDateFormat(yearOnly1); - simpleDateFormat.parse(date); - return yearOnly1; - } catch (Exception e) { - } - return format1; - } - - public static String from_unixtime(Long timestamp, String format) { - try { - if (ObjectUtils.isEmpty(timestamp)) { - return null; - } - SimpleDateFormat simpleDateFormat = new SimpleDateFormat(format); - Date date = new Date(timestamp); - return simpleDateFormat.format(date); - } catch (Exception e) { - return null; - } - } - - public static String concat(String str1, String str2) { - return str1 + str2; - } -} diff --git a/core/core-backend/src/main/java/io/dataease/engine/trans/CustomWhere2Str.java b/core/core-backend/src/main/java/io/dataease/engine/trans/CustomWhere2Str.java index 725581e5e6..642be9c3d6 100644 --- a/core/core-backend/src/main/java/io/dataease/engine/trans/CustomWhere2Str.java +++ b/core/core-backend/src/main/java/io/dataease/engine/trans/CustomWhere2Str.java @@ -6,8 +6,8 @@ import io.dataease.api.dataset.union.model.SQLMeta; import io.dataease.api.dataset.union.model.SQLObj; import io.dataease.dto.dataset.DatasetTableFieldDTO; import io.dataease.engine.constant.SQLConstants; -import io.dataease.engine.func.scalar.ScalarFunctions; import io.dataease.engine.utils.Utils; +import org.apache.calcite.func.scalar.ScalarFunctions; import org.apache.commons.lang3.ObjectUtils; import org.apache.commons.lang3.StringUtils; diff --git a/core/core-backend/src/main/java/io/dataease/engine/trans/ExtWhere2Str.java b/core/core-backend/src/main/java/io/dataease/engine/trans/ExtWhere2Str.java index 1bdcdcace1..a05deff48d 100644 --- a/core/core-backend/src/main/java/io/dataease/engine/trans/ExtWhere2Str.java +++ b/core/core-backend/src/main/java/io/dataease/engine/trans/ExtWhere2Str.java @@ -5,8 +5,8 @@ import io.dataease.api.dataset.union.model.SQLMeta; import io.dataease.api.dataset.union.model.SQLObj; import io.dataease.dto.dataset.DatasetTableFieldDTO; import io.dataease.engine.constant.SQLConstants; -import io.dataease.engine.func.scalar.ScalarFunctions; import io.dataease.engine.utils.Utils; +import org.apache.calcite.func.scalar.ScalarFunctions; import org.apache.commons.lang3.ObjectUtils; import org.apache.commons.lang3.StringUtils; @@ -55,7 +55,7 @@ public class ExtWhere2Str { if (field.getDeType() == 1) { String date_format; if (StringUtils.containsIgnoreCase(request.getOperator(), "between")) { - date_format = ScalarFunctions.format; + date_format = "yyyy-MM-dd HH:mm:ss"; } else { date_format = ScalarFunctions.get_date_format(value.get(0)); } diff --git a/core/core-backend/src/main/java/io/dataease/engine/trans/WhereTree2Str.java b/core/core-backend/src/main/java/io/dataease/engine/trans/WhereTree2Str.java index 47bd9d9f7b..5087a44a7b 100644 --- a/core/core-backend/src/main/java/io/dataease/engine/trans/WhereTree2Str.java +++ 
b/core/core-backend/src/main/java/io/dataease/engine/trans/WhereTree2Str.java @@ -8,8 +8,8 @@ import io.dataease.api.permissions.dataset.dto.DatasetRowPermissionsTreeObj; import io.dataease.dto.dataset.DatasetTableFieldDTO; import io.dataease.engine.constant.ExtFieldConstant; import io.dataease.engine.constant.SQLConstants; -import io.dataease.engine.func.scalar.ScalarFunctions; import io.dataease.engine.utils.Utils; +import org.apache.calcite.func.scalar.ScalarFunctions; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.ObjectUtils; import org.apache.commons.lang3.StringUtils; diff --git a/core/core-backend/src/main/java/io/dataease/map/bo/AreaBO.java b/core/core-backend/src/main/java/io/dataease/map/bo/AreaBO.java new file mode 100644 index 0000000000..c9620c84a3 --- /dev/null +++ b/core/core-backend/src/main/java/io/dataease/map/bo/AreaBO.java @@ -0,0 +1,13 @@ +package io.dataease.map.bo; + +import io.dataease.map.dao.auto.entity.Area; +import lombok.Data; +import lombok.EqualsAndHashCode; + +import java.io.Serializable; + +@EqualsAndHashCode(callSuper = true) +@Data +public class AreaBO extends Area implements Serializable { + private boolean custom = false; +} diff --git a/core/core-backend/src/main/java/io/dataease/map/dao/ext/entity/CoreAreaCustom.java b/core/core-backend/src/main/java/io/dataease/map/dao/ext/entity/CoreAreaCustom.java new file mode 100644 index 0000000000..808d7eef37 --- /dev/null +++ b/core/core-backend/src/main/java/io/dataease/map/dao/ext/entity/CoreAreaCustom.java @@ -0,0 +1,15 @@ +package io.dataease.map.dao.ext.entity; + +import lombok.Data; + +import java.io.Serializable; + +@Data +public class CoreAreaCustom implements Serializable { + + private String id; + + private String pid; + + private String name; +} diff --git a/core/core-backend/src/main/java/io/dataease/map/dao/ext/mapper/CoreAreaCustomMapper.java b/core/core-backend/src/main/java/io/dataease/map/dao/ext/mapper/CoreAreaCustomMapper.java new file mode 100644 index 0000000000..90605915f5 --- /dev/null +++ b/core/core-backend/src/main/java/io/dataease/map/dao/ext/mapper/CoreAreaCustomMapper.java @@ -0,0 +1,9 @@ +package io.dataease.map.dao.ext.mapper; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import io.dataease.map.dao.ext.entity.CoreAreaCustom; +import org.apache.ibatis.annotations.Mapper; + +@Mapper +public interface CoreAreaCustomMapper extends BaseMapper { +} diff --git a/core/core-backend/src/main/java/io/dataease/map/manage/MapManage.java b/core/core-backend/src/main/java/io/dataease/map/manage/MapManage.java index 98ddcc2e26..448339da25 100644 --- a/core/core-backend/src/main/java/io/dataease/map/manage/MapManage.java +++ b/core/core-backend/src/main/java/io/dataease/map/manage/MapManage.java @@ -1,17 +1,34 @@ package io.dataease.map.manage; -import cn.hutool.core.collection.CollectionUtil; +import io.dataease.api.map.dto.GeometryNodeCreator; import io.dataease.api.map.vo.AreaNode; +import io.dataease.constant.StaticResourceConstants; +import io.dataease.exception.DEException; +import io.dataease.map.bo.AreaBO; import io.dataease.map.dao.auto.entity.Area; import io.dataease.map.dao.auto.mapper.AreaMapper; +import io.dataease.map.dao.ext.entity.CoreAreaCustom; +import io.dataease.map.dao.ext.mapper.CoreAreaCustomMapper; import io.dataease.utils.BeanUtils; +import io.dataease.utils.CommonBeanFactory; +import io.dataease.utils.LogUtil; import jakarta.annotation.Resource; +import org.apache.commons.collections4.CollectionUtils; +import 
org.apache.commons.lang3.ObjectUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.springframework.cache.annotation.CacheEvict;
 import org.springframework.cache.annotation.Cacheable;
 import org.springframework.stereotype.Component;
+import org.springframework.transaction.annotation.Transactional;
+import org.springframework.web.multipart.MultipartFile;
+import java.io.File;
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.stream.Collectors;
 import static io.dataease.constant.CacheConstant.CommonCacheConstant.WORLD_MAP_CACHE;
@@ -19,6 +36,8 @@ import static io.dataease.constant.CacheConstant.CommonCacheConstant.WORLD_MAP_C
 
 public class MapManage {
     private final static AreaNode WORLD;
+    private static final String GEO_PREFIX = "geo_";
+
     static {
         WORLD = AreaNode.builder()
                 .id("000")
@@ -30,13 +49,34 @@ public class MapManage {
     @Resource
     private AreaMapper areaMapper;
+    @Resource
+    private CoreAreaCustomMapper coreAreaCustomMapper;
+
+    public List<Area> defaultArea() {
+        return areaMapper.selectList(null);
+    }
+
+    private MapManage proxy() {
+        return CommonBeanFactory.getBean(MapManage.class);
+    }
+
     @Cacheable(value = WORLD_MAP_CACHE, key = "'world_map'")
     public AreaNode getWorldTree() {
-        List<Area> areas = areaMapper.selectList(null);
+        List<Area> areas = proxy().defaultArea();
+        List<AreaBO> areaBOS = areas.stream().map(item -> BeanUtils.copyBean(new AreaBO(), item)).collect(Collectors.toList());
+        List<CoreAreaCustom> coreAreaCustoms = coreAreaCustomMapper.selectList(null);
+        if (CollectionUtils.isNotEmpty(coreAreaCustoms)) {
+            List<AreaBO> customBoList = coreAreaCustoms.stream().map(item -> {
+                AreaBO areaBO = BeanUtils.copyBean(new AreaBO(), item);
+                areaBO.setCustom(true);
+                return areaBO;
+            }).toList();
+            areaBOS.addAll(customBoList);
+        }
         WORLD.setChildren(new ArrayList<>());
         var areaNodeMap = new HashMap<String, AreaNode>();
         areaNodeMap.put(WORLD.getId(), WORLD);
-        areas.forEach(area -> {
+        areaBOS.forEach(area -> {
            var node = areaNodeMap.get(area.getId());
            if (node == null) {
                node = AreaNode.builder().build();
@@ -64,5 +104,80 @@ public class MapManage {
         return WORLD;
     }
 
+    @CacheEvict(cacheNames = WORLD_MAP_CACHE, key = "'world_map'")
+    @Transactional
+    public void saveMapGeo(GeometryNodeCreator request, MultipartFile file) {
+        List<Area> areas = proxy().defaultArea();
+        String code = getBusiGeoCode(request.getCode());
+
+        AtomicReference<String> atomicReference = new AtomicReference<>();
+        if (areas.stream().anyMatch(area -> {
+            boolean exist = area.getId().equals(code);
+            if (exist) {
+                atomicReference.set(area.getName());
+            }
+            return exist;
+        })) {
+            DEException.throwException(String.format("Area code [%s] already exists for [%s]", code, atomicReference.get()));
+        }
+
+        CoreAreaCustom originData = null;
+        if (ObjectUtils.isNotEmpty(originData = coreAreaCustomMapper.selectById(getDaoGeoCode(code)))) {
+            DEException.throwException(String.format("Area code [%s] already exists for [%s]", code, originData.getName()));
+        }
+
+        CoreAreaCustom coreAreaCustom = new CoreAreaCustom();
+        coreAreaCustom.setId(getDaoGeoCode(code));
+        coreAreaCustom.setPid(request.getPid());
+        coreAreaCustom.setName(request.getName());
+        coreAreaCustomMapper.insert(coreAreaCustom);
+
+        File geoFile = buildGeoFile(code);
+        try {
+            file.transferTo(geoFile);
+        } catch (IOException e) {
+            LogUtil.error(e.getMessage());
+            DEException.throwException(e);
+        }
+    }
+
+    @CacheEvict(cacheNames = WORLD_MAP_CACHE, key = "'world_map'")
+    @Transactional
+    public void deleteGeo(String code) {
+        if (!StringUtils.startsWith(code, GEO_PREFIX)) {
+            DEException.throwException("Built-in geometry cannot be deleted");
+        }
+        coreAreaCustomMapper.deleteById(code);
+        File file = buildGeoFile(code);
+        if (file.exists()) {
+            file.delete();
+        }
+    }
+
+    private String getDaoGeoCode(String code) {
+        return StringUtils.startsWith(code, GEO_PREFIX) ? code : (GEO_PREFIX + code);
+    }
+
+    private String getBusiGeoCode(String code) {
+        return StringUtils.startsWith(code, GEO_PREFIX) ? code.substring(GEO_PREFIX.length()) : code;
+    }
+
+    private File buildGeoFile(String code) {
+        String id = getBusiGeoCode(code);
+        String customMapDir = StaticResourceConstants.CUSTOM_MAP_DIR;
+        String countryCode = countryCode(id);
+        String fileDirPath = customMapDir + "/" + countryCode + "/";
+        File dir = new File(fileDirPath);
+        if (!dir.exists()) {
+            dir.mkdirs();
+        }
+        String filePath = fileDirPath + id + ".json";
+        return new File(filePath);
+    }
+
+    private String countryCode(String code) {
+        return code.substring(0, 3);
+    }
+
 }
diff --git a/core/core-backend/src/main/java/io/dataease/map/server/GeoServer.java b/core/core-backend/src/main/java/io/dataease/map/server/GeoServer.java
new file mode 100644
index 0000000000..4d4c0ebbe3
--- /dev/null
+++ b/core/core-backend/src/main/java/io/dataease/map/server/GeoServer.java
@@ -0,0 +1,26 @@
+package io.dataease.map.server;
+
+import io.dataease.api.map.GeoApi;
+import io.dataease.api.map.dto.GeometryNodeCreator;
+import io.dataease.map.manage.MapManage;
+import jakarta.annotation.Resource;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+import org.springframework.web.multipart.MultipartFile;
+
+@RestController
+@RequestMapping("/geometry")
+public class GeoServer implements GeoApi {
+
+    @Resource
+    private MapManage mapManage;
+    @Override
+    public void saveMapGeo(GeometryNodeCreator request, MultipartFile file) {
+        mapManage.saveMapGeo(request, file);
+    }
+
+    @Override
+    public void deleteGeo(String id) {
+        mapManage.deleteGeo(id);
+    }
+}
diff --git a/core/core-backend/src/main/java/io/dataease/menu/manage/MenuManage.java b/core/core-backend/src/main/java/io/dataease/menu/manage/MenuManage.java
index f1a88383ef..87f7a08b88 100644
--- a/core/core-backend/src/main/java/io/dataease/menu/manage/MenuManage.java
+++ b/core/core-backend/src/main/java/io/dataease/menu/manage/MenuManage.java
@@ -95,6 +95,7 @@ public class MenuManage {
                 || coreMenu.getId().equals(18L)
                 || coreMenu.getId().equals(21L)
                 || coreMenu.getPid().equals(21L)
-                || coreMenu.getId().equals(25L);
+                || coreMenu.getId().equals(25L)
+                || coreMenu.getId().equals(26L);
     }
 }
diff --git a/core/core-backend/src/main/java/io/dataease/system/dao/ext/mapper/ExtCoreSysSettingMapper.java b/core/core-backend/src/main/java/io/dataease/system/dao/ext/mapper/ExtCoreSysSettingMapper.java
new file mode 100644
index 0000000000..cff68a42b5
--- /dev/null
+++ b/core/core-backend/src/main/java/io/dataease/system/dao/ext/mapper/ExtCoreSysSettingMapper.java
@@ -0,0 +1,10 @@
+package io.dataease.system.dao.ext.mapper;
+
+import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
+import io.dataease.system.dao.auto.entity.CoreSysSetting;
+import io.dataease.system.dao.auto.mapper.CoreSysSettingMapper;
+import org.springframework.stereotype.Component;
+
+@Component("extCoreSysSettingMapper")
+public class ExtCoreSysSettingMapper extends ServiceImpl<CoreSysSettingMapper, CoreSysSetting> {
+}
diff --git a/core/core-backend/src/main/java/io/dataease/system/manage/SysParameterManage.java b/core/core-backend/src/main/java/io/dataease/system/manage/SysParameterManage.java
index 29198b5241..4f9d477557 100644
--- a/core/core-backend/src/main/java/io/dataease/system/manage/SysParameterManage.java
+++ b/core/core-backend/src/main/java/io/dataease/system/manage/SysParameterManage.java
@@ -1,15 +1,22 @@
 package io.dataease.system.manage;
 
 import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
+import io.dataease.api.system.vo.SettingItemVO;
+import io.dataease.license.config.XpackInteract;
 import io.dataease.system.dao.auto.entity.CoreSysSetting;
 import io.dataease.system.dao.auto.mapper.CoreSysSettingMapper;
+import io.dataease.system.dao.ext.mapper.ExtCoreSysSettingMapper;
+import io.dataease.utils.BeanUtils;
+import io.dataease.utils.CommonBeanFactory;
 import io.dataease.utils.IDUtils;
+import io.dataease.utils.SystemSettingUtils;
 import jakarta.annotation.Resource;
 import org.apache.commons.lang3.ObjectUtils;
 import org.springframework.stereotype.Component;
+import org.springframework.transaction.annotation.Transactional;
 import org.springframework.util.CollectionUtils;
-import java.util.Collection;
+import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;
@@ -22,6 +29,9 @@ public class SysParameterManage {
     @Resource
     private CoreSysSettingMapper coreSysSettingMapper;
 
+    @Resource
+    private ExtCoreSysSettingMapper extCoreSysSettingMapper;
+
     public String singleVal(String key) {
         QueryWrapper<CoreSysSetting> queryWrapper = new QueryWrapper<>();
         queryWrapper.eq("pkey", key);
@@ -53,22 +63,54 @@ public class SysParameterManage {
         sysSetting.setPval(val);
         coreSysSettingMapper.updateById(sysSetting);
     }
-    void save(List boList) {
-        List<CoreSysSetting> all = all();
-    }
-    private List<CoreSysSetting> all() {
-        QueryWrapper<CoreSysSetting> queryWrapper = new QueryWrapper<>();
-        return coreSysSettingMapper.selectList(queryWrapper);
-    }
-    public Map<String, String> groupVal(String groupKey) {
+    public Map<String, String> groupVal(String groupKey) {
         QueryWrapper<CoreSysSetting> queryWrapper = new QueryWrapper<>();
-        queryWrapper.like("pkey", groupKey);
+        queryWrapper.likeRight("pkey", groupKey);
+        queryWrapper.orderByAsc("sort");
         List<CoreSysSetting> sysSettings = coreSysSettingMapper.selectList(queryWrapper);
         if (!CollectionUtils.isEmpty(sysSettings)) {
             return sysSettings.stream().collect(Collectors.toMap(CoreSysSetting::getPkey, CoreSysSetting::getPval));
         }
         return null;
     }
+
+    public List<CoreSysSetting> groupList(String groupKey) {
+        QueryWrapper<CoreSysSetting> queryWrapper = new QueryWrapper<>();
+        queryWrapper.likeRight("pkey", groupKey);
+        queryWrapper.orderByAsc("sort");
+        return coreSysSettingMapper.selectList(queryWrapper);
+    }
+
+    @XpackInteract(value = "perSetting")
+    public List<SettingItemVO> convert(List<CoreSysSetting> sysSettings) {
+        return sysSettings.stream().sorted(Comparator.comparing(CoreSysSetting::getSort)).map(item -> BeanUtils.copyBean(new SettingItemVO(), item)).toList();
+    }
+
+
+    @Transactional
+    public void saveGroup(List<SettingItemVO> vos, String groupKey) {
+        QueryWrapper<CoreSysSetting> queryWrapper = new QueryWrapper<>();
+        queryWrapper.likeRight("pkey", groupKey);
+        coreSysSettingMapper.delete(queryWrapper);
+        List<CoreSysSetting> sysSettings = vos.stream().filter(vo -> !SystemSettingUtils.xpackSetting(vo.getPkey())).map(item -> {
+            CoreSysSetting sysSetting = BeanUtils.copyBean(new CoreSysSetting(), item);
+            sysSetting.setId(IDUtils.snowID());
+            return sysSetting;
+        }).collect(Collectors.toList());
+        extCoreSysSettingMapper.saveBatch(sysSettings);
+    }
+
+
+    @XpackInteract(value = "perSetting", before = false)
+    @Transactional
+    public void saveBasic(List<SettingItemVO> vos) {
+        String key = "basic.";
+        proxy().saveGroup(vos, key);
+    }
+
+    private SysParameterManage proxy() {
+        return CommonBeanFactory.getBean(SysParameterManage.class);
+    }
 }
diff --git a/core/core-backend/src/main/java/io/dataease/system/server/SysParameterServer.java b/core/core-backend/src/main/java/io/dataease/system/server/SysParameterServer.java
index 937d57c67b..eb33745da1 100644
--- a/core/core-backend/src/main/java/io/dataease/system/server/SysParameterServer.java
+++ b/core/core-backend/src/main/java/io/dataease/system/server/SysParameterServer.java
@@ -2,12 +2,16 @@ package io.dataease.system.server;
 
 import io.dataease.api.system.SysParameterApi;
 import io.dataease.api.system.request.OnlineMapEditor;
+import io.dataease.api.system.vo.SettingItemVO;
+import io.dataease.system.dao.auto.entity.CoreSysSetting;
 import io.dataease.system.manage.SysParameterManage;
 import jakarta.annotation.Resource;
 import org.apache.commons.lang3.StringUtils;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RestController;
 
+import java.util.List;
+
 @RestController
 @RequestMapping("/sysParameter")
 public class SysParameterServer implements SysParameterApi {
@@ -29,4 +33,16 @@ public class SysParameterServer implements SysParameterApi {
         String key = sysParameterManage.queryOnlineMap();
         return StringUtils.isNotBlank(key) ? key : "";
     }
+
+    @Override
+    public List<SettingItemVO> queryBasicSetting() {
+        String key = "basic.";
+        List<CoreSysSetting> coreSysSettings = sysParameterManage.groupList(key);
+        return sysParameterManage.convert(coreSysSettings);
+    }
+
+    @Override
+    public void saveBasicSetting(List<SettingItemVO> settingItemVOS) {
+        sysParameterManage.saveBasic(settingItemVOS);
+    }
 }
diff --git a/core/core-backend/src/main/java/org/apache/calcite/sql/SqlDialect.java b/core/core-backend/src/main/java/org/apache/calcite/sql/SqlDialect.java
deleted file mode 100644
index 689e05dd6c..0000000000
--- a/core/core-backend/src/main/java/org/apache/calcite/sql/SqlDialect.java
+++ /dev/null
@@ -1,1811 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */ -package org.apache.calcite.sql; - -import com.google.common.base.Preconditions; -import com.google.common.base.Suppliers; -import com.google.common.collect.ImmutableSet; -import org.apache.calcite.avatica.util.Casing; -import org.apache.calcite.avatica.util.Quoting; -import org.apache.calcite.avatica.util.TimeUnit; -import org.apache.calcite.config.CharLiteralStyle; -import org.apache.calcite.config.NullCollation; -import org.apache.calcite.linq4j.function.Experimental; -import org.apache.calcite.rel.RelFieldCollation; -import org.apache.calcite.rel.core.JoinRelType; -import org.apache.calcite.rel.type.RelDataType; -import org.apache.calcite.rel.type.RelDataTypeSystem; -import org.apache.calcite.rel.type.RelDataTypeSystemImpl; -import org.apache.calcite.rex.RexCall; -import org.apache.calcite.rex.RexNode; -import org.apache.calcite.sql.dialect.JethroDataSqlDialect; -import org.apache.calcite.sql.fun.SqlInternalOperators; -import org.apache.calcite.sql.fun.SqlStdOperatorTable; -import org.apache.calcite.sql.parser.SqlParser; -import org.apache.calcite.sql.parser.SqlParserPos; -import org.apache.calcite.sql.type.AbstractSqlType; -import org.apache.calcite.sql.type.SqlTypeUtil; -import org.apache.calcite.sql.validate.SqlConformance; -import org.apache.calcite.sql.validate.SqlConformanceEnum; -import org.apache.calcite.util.format.FormatModel; -import org.apache.calcite.util.format.FormatModels; -import org.checkerframework.checker.nullness.qual.Nullable; -import org.checkerframework.dataflow.qual.Pure; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.DatabaseMetaData; -import java.sql.ResultSet; -import java.sql.Timestamp; -import java.text.SimpleDateFormat; -import java.util.List; -import java.util.Locale; -import java.util.Set; -import java.util.function.Supplier; - -import static java.util.Objects.requireNonNull; -import static org.apache.calcite.util.DateTimeStringUtils.getDateFormatter; - -/** - * SqlDialect encapsulates the differences between dialects of SQL. - * - *

- * <p>It is used by classes such as {@link SqlWriter} and
- * {@link org.apache.calcite.sql.util.SqlBuilder}.
- *
- * <p>To add a new {@link SqlDialect} sub-class, extends this class to hold 2 public final
- * static member:
- * <ul>
- * <li>DEFAULT_CONTEXT: a default {@link Context} instance, which can be used to customize
- * or extending the dialect if the DEFAULT instance does not meet the requests
- * <li>DEFAULT: the default {@link SqlDialect} instance with context properties defined with
- * DEFAULT_CONTEXT
- * </ul>
- */ -public class SqlDialect { - //~ Static fields/initializers --------------------------------------------- - - protected static final Logger LOGGER = - LoggerFactory.getLogger(SqlDialect.class); - - /** - * Empty context. - */ - public static final Context EMPTY_CONTEXT = emptyContext(); - - /** - * Built-in scalar functions and operators common for every dialect. - */ - protected static final Set BUILT_IN_OPERATORS_LIST = - ImmutableSet.builder() - .add(SqlStdOperatorTable.ABS) - .add(SqlStdOperatorTable.ACOS) - .add(SqlStdOperatorTable.AND) - .add(SqlStdOperatorTable.ASIN) - .add(SqlStdOperatorTable.BETWEEN) - .add(SqlStdOperatorTable.CASE) - .add(SqlStdOperatorTable.CAST) - .add(SqlStdOperatorTable.CEIL) - .add(SqlStdOperatorTable.CHAR_LENGTH) - .add(SqlStdOperatorTable.CHARACTER_LENGTH) - .add(SqlStdOperatorTable.COALESCE) - .add(SqlStdOperatorTable.CONCAT) - .add(SqlStdOperatorTable.CBRT) - .add(SqlStdOperatorTable.COS) - .add(SqlStdOperatorTable.COT) - .add(SqlStdOperatorTable.DIVIDE) - .add(SqlStdOperatorTable.EQUALS) - .add(SqlStdOperatorTable.FLOOR) - .add(SqlStdOperatorTable.GREATER_THAN) - .add(SqlStdOperatorTable.GREATER_THAN_OR_EQUAL) - .add(SqlStdOperatorTable.IN) - .add(SqlStdOperatorTable.IS_NOT_NULL) - .add(SqlStdOperatorTable.IS_NULL) - .add(SqlStdOperatorTable.LESS_THAN) - .add(SqlStdOperatorTable.LESS_THAN_OR_EQUAL) - .add(SqlStdOperatorTable.LIKE) - .add(SqlStdOperatorTable.LN) - .add(SqlStdOperatorTable.LOG10) - .add(SqlStdOperatorTable.MINUS) - .add(SqlStdOperatorTable.MOD) - .add(SqlStdOperatorTable.MULTIPLY) - .add(SqlStdOperatorTable.NOT) - .add(SqlStdOperatorTable.NOT_BETWEEN) - .add(SqlStdOperatorTable.NOT_EQUALS) - .add(SqlStdOperatorTable.NOT_IN) - .add(SqlStdOperatorTable.NOT_LIKE) - .add(SqlStdOperatorTable.OR) - .add(SqlStdOperatorTable.PI) - .add(SqlStdOperatorTable.PLUS) - .add(SqlStdOperatorTable.POWER) - .add(SqlStdOperatorTable.RAND) - .add(SqlStdOperatorTable.ROUND) - .add(SqlStdOperatorTable.ROW) - .add(SqlStdOperatorTable.SIN) - .add(SqlStdOperatorTable.SQRT) - .add(SqlStdOperatorTable.SUBSTRING) - .add(SqlStdOperatorTable.TAN) - .build(); - - - //~ Instance fields -------------------------------------------------------- - - protected final @Nullable String identifierQuoteString; - protected final @Nullable String identifierEndQuoteString; - protected final @Nullable String identifierEscapedQuote; - protected final String literalQuoteString; - protected final String literalEndQuoteString; - protected final String literalEscapedQuote; - private final DatabaseProduct databaseProduct; - protected final NullCollation nullCollation; - private final RelDataTypeSystem dataTypeSystem; - private final Casing unquotedCasing; - private final Casing quotedCasing; - private final boolean caseSensitive; - - //~ Constructors ----------------------------------------------------------- - - /** - * Creates a SqlDialect from a DatabaseMetaData. - * - *

Does not maintain a reference to the DatabaseMetaData -- or, more - * importantly, to its {@link java.sql.Connection} -- after this call has - * returned. - * - * @param databaseMetaData used to determine which dialect of SQL to generate - * @deprecated Replaced by {@link SqlDialectFactory} - */ - @Deprecated // to be removed before 2.0 - public static SqlDialect create(DatabaseMetaData databaseMetaData) { - return new SqlDialectFactoryImpl().create(databaseMetaData); - } - - @Deprecated // to be removed before 2.0 - public SqlDialect(DatabaseProduct databaseProduct, String databaseProductName, - String identifierQuoteString) { - this(EMPTY_CONTEXT - .withDatabaseProduct(databaseProduct) - .withDatabaseProductName(databaseProductName) - .withIdentifierQuoteString(identifierQuoteString)); - } - - /** - * Creates a SqlDialect. - * - * @param databaseProduct Database product; may be UNKNOWN, never null - * @param databaseProductName Database product name from JDBC driver - * @param identifierQuoteString String to quote identifiers. Null if quoting - * is not supported. If "[", close quote is - * deemed to be "]". - * @param nullCollation Whether NULL values appear first or last - * @deprecated Use {@link #SqlDialect(Context)} - */ - @Deprecated // to be removed before 2.0 - public SqlDialect(DatabaseProduct databaseProduct, String databaseProductName, - String identifierQuoteString, NullCollation nullCollation) { - this(EMPTY_CONTEXT - .withDatabaseProduct(databaseProduct) - .withDatabaseProductName(databaseProductName) - .withIdentifierQuoteString(identifierQuoteString) - .withNullCollation(nullCollation)); - } - - /** - * Creates a SqlDialect. - * - * @param context All the information necessary to create a dialect - */ - public SqlDialect(Context context) { - this.nullCollation = requireNonNull(context.nullCollation()); - this.dataTypeSystem = requireNonNull(context.dataTypeSystem()); - this.databaseProduct = requireNonNull(context.databaseProduct()); - this.literalQuoteString = requireNonNull(context.literalQuoteString()); - this.literalEndQuoteString = requireNonNull(context.literalQuoteString()); - this.literalEscapedQuote = - requireNonNull(context.literalEscapedQuoteString()); - String identifierQuoteString = context.identifierQuoteString(); - if (identifierQuoteString != null) { - identifierQuoteString = identifierQuoteString.trim(); - if (identifierQuoteString.equals("")) { - identifierQuoteString = null; - } - } - this.identifierQuoteString = identifierQuoteString; - this.identifierEndQuoteString = - identifierQuoteString == null ? null - : identifierQuoteString.equals("[") ? "]" - : identifierQuoteString; - this.identifierEscapedQuote = - context.identifierEscapedQuoteString() == null - ? identifierQuoteString == null - ? null - : this.identifierEndQuoteString + this.identifierEndQuoteString - : context.identifierEscapedQuoteString(); - this.unquotedCasing = requireNonNull(context.unquotedCasing()); - this.quotedCasing = requireNonNull(context.quotedCasing()); - this.caseSensitive = context.caseSensitive(); - } - - //~ Methods ---------------------------------------------------------------- - - /** - * Creates an empty context. Use {@link #EMPTY_CONTEXT} to reference the instance. 
- */ - private static Context emptyContext() { - return new ContextImpl(DatabaseProduct.UNKNOWN, null, null, -1, -1, - "'", "''", null, null, - Casing.UNCHANGED, Casing.TO_UPPER, true, SqlConformanceEnum.DEFAULT, - NullCollation.HIGH, RelDataTypeSystemImpl.DEFAULT, - JethroDataSqlDialect.JethroInfo.EMPTY); - } - - /** - * Converts a product name and version (per the JDBC driver) into a product - * enumeration. - * - * @param productName Product name - * @param productVersion Product version - * @return database product - */ - @Deprecated // to be removed before 2.0 - public static DatabaseProduct getProduct( - String productName, - String productVersion) { - final String upperProductName = - productName.toUpperCase(Locale.ROOT).trim(); - switch (upperProductName) { - case "ACCESS": - return DatabaseProduct.ACCESS; - case "APACHE DERBY": - return DatabaseProduct.DERBY; - case "CLICKHOUSE": - return DatabaseProduct.CLICKHOUSE; - case "DBMS:CLOUDSCAPE": - return DatabaseProduct.DERBY; - case "EXASOL": - return DatabaseProduct.EXASOL; - case "FIREBOLT": - return DatabaseProduct.FIREBOLT; - case "HIVE": - return DatabaseProduct.HIVE; - case "INGRES": - return DatabaseProduct.INGRES; - case "INTERBASE": - return DatabaseProduct.INTERBASE; - case "LUCIDDB": - return DatabaseProduct.LUCIDDB; - case "ORACLE": - return DatabaseProduct.ORACLE; - case "PHOENIX": - return DatabaseProduct.PHOENIX; - case "PRESTO": - case "AWS.ATHENA": - return DatabaseProduct.PRESTO; - case "MYSQL (INFOBRIGHT)": - return DatabaseProduct.INFOBRIGHT; - case "MYSQL": - return DatabaseProduct.MYSQL; - case "REDSHIFT": - return DatabaseProduct.REDSHIFT; - default: - break; - } - // Now the fuzzy matches. - if (productName.startsWith("DB2")) { - return DatabaseProduct.DB2; - } else if (upperProductName.contains("FIREBIRD")) { - return DatabaseProduct.FIREBIRD; - } else if (productName.startsWith("Informix")) { - return DatabaseProduct.INFORMIX; - } else if (upperProductName.contains("NETEZZA")) { - return DatabaseProduct.NETEZZA; - } else if (upperProductName.contains("PARACCEL")) { - return DatabaseProduct.PARACCEL; - } else if (productName.startsWith("HP Neoview")) { - return DatabaseProduct.NEOVIEW; - } else if (upperProductName.contains("POSTGRE")) { - return DatabaseProduct.POSTGRESQL; - } else if (upperProductName.contains("SQL SERVER")) { - return DatabaseProduct.MSSQL; - } else if (upperProductName.contains("SYBASE")) { - return DatabaseProduct.SYBASE; - } else if (upperProductName.contains("TERADATA")) { - return DatabaseProduct.TERADATA; - } else if (upperProductName.contains("HSQL")) { - return DatabaseProduct.HSQLDB; - } else if (upperProductName.contains("H2")) { - return DatabaseProduct.H2; - } else if (upperProductName.contains("VERTICA")) { - return DatabaseProduct.VERTICA; - } else if (upperProductName.contains("GOOGLE BIGQUERY") - || upperProductName.contains("GOOGLE BIG QUERY")) { - return DatabaseProduct.BIG_QUERY; - } else { - return DatabaseProduct.UNKNOWN; - } - } - - /** - * Returns the type system implementation for this dialect. - */ - public RelDataTypeSystem getTypeSystem() { - return dataTypeSystem; - } - - /** - * Encloses an identifier in quotation marks appropriate for the current SQL - * dialect. - * - *

For example, quoteIdentifier("emp") yields a string - * containing "emp" in Oracle, and a string containing - * [emp] in Access. - * - * @param val Identifier to quote - * @return Quoted identifier - */ - public String quoteIdentifier(String val) { - return quoteIdentifier(new StringBuilder(), val).toString(); - } - - /** - * Encloses an identifier in quotation marks appropriate for the current SQL - * dialect, writing the result to a {@link StringBuilder}. - * - *

For example, quoteIdentifier("emp") yields a string - * containing "emp" in Oracle, and a string containing - * [emp] in Access. - * - * @param buf Buffer - * @param val Identifier to quote - * @return The buffer - */ - public StringBuilder quoteIdentifier( - StringBuilder buf, - String val) { - if (identifierQuoteString == null // quoting is not supported - || identifierEndQuoteString == null - || identifierEscapedQuote == null - || !identifierNeedsQuote(val)) { - buf.append(val); - } else { - buf.append(identifierQuoteString); - buf.append(val.replace(identifierEndQuoteString, identifierEscapedQuote)); - buf.append(identifierEndQuoteString); - } - return buf; - } - - /** - * Quotes a multi-part identifier. - * - * @param buf Buffer - * @param identifiers List of parts of the identifier to quote - * @return The buffer - */ - public StringBuilder quoteIdentifier( - StringBuilder buf, - List identifiers) { - int i = 0; - for (String identifier : identifiers) { - if (i++ > 0) { - buf.append('.'); - } - quoteIdentifier(buf, identifier); - } - return buf; - } - - /** - * Returns whether to quote an identifier. - * By default, all identifiers are quoted. - */ - protected boolean identifierNeedsQuote(String val) { - return true; - } - - /** - * Converts a string into a string literal. - * - *

For example, {@code "can't run"} becomes {@code "'can''t run'"}. - */ - public final String quoteStringLiteral(String val) { - final StringBuilder buf = new StringBuilder(); - quoteStringLiteral(buf, null, val); - return buf.toString(); - } - - /** - * Appends a string literal to a buffer. - * - * @param buf Buffer - * @param charsetName Character set name, e.g. "utf16", or null - * @param val String value - */ - public void quoteStringLiteral(StringBuilder buf, @Nullable String charsetName, - String val) { - buf.append(literalQuoteString); - buf.append(val.replace(literalEndQuoteString, literalEscapedQuote)); - buf.append(literalEndQuoteString); - } - - public void unparseCall(SqlWriter writer, SqlCall call, int leftPrec, - int rightPrec) { - SqlOperator operator = call.getOperator(); - switch (call.getKind()) { - case ROW: - // Remove the ROW keyword if the dialect does not allow that. - if (!getConformance().allowExplicitRowValueConstructor()) { - if (writer.isAlwaysUseParentheses()) { - // If writer always uses parentheses, it will have started parentheses - // that we now regret. Use a special variant of the operator that does - // not print parentheses, so that we can use the ones already started. - operator = SqlInternalOperators.ANONYMOUS_ROW_NO_PARENTHESES; - } else { - // Use an operator that prints "(a, b, c)" rather than - // "ROW (a, b, c)". - operator = SqlInternalOperators.ANONYMOUS_ROW; - } - } - // fall through - default: - operator.unparse(writer, call, leftPrec, rightPrec); - } - } - - public void unparseDateTimeLiteral(SqlWriter writer, - SqlAbstractDateTimeLiteral literal, int leftPrec, int rightPrec) { - writer.literal(literal.toString()); - } - - public void unparseSqlDatetimeArithmetic(SqlWriter writer, - SqlCall call, SqlKind sqlKind, int leftPrec, int rightPrec) { - final SqlWriter.Frame frame = writer.startList("(", ")"); - call.operand(0).unparse(writer, leftPrec, rightPrec); - writer.sep((SqlKind.PLUS == sqlKind) ? "+" : "-"); - call.operand(1).unparse(writer, leftPrec, rightPrec); - writer.endList(frame); - // Only two parameters are present normally. - // Checking parameter count to prevent errors. - if (call.getOperandList().size() > 2) { - call.operand(2).unparse(writer, leftPrec, rightPrec); - } - } - - /** - * Converts an interval qualifier to a SQL string. The default implementation - * returns strings such as - * INTERVAL '1 2:3:4' DAY(4) TO SECOND(4). 
- */ - public void unparseSqlIntervalQualifier(SqlWriter writer, - SqlIntervalQualifier qualifier, RelDataTypeSystem typeSystem) { - final String start = qualifier.timeUnitRange.startUnit.name(); - final int fractionalSecondPrecision = - qualifier.getFractionalSecondPrecision(typeSystem); - final int startPrecision = qualifier.getStartPrecision(typeSystem); - if (qualifier.timeUnitRange.startUnit == TimeUnit.SECOND) { - if (!qualifier.useDefaultFractionalSecondPrecision()) { - final SqlWriter.Frame frame = writer.startFunCall(start); - writer.print(startPrecision); - writer.sep(",", true); - writer.print(qualifier.getFractionalSecondPrecision(typeSystem)); - writer.endList(frame); - } else if (!qualifier.useDefaultStartPrecision()) { - final SqlWriter.Frame frame = writer.startFunCall(start); - writer.print(startPrecision); - writer.endList(frame); - } else { - writer.keyword(start); - } - } else { - if (!qualifier.useDefaultStartPrecision()) { - final SqlWriter.Frame frame = writer.startFunCall(start); - writer.print(startPrecision); - writer.endList(frame); - } else { - writer.keyword(start); - } - - if (null != qualifier.timeUnitRange.endUnit) { - writer.keyword("TO"); - final String end = qualifier.timeUnitRange.endUnit.name(); - if ((TimeUnit.SECOND == qualifier.timeUnitRange.endUnit) - && !qualifier.useDefaultFractionalSecondPrecision()) { - final SqlWriter.Frame frame = writer.startFunCall(end); - writer.print(fractionalSecondPrecision); - writer.endList(frame); - } else { - writer.keyword(end); - } - } - } - } - - /** - * Converts an interval literal to a SQL string. The default implementation - * returns strings such as - * INTERVAL '1 2:3:4' DAY(4) TO SECOND(4). - */ - public void unparseSqlIntervalLiteral(SqlWriter writer, - SqlIntervalLiteral literal, int leftPrec, int rightPrec) { - SqlIntervalLiteral.IntervalValue interval = - literal.getValueAs(SqlIntervalLiteral.IntervalValue.class); - writer.keyword("INTERVAL"); - if (interval.getSign() == -1) { - writer.print("-"); - } - writer.literal("'" + interval.getIntervalLiteral() + "'"); - unparseSqlIntervalQualifier(writer, interval.getIntervalQualifier(), - RelDataTypeSystem.DEFAULT); - } - - /** - * Converts table scan hints. The default implementation suppresses all hints. - */ - public void unparseTableScanHints(SqlWriter writer, - SqlNodeList hints, int leftPrec, int rightPrec) { - } - - /** - * Returns whether the string contains any characters outside the - * comfortable 7-bit ASCII range (32 through 127, plus linefeed (10) and - * carriage return (13)). - * - *

Such characters can be used unquoted in SQL character literals. - * - * @param s String - * @return Whether string contains any non-7-bit-ASCII characters - */ - protected static boolean containsNonAscii(String s) { - for (int i = 0; i < s.length(); i++) { - char c = s.charAt(i); - if (c < 32 && c != 10 && c != 13 || c >= 128) { - return true; - } - } - return false; - } - - /** - * Converts a string into a unicode string literal. For example, - * can't{tab}run\ becomes u'can''t\0009run\\'. - */ - public void quoteStringLiteralUnicode(StringBuilder buf, String val) { - buf.append("u&'"); - for (int i = 0; i < val.length(); i++) { - char c = val.charAt(i); - if (c < 32 || c >= 128) { - buf.append('\\'); - buf.append(HEXITS[(c >> 12) & 0xf]); - buf.append(HEXITS[(c >> 8) & 0xf]); - buf.append(HEXITS[(c >> 4) & 0xf]); - buf.append(HEXITS[c & 0xf]); - } else if (c == '\'' || c == '\\') { - buf.append(c); - buf.append(c); - } else { - buf.append(c); - } - } - buf.append("'"); - } - - private static final char[] HEXITS = { - '0', '1', '2', '3', '4', '5', '6', '7', - '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', - }; - - /** - * Converts a string literal back into a string. For example, 'can''t - * run' becomes can't run. - */ - public @Nullable String unquoteStringLiteral(@Nullable String val) { - if (val != null - && val.startsWith(literalQuoteString) - && val.endsWith(literalEndQuoteString)) { - final String stripped = - val.substring(literalQuoteString.length(), - val.length() - literalEndQuoteString.length()); - return stripped.replace(literalEscapedQuote, literalEndQuoteString); - } - return val; - } - - protected boolean allowsAs() { - return true; - } - - // -- behaviors -- - - /** - * Whether a sub-query in the FROM clause must have an alias. - * - *

For example, in PostgreSQL, this query is legal: - * - *

{@code SELECT * FROM (SELECT * FROM Emp) As e}
- * - *

but remove the alias {@code e} and it is not: - * - *

{@code SELECT * FROM (SELECT * FROM Emp)}
- * - *

In Oracle, both queries are legal. - */ - public boolean requiresAliasForFromItems() { - return false; - } - - /** - * Returns whether a qualified table in the FROM clause has an implicit alias - * which consists of just the table name. - * - *

For example, in {@link DatabaseProduct#ORACLE} - * - *

SELECT * FROM sales.emp
- * - *

is equivalent to - * - *

SELECT * FROM sales.emp AS emp
- * - *

and therefore - * - *

SELECT emp.empno FROM sales.emp
- * - *

is valid. But {@link DatabaseProduct#DB2} does not have an implicit - * alias, so the previous query it not valid; you need to write - * - *

SELECT sales.emp.empno FROM sales.emp
- * - *

Returns true for all databases except DB2. - */ - public boolean hasImplicitTableAlias() { - return true; - } - - /** - * Converts a timestamp to a SQL timestamp literal, e.g. - * {@code TIMESTAMP '2009-12-17 12:34:56'}. - * - *

Timestamp values do not have a time zone. We therefore interpret them - * as the number of milliseconds after the UTC epoch, and the formatted - * value is that time in UTC. - * - *

In particular, - * - *

quoteTimestampLiteral(new Timestamp(0)); - *
- * - *

returns {@code TIMESTAMP '1970-01-01 00:00:00'}, regardless of the JVM's - * time zone. - * - * @param timestamp Timestamp - * @return SQL timestamp literal - */ - public String quoteTimestampLiteral(Timestamp timestamp) { - final SimpleDateFormat format = getDateFormatter("'TIMESTAMP' ''yyyy-MM-dd HH:mm:ss''"); - return format.format(timestamp); - } - - /** - * Returns the database this dialect belongs to, - * {@link SqlDialect.DatabaseProduct#UNKNOWN} if not known, never null. - * - *

Please be judicious in how you use this method. If you wish to determine - * whether a dialect has a particular capability or behavior, it is usually - * better to add a method to SqlDialect and override that method in particular - * sub-classes of SqlDialect. - * - * @return Database product - * @deprecated To be removed without replacement - */ - @Deprecated // to be removed before 2.0 - public DatabaseProduct getDatabaseProduct() { - return databaseProduct; - } - - /** - * Returns whether the dialect supports character set names as part of a - * data type, for instance {@code VARCHAR(30) CHARACTER SET `ISO-8859-1`}. - */ - @Pure - public boolean supportsCharSet() { - return true; - } - - /** - * Returns whether the dialect supports GROUP BY literals. - * - *

For instance, in {@link DatabaseProduct#REDSHIFT}, the following queries - * are illegal: - * - *

{@code
-     * select avg(salary)
-     * from emp
-     * group by true
-     *
-     * select avg(salary)
-     * from emp
-     * group by 'a', DATE '2022-01-01'
-     * }
- */ - public boolean supportsGroupByLiteral() { - return true; - } - - public boolean supportsAggregateFunction(SqlKind kind) { - switch (kind) { - case COUNT: - case SUM: - case SUM0: - case MIN: - case MAX: - return true; - default: - break; - } - return false; - } - - /** - * Returns whether this dialect supports APPROX_COUNT_DISTINCT functions. - */ - public boolean supportsApproxCountDistinct() { - return false; - } - - /** - * Returns whether this dialect supports the use of FILTER clauses for - * aggregate functions. e.g. {@code COUNT(*) FILTER (WHERE a = 2)}. - */ - public boolean supportsAggregateFunctionFilter() { - return true; - } - - /** - * Returns whether this dialect supports window functions (OVER clause). - */ - public boolean supportsWindowFunctions() { - return true; - } - - /** - * Returns whether this dialect supports a given function or operator. - * It only applies to built-in scalar functions and operators, since - * user-defined functions and procedures should be read by JdbcSchema. - */ - public boolean supportsFunction(SqlOperator operator, RelDataType type, - List paramTypes) { - switch (operator.kind) { - case AND: - case BETWEEN: - case CASE: - case CAST: - case CEIL: - case COALESCE: - case DIVIDE: - case EQUALS: - case FLOOR: - case GREATER_THAN: - case GREATER_THAN_OR_EQUAL: - case IN: - case IS_NULL: - case IS_NOT_NULL: - case LESS_THAN: - case LESS_THAN_OR_EQUAL: - case MINUS: - case MOD: - case NOT: - case NOT_IN: - case NOT_EQUALS: - case NVL: - case OR: - case PLUS: - case ROW: - case TIMES: - return true; - default: - return BUILT_IN_OPERATORS_LIST.contains(operator); - } - } - - public CalendarPolicy getCalendarPolicy() { - return CalendarPolicy.NULL; - } - - /** - * Returns whether this dialect supports a given type. - */ - public boolean supportsDataType(RelDataType type) { - return true; - } - - /** - * Returns SqlNode for type in "cast(column as type)", which might be - * different between databases by type name, precision etc. - * - *

If this method returns null, the cast will be omitted. In the default - * implementation, this is the case for the NULL type, and therefore - * {@code CAST(NULL AS )} is rendered as {@code NULL}. - */ - public @Nullable SqlNode getCastSpec(RelDataType type) { - int maxPrecision = -1; - int maxScale = -1; - if (type instanceof AbstractSqlType) { - switch (type.getSqlTypeName()) { - case NULL: - return null; - case DECIMAL: - maxScale = getTypeSystem().getMaxScale(type.getSqlTypeName()); - // fall through - case CHAR: - case VARCHAR: - // if needed, adjust varchar length to max length supported by the system - maxPrecision = getTypeSystem().getMaxPrecision(type.getSqlTypeName()); - break; - default: - break; - } - String charSet = type.getCharset() != null && supportsCharSet() - ? type.getCharset().name() - : null; - return SqlTypeUtil.convertTypeToSpec(type, charSet, maxPrecision, maxScale); - } - return SqlTypeUtil.convertTypeToSpec(type); - } - - /** - * Rewrite SINGLE_VALUE into expression based on database variants - * E.g. HSQLDB, MYSQL, ORACLE, etc. - */ - public SqlNode rewriteSingleValueExpr(SqlNode aggCall, RelDataType relDataType) { - LOGGER.debug("SINGLE_VALUE rewrite not supported for {}", databaseProduct); - return aggCall; - } - - /** - * Returns the SqlNode for emulating the null direction for the given field - * or null if no emulation needs to be done. - * - * @param node The SqlNode representing the expression - * @param nullsFirst Whether nulls should come first - * @param desc Whether the sort direction is - * {@link RelFieldCollation.Direction#DESCENDING} or - * {@link RelFieldCollation.Direction#STRICTLY_DESCENDING} - * @return A SqlNode for null direction emulation or null if not required - */ - public @Nullable SqlNode emulateNullDirection(SqlNode node, boolean nullsFirst, - boolean desc) { - return null; - } - - public JoinType emulateJoinTypeForCrossJoin() { - return JoinType.COMMA; - } - - protected @Nullable SqlNode emulateNullDirectionWithIsNull(SqlNode node, - boolean nullsFirst, boolean desc) { - // No need for emulation if the nulls will anyways come out the way we want - // them based on "nullsFirst" and "desc". - if (nullCollation.isDefaultOrder(nullsFirst, desc)) { - return null; - } - - node = SqlStdOperatorTable.IS_NULL.createCall(SqlParserPos.ZERO, node); - if (nullsFirst) { - node = SqlStdOperatorTable.DESC.createCall(SqlParserPos.ZERO, node); - } - return node; - } - - /** - * Returns whether the dialect supports OFFSET/FETCH clauses - * introduced by SQL:2008, for instance - * {@code OFFSET 10 ROWS FETCH NEXT 20 ROWS ONLY}. - * If false, we assume that the dialect supports the alternative syntax - * {@code LIMIT 20 OFFSET 10}. - * - * @deprecated This method is no longer used. To change how the dialect - * unparses offset/fetch, override the {@link #unparseOffsetFetch} method. - */ - @Deprecated - public boolean supportsOffsetFetch() { - return true; - } - - /** - * Converts an offset and fetch into SQL. - * - *

At least one of {@code offset} and {@code fetch} must be provided. - * - *

Common options: - *

- * <ul>
- * <li>{@code OFFSET offset ROWS FETCH NEXT fetch ROWS ONLY}
- * (ANSI standard SQL, Oracle, PostgreSQL, and the default)
- * <li>{@code LIMIT fetch OFFSET offset} (Apache Hive, MySQL, Redshift)
- * </ul>
- * - * @param writer Writer - * @param offset Number of rows to skip before emitting, or null - * @param fetch Number of rows to fetch, or null - * @see #unparseFetchUsingAnsi(SqlWriter, SqlNode, SqlNode) - * @see #unparseFetchUsingLimit(SqlWriter, SqlNode, SqlNode) - */ - public void unparseOffsetFetch(SqlWriter writer, @Nullable SqlNode offset, - @Nullable SqlNode fetch) { - unparseFetchUsingAnsi(writer, offset, fetch); - } - - /** - * Converts a fetch into a "SELECT TOP(fetch)". - * - *

<p>A dialect that uses "TOP" syntax should override this method to print - * "TOP(fetch)", and override {@link #unparseOffsetFetch} to no-op. - * - * <p>
The default implementation of this method is no-op. - * - * @param writer Writer - * @param offset Number of rows to skip before emitting, or null - * @param fetch Number of rows to fetch, or null - */ - public void unparseTopN(SqlWriter writer, @Nullable SqlNode offset, @Nullable SqlNode fetch) { - } - - /** - * Unparses offset/fetch using ANSI standard "OFFSET offset ROWS FETCH NEXT - * fetch ROWS ONLY" syntax. - */ - protected static void unparseFetchUsingAnsi(SqlWriter writer, @Nullable SqlNode offset, - @Nullable SqlNode fetch) { - Preconditions.checkArgument(fetch != null || offset != null); - if (offset != null) { - writer.newlineAndIndent(); - final SqlWriter.Frame offsetFrame = - writer.startList(SqlWriter.FrameTypeEnum.OFFSET); - writer.keyword("OFFSET"); - offset.unparse(writer, -1, -1); - writer.keyword("ROWS"); - writer.endList(offsetFrame); - } - if (fetch != null) { - writer.newlineAndIndent(); - final SqlWriter.Frame fetchFrame = - writer.startList(SqlWriter.FrameTypeEnum.FETCH); - writer.keyword("FETCH"); - writer.keyword("NEXT"); - fetch.unparse(writer, -1, -1); - writer.keyword("ROWS"); - writer.keyword("ONLY"); - writer.endList(fetchFrame); - } - } - - /** - * Unparses offset/fetch using "LIMIT fetch OFFSET offset" syntax. - */ - protected static void unparseFetchUsingLimit(SqlWriter writer, @Nullable SqlNode offset, - @Nullable SqlNode fetch) { - Preconditions.checkArgument(fetch != null || offset != null); - unparseLimit(writer, fetch); - unparseOffset(writer, offset); - } - - protected static void unparseLimit(SqlWriter writer, @Nullable SqlNode fetch) { - if (fetch != null) { - writer.newlineAndIndent(); - final SqlWriter.Frame fetchFrame = - writer.startList(SqlWriter.FrameTypeEnum.FETCH); - writer.keyword("LIMIT"); - fetch.unparse(writer, -1, -1); - writer.endList(fetchFrame); - } - } - - protected static void unparseOffset(SqlWriter writer, @Nullable SqlNode offset) { - if (offset != null) { - writer.newlineAndIndent(); - final SqlWriter.Frame offsetFrame = - writer.startList(SqlWriter.FrameTypeEnum.OFFSET); - writer.keyword("OFFSET"); - offset.unparse(writer, -1, -1); - writer.endList(offsetFrame); - } - } - - /** - * Returns a description of the format string used by functions in this - * dialect. - * - *

<p>Dialects may need to override this element mapping if they differ from - * Oracle's format elements. By default, this returns {@link FormatModels#DEFAULT}. - */ - public FormatModel getFormatModel() { - return FormatModels.DEFAULT; - } - - /** - * Returns whether the dialect supports nested aggregations, for instance - * {@code SELECT SUM(SUM(1)) }. - */ - public boolean supportsNestedAggregations() { - return true; - } - - /** - * Returns whether this dialect supports "WITH ROLLUP" in the "GROUP BY" - * clause. - * - *

<p>For instance, in MySQL version 5, - * - * <blockquote> - * <pre>
 - * SELECT deptno, job, COUNT(*) AS c - * FROM emp - * GROUP BY deptno, job WITH ROLLUP - * </pre> - * </blockquote>
 - * - * <p>is equivalent to standard SQL - * - * <blockquote> - * <pre>
 - * SELECT deptno, job, COUNT(*) AS c - * FROM emp - * GROUP BY ROLLUP(deptno, job) - * ORDER BY deptno, job - * </pre> - * </blockquote>
 - * - * <p>The "WITH ROLLUP" clause was introduced in MySQL and is not standard - * SQL. - * - * <p>
See also {@link #supportsAggregateFunction(SqlKind)} applied to - * {@link SqlKind#ROLLUP}, which returns true in MySQL 8 and higher. - */ - public boolean supportsGroupByWithRollup() { - return false; - } - - /** - * Returns whether this dialect supports "WITH CUBE" in "GROUP BY" clause. - */ - public boolean supportsGroupByWithCube() { - return false; - } - - /** - * Returns whether this dialect support the specified type of join. - */ - public boolean supportsJoinType(JoinRelType joinType) { - return true; - } - - /** - * Returns how NULL values are sorted if an ORDER BY item does not contain - * NULLS ASCENDING or NULLS DESCENDING. - */ - public NullCollation getNullCollation() { - return nullCollation; - } - - /** - * Returns whether NULL values are sorted first or last, in this dialect, - * in an ORDER BY item of a given direction. - */ - public RelFieldCollation.NullDirection defaultNullDirection( - RelFieldCollation.Direction direction) { - switch (direction) { - case ASCENDING: - case STRICTLY_ASCENDING: - return getNullCollation().last(false) - ? RelFieldCollation.NullDirection.LAST - : RelFieldCollation.NullDirection.FIRST; - case DESCENDING: - case STRICTLY_DESCENDING: - return getNullCollation().last(true) - ? RelFieldCollation.NullDirection.LAST - : RelFieldCollation.NullDirection.FIRST; - default: - return RelFieldCollation.NullDirection.UNSPECIFIED; - } - } - - /** - * Returns whether the dialect supports VALUES in a sub-query with - * and an "AS t(column, ...)" values to define column names. - * - *

<p>Currently, only Oracle does not. For this, we generate "SELECT v0 AS c0, - * v1 AS c1 ... UNION ALL ...". We may need to refactor this method when we - * support VALUES for other dialects. - */ - @Experimental - public boolean supportsAliasedValues() { - return true; - } - - /** - * Returns whether the dialect supports implicit type coercion. - * - * <p>
Most of the sql dialects support implicit type coercion, so we make this method - * default return true. For instance, "cast('10' as integer) > 5" - * can be simplified to "'10' > 5" if the dialect supports implicit type coercion - * for VARCHAR and INTEGER comparison. - * - *

<p>For sql dialect that does not support implicit type coercion, such as the BigQuery, - * we can not convert '10' into INT64 implicitly. - * - * <p>
Now this method is used for some auxiliary decision when translating some {@link RexCall}s, - * see SqlImplementor#stripCastFromString for details. - * - * @param call the call to make decision - */ - public boolean supportsImplicitTypeCoercion(RexCall call) { - final RexNode operand0 = call.getOperands().get(0); - return SqlTypeUtil.isCharacter(operand0.getType()); - } - - /** - * Returns the name of the system table that has precisely one row. - * If there is no such table, returns null, and we will generate SELECT with - * no FROM clause. - * - *

<p>For {@code VALUES 1}, - * Oracle returns ["DUAL"] and we generate "SELECT 1 FROM DUAL"; - * MySQL returns null and we generate "SELECT 1". - */ - @Experimental - public @Nullable List<String> getSingleRowTableName() { - return null; - } - - /** - * Copies settings from this dialect into a parser configuration. - * - *

<p>{@code SqlDialect}, {@link SqlParser.Config} and {@link SqlConformance} - * cover different aspects of the same thing - the dialect of SQL spoken by a - * database - and this method helps to bridge between them. (The aspects are, - * respectively, generating SQL to send to a source database, parsing SQL - * sent to Calcite, and validating queries sent to Calcite. It makes sense to - * keep them as separate interfaces because they are used by different - * modules.) - * - *
 <p>The settings copied may differ among dialects, and may change over time, - * but currently include the following: - * - * <ul>
 - *   <li>{@link #getQuoting()}
 - *   <li>{@link #getQuotedCasing()}
 - *   <li>{@link #getUnquotedCasing()}
 - *   <li>{@link #isCaseSensitive()}
 - *   <li>{@link #getConformance()}
 - * </ul>
- * - * @param config Parser configuration builder - * @return The configuration builder - */ - public SqlParser.Config configureParser(SqlParser.Config config) { - final Quoting quoting = getQuoting(); - if (quoting != null) { - config = config.withQuoting(quoting); - } - return config.withQuotedCasing(getQuotedCasing()) - .withUnquotedCasing(getUnquotedCasing()) - .withCaseSensitive(isCaseSensitive()) - .withConformance(getConformance()) - .withCharLiteralStyles(ImmutableSet.of(CharLiteralStyle.STANDARD)); - } - - @Deprecated // to be removed before 2.0 - public SqlParser.ConfigBuilder configureParser( - SqlParser.ConfigBuilder configBuilder) { - return SqlParser.configBuilder( - configureParser(configBuilder.build())); - } - - /** - * Returns the {@link SqlConformance} that matches this dialect. - * - *

The base implementation returns its best guess, based upon - * {@link #databaseProduct}; sub-classes may override. - */ - public SqlConformance getConformance() { - switch (databaseProduct) { - case UNKNOWN: - case CALCITE: - return SqlConformanceEnum.DEFAULT; - case BIG_QUERY: - return SqlConformanceEnum.BIG_QUERY; - case MYSQL: - return SqlConformanceEnum.MYSQL_5; - case ORACLE: - return SqlConformanceEnum.ORACLE_10; - case MSSQL: - return SqlConformanceEnum.SQL_SERVER_2008; - default: - return SqlConformanceEnum.PRAGMATIC_2003; - } - } - - /** - * Returns the quoting scheme, or null if the combination of - * {@link #identifierQuoteString} and {@link #identifierEndQuoteString} - * does not correspond to any known quoting scheme. - */ - protected @Nullable Quoting getQuoting() { - if ("\"".equals(identifierQuoteString) - && "\"".equals(identifierEndQuoteString)) { - return Quoting.DOUBLE_QUOTE; - } else if ("`".equals(identifierQuoteString) - && "`".equals(identifierEndQuoteString)) { - return Quoting.BACK_TICK; - } else if ("[".equals(identifierQuoteString) - && "]".equals(identifierEndQuoteString)) { - return Quoting.BRACKET; - } else { - return null; - } - } - - /** - * Returns how unquoted identifiers are stored. - */ - public Casing getUnquotedCasing() { - return unquotedCasing; - } - - /** - * Returns how quoted identifiers are stored. - */ - public Casing getQuotedCasing() { - return quotedCasing; - } - - /** - * Returns whether matching of identifiers is case-sensitive. - */ - public boolean isCaseSensitive() { - return caseSensitive; - } - - /** - * A few utility functions copied from org.apache.calcite.util.Util. We have - * copied them because we wish to keep SqlDialect's dependencies to a - * minimum. - */ - @Deprecated // to be removed before 2.0 - public static class FakeUtil { - public static Error newInternal(Throwable e, String s) { - String message = "Internal error: \u0000" + s; - AssertionError ae = new AssertionError(message); - ae.initCause(e); - return ae; - } - - /** - * Replaces every occurrence of find in s with - * replace. - */ - public static String replace( - String s, - String find, - String replace) { - // let's be optimistic - int found = s.indexOf(find); - if (found == -1) { - return s; - } - StringBuilder sb = new StringBuilder(s.length()); - int start = 0; - for (; ; ) { - for (; start < found; start++) { - sb.append(s.charAt(start)); - } - if (found == s.length()) { - break; - } - sb.append(replace); - start += find.length(); - found = s.indexOf(find, start); - if (found == -1) { - found = s.length(); - } - } - return sb.toString(); - } - } - - - /** - * Whether this JDBC driver needs you to pass a Calendar object to methods - * such as {@link ResultSet#getTimestamp(int, java.util.Calendar)}. - */ - public enum CalendarPolicy { - NONE, - NULL, - LOCAL, - DIRECT, - SHIFT; - } - - /** - * Rough list of flavors of database. - * - *

<p>These values cannot help you distinguish between features that exist - * in different versions or ports of a database, but they are sufficient - * to drive a {@code switch} statement if behavior is broadly different - * between say, MySQL and Oracle. - * - * <p>
If possible, you should not refer to particular database at all; write - * extend the dialect to describe the particular capability, for example, - * whether the database allows expressions to appear in the GROUP BY clause. - */ - public enum DatabaseProduct { - ACCESS("Access", "\"", NullCollation.HIGH), - BIG_QUERY("Google BigQuery", "`", NullCollation.LOW), - CALCITE("Apache Calcite", "\"", NullCollation.HIGH), - CLICKHOUSE("ClickHouse", "`", NullCollation.LOW), - MSSQL("Microsoft SQL Server", "[", NullCollation.HIGH), - MYSQL("MySQL", "`", NullCollation.LOW), - ORACLE("Oracle", "\"", NullCollation.HIGH), - DERBY("Apache Derby", null, NullCollation.HIGH), - DB2("IBM DB2", null, NullCollation.HIGH), - EXASOL("Exasol", "\"", NullCollation.LOW), - FIREBIRD("Firebird", null, NullCollation.HIGH), - FIREBOLT("Firebolt", "\"", NullCollation.LOW), - H2("H2", "\"", NullCollation.HIGH), - HIVE("Apache Hive", null, NullCollation.LOW), - INFORMIX("Informix", null, NullCollation.HIGH), - INGRES("Ingres", null, NullCollation.HIGH), - JETHRO("JethroData", "\"", NullCollation.LOW), - LUCIDDB("LucidDB", "\"", NullCollation.HIGH), - INTERBASE("Interbase", null, NullCollation.HIGH), - PHOENIX("Phoenix", "\"", NullCollation.HIGH), - POSTGRESQL("PostgreSQL", "\"", NullCollation.HIGH), - PRESTO("Presto", "\"", NullCollation.LOW), - NETEZZA("Netezza", "\"", NullCollation.HIGH), - INFOBRIGHT("Infobright", "`", NullCollation.HIGH), - NEOVIEW("Neoview", null, NullCollation.HIGH), - SYBASE("Sybase", null, NullCollation.HIGH), - TERADATA("Teradata", "\"", NullCollation.HIGH), - HSQLDB("Hsqldb", null, NullCollation.HIGH), - VERTICA("Vertica", "\"", NullCollation.HIGH), - SQLSTREAM("SQLstream", "\"", NullCollation.HIGH), - SPARK("Spark", null, NullCollation.LOW), - - /** - * Paraccel, now called Actian Matrix. Redshift is based on this, so - * presumably the dialect capabilities are similar. - */ - PARACCEL("Paraccel", "\"", NullCollation.HIGH), - REDSHIFT("Redshift", "\"", NullCollation.HIGH), - SNOWFLAKE("Snowflake", "\"", NullCollation.HIGH), - - /** - * Placeholder for the unknown database. - * - *

<p>Its dialect is useful for generating generic SQL. If you need to - * do something database-specific like quoting identifiers, don't rely - * on this dialect to do what you want. - */ - UNKNOWN("Unknown", "`", NullCollation.HIGH); - - @SuppressWarnings("ImmutableEnumChecker") - private final Supplier<SqlDialect> dialect; - - @SuppressWarnings("argument.type.incompatible") - DatabaseProduct(String databaseProductName, @Nullable String quoteString, - NullCollation nullCollation) { - requireNonNull(databaseProductName, "databaseProductName"); - requireNonNull(nullCollation, "nullCollation"); - // Note: below lambda accesses uninitialized DatabaseProduct.this, so it might be - // worth refactoring - dialect = Suppliers.memoize(() -> { - final SqlDialect dialect = - SqlDialectFactoryImpl.simple(DatabaseProduct.this); - if (dialect != null) { - return dialect; - } - return new SqlDialect(SqlDialect.EMPTY_CONTEXT - .withDatabaseProduct(DatabaseProduct.this) - .withDatabaseProductName(databaseProductName) - .withIdentifierQuoteString(quoteString) - .withNullCollation(nullCollation)); - })::get; - } - - /** - * Returns a dummy dialect for this database. - * - * <p>

Since databases have many versions and flavors, this dummy dialect - * is at best an approximation. If you want exact information, better to - * use a dialect created from an actual connection's metadata - * (see {@link SqlDialectFactory#create(java.sql.DatabaseMetaData)}). - * - * @return Dialect representing lowest-common-denominator behavior for - * all versions of this database - */ - public SqlDialect getDialect() { - return dialect.get(); - } - } - - /** - * Information for creating a dialect. - * - *

It is immutable; to "set" a property, call one of the "with" methods, - * which returns a new context with the desired property value. - */ - public interface Context { - DatabaseProduct databaseProduct(); - - Context withDatabaseProduct(DatabaseProduct databaseProduct); - - @Nullable String databaseProductName(); - - Context withDatabaseProductName(String databaseProductName); - - @Nullable String databaseVersion(); - - Context withDatabaseVersion(String databaseVersion); - - int databaseMajorVersion(); - - Context withDatabaseMajorVersion(int databaseMajorVersion); - - int databaseMinorVersion(); - - Context withDatabaseMinorVersion(int databaseMinorVersion); - - String literalQuoteString(); - - Context withLiteralQuoteString(String literalQuoteString); - - String literalEscapedQuoteString(); - - Context withLiteralEscapedQuoteString( - String literalEscapedQuoteString); - - @Nullable String identifierQuoteString(); - - Context withIdentifierQuoteString(@Nullable String identifierQuoteString); - - @Nullable String identifierEscapedQuoteString(); - - Context withIdentifierEscapedQuoteString( - @Nullable String identifierEscapedQuoteString); - - Casing unquotedCasing(); - - Context withUnquotedCasing(Casing unquotedCasing); - - Casing quotedCasing(); - - Context withQuotedCasing(Casing unquotedCasing); - - boolean caseSensitive(); - - Context withCaseSensitive(boolean caseSensitive); - - SqlConformance conformance(); - - Context withConformance(SqlConformance conformance); - - NullCollation nullCollation(); - - Context withNullCollation(NullCollation nullCollation); - - RelDataTypeSystem dataTypeSystem(); - - Context withDataTypeSystem(RelDataTypeSystem dataTypeSystem); - - JethroDataSqlDialect.JethroInfo jethroInfo(); - - Context withJethroInfo(JethroDataSqlDialect.JethroInfo jethroInfo); - } - - /** - * Implementation of Context. 
- */ - private static class ContextImpl implements Context { - private final DatabaseProduct databaseProduct; - private final @Nullable String databaseProductName; - private final @Nullable String databaseVersion; - private final int databaseMajorVersion; - private final int databaseMinorVersion; - private final String literalQuoteString; - private final String literalEscapedQuoteString; - private final @Nullable String identifierQuoteString; - private final @Nullable String identifierEscapedQuoteString; - private final Casing unquotedCasing; - private final Casing quotedCasing; - private final boolean caseSensitive; - private final SqlConformance conformance; - private final NullCollation nullCollation; - private final RelDataTypeSystem dataTypeSystem; - private final JethroDataSqlDialect.JethroInfo jethroInfo; - - private ContextImpl(DatabaseProduct databaseProduct, - @Nullable String databaseProductName, @Nullable String databaseVersion, - int databaseMajorVersion, int databaseMinorVersion, - String literalQuoteString, String literalEscapedQuoteString, - @Nullable String identifierQuoteString, - @Nullable String identifierEscapedQuoteString, - Casing quotedCasing, Casing unquotedCasing, boolean caseSensitive, - SqlConformance conformance, NullCollation nullCollation, - RelDataTypeSystem dataTypeSystem, - JethroDataSqlDialect.JethroInfo jethroInfo) { - this.databaseProduct = requireNonNull(databaseProduct, "databaseProduct"); - this.databaseProductName = databaseProductName; - this.databaseVersion = databaseVersion; - this.databaseMajorVersion = databaseMajorVersion; - this.databaseMinorVersion = databaseMinorVersion; - this.literalQuoteString = literalQuoteString; - this.literalEscapedQuoteString = literalEscapedQuoteString; - this.identifierQuoteString = identifierQuoteString; - this.identifierEscapedQuoteString = identifierEscapedQuoteString; - this.quotedCasing = requireNonNull(quotedCasing, "quotedCasing"); - this.unquotedCasing = requireNonNull(unquotedCasing, "unquotedCasing"); - this.caseSensitive = caseSensitive; - this.conformance = requireNonNull(conformance, "conformance"); - this.nullCollation = requireNonNull(nullCollation, "nullCollation"); - this.dataTypeSystem = requireNonNull(dataTypeSystem, "dataTypeSystem"); - this.jethroInfo = requireNonNull(jethroInfo, "jethroInfo"); - } - - @Override - public DatabaseProduct databaseProduct() { - return databaseProduct; - } - - @Override - public Context withDatabaseProduct( - DatabaseProduct databaseProduct) { - return new ContextImpl(databaseProduct, databaseProductName, - databaseVersion, databaseMajorVersion, databaseMinorVersion, - literalQuoteString, literalEscapedQuoteString, - identifierQuoteString, identifierEscapedQuoteString, - quotedCasing, unquotedCasing, caseSensitive, - conformance, nullCollation, dataTypeSystem, jethroInfo); - } - - @Override - public @Nullable String databaseProductName() { - return databaseProductName; - } - - @Override - public Context withDatabaseProductName(String databaseProductName) { - return new ContextImpl(databaseProduct, databaseProductName, - databaseVersion, databaseMajorVersion, databaseMinorVersion, - literalQuoteString, literalEscapedQuoteString, - identifierQuoteString, identifierEscapedQuoteString, - quotedCasing, unquotedCasing, caseSensitive, - conformance, nullCollation, dataTypeSystem, jethroInfo); - } - - @Override - public @Nullable String databaseVersion() { - return databaseVersion; - } - - @Override - public Context withDatabaseVersion(String databaseVersion) { - return 
new ContextImpl(databaseProduct, databaseProductName, - databaseVersion, databaseMajorVersion, databaseMinorVersion, - literalQuoteString, literalEscapedQuoteString, - identifierQuoteString, identifierEscapedQuoteString, - quotedCasing, unquotedCasing, caseSensitive, - conformance, nullCollation, dataTypeSystem, jethroInfo); - } - - @Override - public int databaseMajorVersion() { - return databaseMajorVersion; - } - - @Override - public Context withDatabaseMajorVersion(int databaseMajorVersion) { - return new ContextImpl(databaseProduct, databaseProductName, - databaseVersion, databaseMajorVersion, databaseMinorVersion, - literalQuoteString, literalEscapedQuoteString, - identifierQuoteString, identifierEscapedQuoteString, - quotedCasing, unquotedCasing, caseSensitive, - conformance, nullCollation, dataTypeSystem, jethroInfo); - } - - @Override - public int databaseMinorVersion() { - return databaseMinorVersion; - } - - @Override - public Context withDatabaseMinorVersion(int databaseMinorVersion) { - return new ContextImpl(databaseProduct, databaseProductName, - databaseVersion, databaseMajorVersion, databaseMinorVersion, - literalQuoteString, literalEscapedQuoteString, - identifierQuoteString, identifierEscapedQuoteString, - quotedCasing, unquotedCasing, caseSensitive, - conformance, nullCollation, dataTypeSystem, jethroInfo); - } - - @Override - public String literalQuoteString() { - return literalQuoteString; - } - - @Override - public Context withLiteralQuoteString(String literalQuoteString) { - return new ContextImpl(databaseProduct, databaseProductName, - databaseVersion, databaseMajorVersion, databaseMinorVersion, - literalQuoteString, literalEscapedQuoteString, - identifierQuoteString, identifierEscapedQuoteString, - quotedCasing, unquotedCasing, caseSensitive, - conformance, nullCollation, dataTypeSystem, jethroInfo); - } - - @Override - public String literalEscapedQuoteString() { - return literalEscapedQuoteString; - } - - @Override - public Context withLiteralEscapedQuoteString( - String literalEscapedQuoteString) { - return new ContextImpl(databaseProduct, databaseProductName, - databaseVersion, databaseMajorVersion, databaseMinorVersion, - literalQuoteString, literalEscapedQuoteString, - identifierQuoteString, identifierEscapedQuoteString, - quotedCasing, unquotedCasing, caseSensitive, - conformance, nullCollation, dataTypeSystem, jethroInfo); - } - - @Override - public @Nullable String identifierQuoteString() { - return identifierQuoteString; - } - - @Override - public Context withIdentifierQuoteString( - @Nullable String identifierQuoteString) { - return new ContextImpl(databaseProduct, databaseProductName, - databaseVersion, databaseMajorVersion, databaseMinorVersion, - literalQuoteString, literalEscapedQuoteString, - identifierQuoteString, identifierEscapedQuoteString, - quotedCasing, unquotedCasing, caseSensitive, - conformance, nullCollation, dataTypeSystem, jethroInfo); - } - - @Override - public @Nullable String identifierEscapedQuoteString() { - return identifierEscapedQuoteString; - } - - @Override - public Context withIdentifierEscapedQuoteString( - @Nullable String identifierEscapedQuoteString) { - return new ContextImpl(databaseProduct, databaseProductName, - databaseVersion, databaseMajorVersion, databaseMinorVersion, - literalQuoteString, literalEscapedQuoteString, - identifierQuoteString, identifierEscapedQuoteString, - quotedCasing, unquotedCasing, caseSensitive, - conformance, nullCollation, dataTypeSystem, jethroInfo); - } - - @Override - public Casing 
unquotedCasing() { - return unquotedCasing; - } - - @Override - public Context withUnquotedCasing(Casing unquotedCasing) { - return new ContextImpl(databaseProduct, databaseProductName, - databaseVersion, databaseMajorVersion, databaseMinorVersion, - literalQuoteString, literalEscapedQuoteString, - identifierQuoteString, identifierEscapedQuoteString, - quotedCasing, unquotedCasing, caseSensitive, - conformance, nullCollation, dataTypeSystem, jethroInfo); - } - - @Override - public Casing quotedCasing() { - return quotedCasing; - } - - @Override - public Context withQuotedCasing(Casing quotedCasing) { - return new ContextImpl(databaseProduct, databaseProductName, - databaseVersion, databaseMajorVersion, databaseMinorVersion, - literalQuoteString, literalEscapedQuoteString, - identifierQuoteString, identifierEscapedQuoteString, - quotedCasing, unquotedCasing, caseSensitive, - conformance, nullCollation, dataTypeSystem, jethroInfo); - } - - @Override - public boolean caseSensitive() { - return caseSensitive; - } - - @Override - public Context withCaseSensitive(boolean caseSensitive) { - return new ContextImpl(databaseProduct, databaseProductName, - databaseVersion, databaseMajorVersion, databaseMinorVersion, - literalQuoteString, literalEscapedQuoteString, - identifierQuoteString, identifierEscapedQuoteString, - quotedCasing, unquotedCasing, caseSensitive, - conformance, nullCollation, dataTypeSystem, jethroInfo); - } - - @Override - public SqlConformance conformance() { - return conformance; - } - - @Override - public Context withConformance(SqlConformance conformance) { - return new ContextImpl(databaseProduct, databaseProductName, - databaseVersion, databaseMajorVersion, databaseMinorVersion, - literalQuoteString, literalEscapedQuoteString, - identifierQuoteString, identifierEscapedQuoteString, - quotedCasing, unquotedCasing, caseSensitive, - conformance, nullCollation, dataTypeSystem, jethroInfo); - } - - @Override - public NullCollation nullCollation() { - return nullCollation; - } - - @Override - public Context withNullCollation( - NullCollation nullCollation) { - return new ContextImpl(databaseProduct, databaseProductName, - databaseVersion, databaseMajorVersion, databaseMinorVersion, - literalQuoteString, literalEscapedQuoteString, - identifierQuoteString, identifierEscapedQuoteString, - quotedCasing, unquotedCasing, caseSensitive, - conformance, nullCollation, dataTypeSystem, jethroInfo); - } - - @Override - public RelDataTypeSystem dataTypeSystem() { - return dataTypeSystem; - } - - @Override - public Context withDataTypeSystem(RelDataTypeSystem dataTypeSystem) { - return new ContextImpl(databaseProduct, databaseProductName, - databaseVersion, databaseMajorVersion, databaseMinorVersion, - literalQuoteString, literalEscapedQuoteString, - identifierQuoteString, identifierEscapedQuoteString, - quotedCasing, unquotedCasing, caseSensitive, - conformance, nullCollation, dataTypeSystem, jethroInfo); - } - - @Override - public JethroDataSqlDialect.JethroInfo jethroInfo() { - return jethroInfo; - } - - @Override - public Context withJethroInfo(JethroDataSqlDialect.JethroInfo jethroInfo) { - return new ContextImpl(databaseProduct, databaseProductName, - databaseVersion, databaseMajorVersion, databaseMinorVersion, - literalQuoteString, literalEscapedQuoteString, - identifierQuoteString, identifierEscapedQuoteString, - quotedCasing, unquotedCasing, caseSensitive, - conformance, nullCollation, dataTypeSystem, jethroInfo); - } - } -} diff --git 
a/core/core-backend/src/main/resources/db/migration/V2.1__ddl.sql b/core/core-backend/src/main/resources/db/migration/V2.1__ddl.sql index 6d795fc843..b0130ce270 100644 --- a/core/core-backend/src/main/resources/db/migration/V2.1__ddl.sql +++ b/core/core-backend/src/main/resources/db/migration/V2.1__ddl.sql @@ -26,17 +26,32 @@ VALUES (20, 15, 2, 'template-setting', 'system/template-setting', 4, 'icon_templ COMMIT; DROP TABLE IF EXISTS `visualization_template_extend_data`; -CREATE TABLE `visualization_template_extend_data` ( - `id` bigint NOT NULL, - `dv_id` bigint DEFAULT NULL, - `view_id` bigint DEFAULT NULL, - `view_details` longtext, - `copy_from` varchar(255) DEFAULT NULL, - `copy_id` varchar(255) DEFAULT NULL, - PRIMARY KEY (`id`) +CREATE TABLE `visualization_template_extend_data` +( + `id` bigint NOT NULL, + `dv_id` bigint DEFAULT NULL, + `view_id` bigint DEFAULT NULL, + `view_details` longtext, + `copy_from` varchar(255) DEFAULT NULL, + `copy_id` varchar(255) DEFAULT NULL, + PRIMARY KEY (`id`) ); ALTER TABLE `core_opt_recent` MODIFY COLUMN `resource_id` bigint NULL COMMENT '资源ID' AFTER `id`, - ADD COLUMN `resource_name` varchar(255) NULL COMMENT '资源名称' AFTER `resource_id`; \ No newline at end of file + ADD COLUMN `resource_name` varchar(255) NULL COMMENT '资源名称' AFTER `resource_id`; + +DROP TABLE IF EXISTS `core_area_custom`; +CREATE TABLE `core_area_custom` +( + `id` varchar(255) NOT NULL, + `name` varchar(255) NOT NULL, + `pid` varchar(255) NOT NULL, + PRIMARY KEY (`id`) +); + +BEGIN; +INSERT INTO `core_sys_setting` VALUES (1, 'basic.dsIntervalTime', '6', 'text', 2); +INSERT INTO `core_sys_setting` VALUES (2, 'basic.dsExecuteTime', 'minute', 'text', 3); +COMMIT; diff --git a/core/core-backend/src/main/resources/i18n/core_zh_CN.properties b/core/core-backend/src/main/resources/i18n/core_zh_CN.properties index e01c771929..af0608789c 100644 --- a/core/core-backend/src/main/resources/i18n/core_zh_CN.properties +++ b/core/core-backend/src/main/resources/i18n/core_zh_CN.properties @@ -23,6 +23,7 @@ i18n_menu.summary=\u6982\u89C8 i18n_menu.ds=\u6570\u636E\u6E90\u7BA1\u7406 i18n_menu.task=\u4EFB\u52A1\u7BA1\u7406 i18n_menu.embedded=\u5D4C\u5165\u5F0F\u7BA1\u7406 +i18n_menu.platform=\u5E73\u53F0\u5BF9\u63A5 i18n_field_name_repeat=\u6709\u91CD\u590D\u5B57\u6BB5\u540D\uFF1A i18n_pid_not_eq_id=\u79FB\u52A8\u76EE\u6807\u4E0D\u80FD\u662F\u81EA\u5DF1\u6216\u5B50\u76EE\u5F55 i18n_ds_name_exists=\u8BE5\u5206\u7EC4\u4E0B\u540D\u79F0\u91CD\u590D diff --git a/core/core-frontend/src/api/map.ts b/core/core-frontend/src/api/map.ts index 66afbf439b..7023c28c49 100644 --- a/core/core-frontend/src/api/map.ts +++ b/core/core-frontend/src/api/map.ts @@ -5,9 +5,21 @@ export const getWorldTree = (): Promise> => { return request.get({ url: '/map/worldTree' }) } -export const getGeoJson = ( - country: string, - areaId: string -): Promise> => { - return request.get({ url: `/map/${country}/${areaId}.json` }) +export const getGeoJson = (areaId: string): Promise> => { + let prefix = '/map' + let areaCode = areaId + if (isCustomGeo(areaId)) { + prefix = '/geo' + areaCode = getBusiGeoCode(areaId) + } + const realCountry = areaCode.substring(0, 3) + const url = `${prefix}/${realCountry}/${areaCode}.json` + return request.get({ url }) +} + +const isCustomGeo = (id: string) => { + return id.startsWith('geo_') +} +const getBusiGeoCode = (id: string) => { + return id.substring(4) } diff --git a/core/core-frontend/src/assets/svg/de-json.svg b/core/core-frontend/src/assets/svg/de-json.svg new file mode 100644 index 
0000000000..9e62a6c556 --- /dev/null +++ b/core/core-frontend/src/assets/svg/de-json.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/core/core-frontend/src/assets/svg/platform.svg b/core/core-frontend/src/assets/svg/platform.svg new file mode 100644 index 0000000000..5ebbc21ab9 --- /dev/null +++ b/core/core-frontend/src/assets/svg/platform.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/core/core-frontend/src/components/plugin/src/index.vue b/core/core-frontend/src/components/plugin/src/index.vue index 7f06d584f7..4ffce4b554 100644 --- a/core/core-frontend/src/components/plugin/src/index.vue +++ b/core/core-frontend/src/components/plugin/src/index.vue @@ -58,8 +58,7 @@ const loadComponent = () => { storeCacheProxy(byteArray) importProxy(byteArray) }) - .catch(e => { - console.error(e) + .catch(() => { showNolic() }) .finally(() => { diff --git a/core/core-frontend/src/config/axios/service.ts b/core/core-frontend/src/config/axios/service.ts index ee50af4d99..c066786dd7 100644 --- a/core/core-frontend/src/config/axios/service.ts +++ b/core/core-frontend/src/config/axios/service.ts @@ -130,7 +130,7 @@ service.interceptors.response.use( return response } else if (response.data.code === result_code || response.data.code === 50002) { return response.data - } else if (response.config.url.match(/^\/map\/\d{3}\/\d+\.json$/)) { + } else if (response.config.url.match(/^\/map|geo\/\d{3}\/\d+\.json$/)) { // TODO 处理静态文件 return response } else { diff --git a/core/core-frontend/src/custom-component/rich-text/DeRichEditor.vue b/core/core-frontend/src/custom-component/rich-text/DeRichEditor.vue index 867085704f..1b18cac782 100644 --- a/core/core-frontend/src/custom-component/rich-text/DeRichEditor.vue +++ b/core/core-frontend/src/custom-component/rich-text/DeRichEditor.vue @@ -3,6 +3,7 @@ diff --git a/core/core-frontend/src/views/system/parameter/email/EmailInfo.vue b/core/core-frontend/src/views/system/parameter/email/EmailInfo.vue index f525baee0b..998a07634c 100644 --- a/core/core-frontend/src/views/system/parameter/email/EmailInfo.vue +++ b/core/core-frontend/src/views/system/parameter/email/EmailInfo.vue @@ -1,5 +1,10 @@ diff --git a/core/core-frontend/src/views/system/parameter/index.vue b/core/core-frontend/src/views/system/parameter/index.vue index d550718101..2313a0f321 100644 --- a/core/core-frontend/src/views/system/parameter/index.vue +++ b/core/core-frontend/src/views/system/parameter/index.vue @@ -7,7 +7,7 @@

- +
@@ -17,12 +17,12 @@ import { ref } from 'vue' import { useI18n } from '@/hooks/web/useI18n' import MapSetting from './map/MapSetting.vue' import BasicInfo from './basic/BasicInfo.vue' -import EmailInfo from './email/EmailInfo.vue' +/* import EmailInfo from './email/EmailInfo.vue' */ const { t } = useI18n() const tabArray = [ { label: '基础设置', name: 'basic' }, - { label: '邮件设置', name: 'email' }, + /* { label: '邮件设置', name: 'email' }, */ { label: '地图设置', name: 'map' } /* {label: '引擎设置', name: 'engine'}, */ ] diff --git a/core/core-frontend/src/views/system/parameter/map/Geometry.vue b/core/core-frontend/src/views/system/parameter/map/Geometry.vue index 47c21db890..67244cba94 100644 --- a/core/core-frontend/src/views/system/parameter/map/Geometry.vue +++ b/core/core-frontend/src/views/system/parameter/map/Geometry.vue @@ -3,7 +3,7 @@
{{ t('online_map.geometry') }} - + @@ -43,6 +43,19 @@ :title="data.name" v-html="data.colorName && keyword ? data.colorName : data.name" /> + + + + + + + @@ -83,6 +96,7 @@
+ diff --git a/core/core-frontend/src/views/system/parameter/map/GeometryEdit.vue b/core/core-frontend/src/views/system/parameter/map/GeometryEdit.vue new file mode 100644 index 0000000000..8c8dd53dc7 --- /dev/null +++ b/core/core-frontend/src/views/system/parameter/map/GeometryEdit.vue @@ -0,0 +1,248 @@ + + + + + diff --git a/core/core-frontend/src/views/system/parameter/map/interface.ts b/core/core-frontend/src/views/system/parameter/map/interface.ts new file mode 100644 index 0000000000..1600eae66d --- /dev/null +++ b/core/core-frontend/src/views/system/parameter/map/interface.ts @@ -0,0 +1,5 @@ +export interface GeometryFrom { + pid?: string + code?: string + name?: string +} diff --git a/core/core-frontend/src/views/workbranch/index.vue b/core/core-frontend/src/views/workbranch/index.vue index 8122965f94..4514353224 100644 --- a/core/core-frontend/src/views/workbranch/index.vue +++ b/core/core-frontend/src/views/workbranch/index.vue @@ -517,7 +517,7 @@ initMarketTemplate() } .template-market-dashboard { - width: calc(100% - 384px); + width: calc(100% - 376px); height: 100%; .template-market { diff --git a/de-xpack b/de-xpack index a0ed462213..e31c921205 160000 --- a/de-xpack +++ b/de-xpack @@ -1 +1 @@ -Subproject commit a0ed462213c8f274e09305cdc2ab0b238e9a2547 +Subproject commit e31c9212056eae3656aa534e3cc778d00001fe98 diff --git a/installer/dataease/bin/apisix/init.sh b/installer/dataease/bin/apisix/init.sh index 0fc8c94111..9af6f1f45d 100644 --- a/installer/dataease/bin/apisix/init.sh +++ b/installer/dataease/bin/apisix/init.sh @@ -66,6 +66,7 @@ curl http://127.0.0.1:9180/apisix/admin/services/10 -X PUT -H "X-API-KEY: $DE_AP "X-DE-TOKEN", "X-DE-LINK-TOKEN", "X-EMBEDDED-TOKEN", + "X-DE-ASK-TOKEN", "Content-Type" ], "request_method": "POST", diff --git a/installer/dataease/docker-compose.yml b/installer/dataease/docker-compose.yml index 6ce1b2f606..f4c1f0c3fa 100644 --- a/installer/dataease/docker-compose.yml +++ b/installer/dataease/docker-compose.yml @@ -11,6 +11,7 @@ services: - ${DE_BASE}/dataease2.0/logs:/opt/dataease2.0/logs - ${DE_BASE}/dataease2.0/data/static-resource:/opt/dataease2.0/data/static-resource - ${DE_BASE}/dataease2.0/cache:/opt/dataease2.0/cache + - ${DE_BASE}/dataease2.0/data/geo:/opt/dataease2.0/data/geo depends_on: DE_MYSQL_HOST: condition: service_healthy diff --git a/installer/dataease/templates/application.yml b/installer/dataease/templates/application.yml index df7293079e..a9e6a81f78 100644 --- a/installer/dataease/templates/application.yml +++ b/installer/dataease/templates/application.yml @@ -11,7 +11,6 @@ spring: username: ${DE_MYSQL_USER} password: ${DE_MYSQL_PASSWORD} dataease: - origin-list: localhost:8080,localhost:8100,localhost:9080 apisix-api: domain: http://apisix:9180 key: DE_APISIX_KEY \ No newline at end of file diff --git a/pom.xml b/pom.xml index 25a84d89b3..c856156989 100644 --- a/pom.xml +++ b/pom.xml @@ -25,7 +25,7 @@ 3.5.3.1 1.4.199 4.1.0 - 1.35.0 + 1.36.0 2.6.0 3.5.2 3.12.1 diff --git a/sdk/api/api-base/src/main/java/io/dataease/api/lark/api/LarkApi.java b/sdk/api/api-base/src/main/java/io/dataease/api/lark/api/LarkApi.java new file mode 100644 index 0000000000..c608ee94a6 --- /dev/null +++ b/sdk/api/api-base/src/main/java/io/dataease/api/lark/api/LarkApi.java @@ -0,0 +1,21 @@ +package io.dataease.api.lark.api; + +import io.dataease.api.lark.dto.LarkTokenRequest; +import io.dataease.api.lark.vo.LarkInfoVO; +import io.dataease.api.lark.dto.LarkSettingCreator; +import org.springframework.web.bind.annotation.GetMapping; +import 
org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; + +public interface LarkApi { + + @GetMapping("/info") + LarkInfoVO info(); + + @PostMapping("/create") + void save(@RequestBody LarkSettingCreator creator); + + @PostMapping("/token") + String larkToken(@RequestBody LarkTokenRequest request); + +} diff --git a/sdk/api/api-base/src/main/java/io/dataease/api/lark/dto/LarkSettingCreator.java b/sdk/api/api-base/src/main/java/io/dataease/api/lark/dto/LarkSettingCreator.java new file mode 100644 index 0000000000..94eb9d369d --- /dev/null +++ b/sdk/api/api-base/src/main/java/io/dataease/api/lark/dto/LarkSettingCreator.java @@ -0,0 +1,17 @@ +package io.dataease.api.lark.dto; + +import lombok.Data; + +import java.io.Serializable; + +@Data +public class LarkSettingCreator implements Serializable { + + private String appId; + + private String appSecret; + + private String callBack; + + private Boolean enable; +} diff --git a/sdk/api/api-base/src/main/java/io/dataease/api/lark/dto/LarkTokenRequest.java b/sdk/api/api-base/src/main/java/io/dataease/api/lark/dto/LarkTokenRequest.java new file mode 100644 index 0000000000..cb7ac21437 --- /dev/null +++ b/sdk/api/api-base/src/main/java/io/dataease/api/lark/dto/LarkTokenRequest.java @@ -0,0 +1,13 @@ +package io.dataease.api.lark.dto; + +import lombok.Data; + +import java.io.Serializable; + +@Data +public class LarkTokenRequest implements Serializable { + + private String code; + + private String state; +} diff --git a/sdk/api/api-base/src/main/java/io/dataease/api/lark/vo/LarkInfoVO.java b/sdk/api/api-base/src/main/java/io/dataease/api/lark/vo/LarkInfoVO.java new file mode 100644 index 0000000000..c1a19faa6d --- /dev/null +++ b/sdk/api/api-base/src/main/java/io/dataease/api/lark/vo/LarkInfoVO.java @@ -0,0 +1,19 @@ +package io.dataease.api.lark.vo; + + + +import lombok.Data; + +import java.io.Serializable; + +@Data +public class LarkInfoVO implements Serializable { + + private String appId; + + private String appSecret; + + private String callBack; + + private Boolean enable = false; +} diff --git a/sdk/api/api-base/src/main/java/io/dataease/api/map/GeoApi.java b/sdk/api/api-base/src/main/java/io/dataease/api/map/GeoApi.java new file mode 100644 index 0000000000..23a391bf0b --- /dev/null +++ b/sdk/api/api-base/src/main/java/io/dataease/api/map/GeoApi.java @@ -0,0 +1,18 @@ +package io.dataease.api.map; + +import io.dataease.api.map.dto.GeometryNodeCreator; +import io.swagger.v3.oas.annotations.tags.Tag; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestPart; +import org.springframework.web.multipart.MultipartFile; + +@Tag(name = "地理信息") +public interface GeoApi { + + @PostMapping(value = "/save", consumes = {"multipart/form-data"}) + void saveMapGeo(@RequestPart("request") GeometryNodeCreator request, @RequestPart(value = "file") MultipartFile file); + + @PostMapping("/delete/{id}") + void deleteGeo(@PathVariable("id") String id); +} diff --git a/sdk/api/api-base/src/main/java/io/dataease/api/map/dto/GeometryNodeCreator.java b/sdk/api/api-base/src/main/java/io/dataease/api/map/dto/GeometryNodeCreator.java new file mode 100644 index 0000000000..945e65a588 --- /dev/null +++ b/sdk/api/api-base/src/main/java/io/dataease/api/map/dto/GeometryNodeCreator.java @@ -0,0 +1,15 @@ +package io.dataease.api.map.dto; + +import lombok.Data; + +import java.io.Serializable; + +@Data 
+public class GeometryNodeCreator implements Serializable { + + private String code; + + private String name; + + private String pid; +} diff --git a/sdk/api/api-base/src/main/java/io/dataease/api/map/vo/AreaNode.java b/sdk/api/api-base/src/main/java/io/dataease/api/map/vo/AreaNode.java index 1b834bc896..2750b7e2da 100644 --- a/sdk/api/api-base/src/main/java/io/dataease/api/map/vo/AreaNode.java +++ b/sdk/api/api-base/src/main/java/io/dataease/api/map/vo/AreaNode.java @@ -18,6 +18,7 @@ public class AreaNode implements Serializable { private String level; private String name; private String pid; + private boolean custom = false; /** * 国家代码 */ diff --git a/sdk/api/api-base/src/main/java/io/dataease/api/system/SysParameterApi.java b/sdk/api/api-base/src/main/java/io/dataease/api/system/SysParameterApi.java index 6b42b57a0b..a2234315f5 100644 --- a/sdk/api/api-base/src/main/java/io/dataease/api/system/SysParameterApi.java +++ b/sdk/api/api-base/src/main/java/io/dataease/api/system/SysParameterApi.java @@ -1,11 +1,14 @@ package io.dataease.api.system; import io.dataease.api.system.request.OnlineMapEditor; +import io.dataease.api.system.vo.SettingItemVO; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; +import java.util.List; + public interface SysParameterApi { @GetMapping("/singleVal/{key}") @@ -17,4 +20,10 @@ public interface SysParameterApi { @GetMapping("/queryOnlineMap") String queryOnlineMap(); + @GetMapping("basic/query") + List<SettingItemVO> queryBasicSetting(); + + @PostMapping("/basic/save") + void saveBasicSetting(@RequestBody List<SettingItemVO> settingItemVOS); + } diff --git a/sdk/api/api-base/src/main/java/io/dataease/api/system/vo/SettingItemVO.java b/sdk/api/api-base/src/main/java/io/dataease/api/system/vo/SettingItemVO.java new file mode 100644 index 0000000000..c004582577 --- /dev/null +++ b/sdk/api/api-base/src/main/java/io/dataease/api/system/vo/SettingItemVO.java @@ -0,0 +1,19 @@ +package io.dataease.api.system.vo; + +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.io.Serializable; + +@Data +@NoArgsConstructor +public class SettingItemVO implements Serializable { + + private String pkey; + + private String pval; + + private String type; + + private Integer sort; +} diff --git a/sdk/api/api-permissions/src/main/java/io/dataease/api/permissions/apikey/api/ApiKeyApi.java b/sdk/api/api-permissions/src/main/java/io/dataease/api/permissions/apikey/api/ApiKeyApi.java new file mode 100644 index 0000000000..1c31d95544 --- /dev/null +++ b/sdk/api/api-permissions/src/main/java/io/dataease/api/permissions/apikey/api/ApiKeyApi.java @@ -0,0 +1,25 @@ +package io.dataease.api.permissions.apikey.api; + +import io.dataease.api.permissions.apikey.dto.ApikeyEnableEditor; +import io.dataease.api.permissions.apikey.vo.ApiKeyVO; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; + +import java.util.List; + +public interface ApiKeyApi { + + @PostMapping("/generate") + void generate(); + + @GetMapping("/query") + List<ApiKeyVO> query(); + + @PostMapping("/switch") + void switchEnable(@RequestBody ApikeyEnableEditor editor); + + @PostMapping("/delete/{id}") + void delete(@PathVariable("id") Long id); +} diff --git
a/sdk/api/api-permissions/src/main/java/io/dataease/api/permissions/apikey/dto/ApikeyEnableEditor.java b/sdk/api/api-permissions/src/main/java/io/dataease/api/permissions/apikey/dto/ApikeyEnableEditor.java new file mode 100644 index 0000000000..b0586751bd --- /dev/null +++ b/sdk/api/api-permissions/src/main/java/io/dataease/api/permissions/apikey/dto/ApikeyEnableEditor.java @@ -0,0 +1,13 @@ +package io.dataease.api.permissions.apikey.dto; + +import lombok.Data; + +import java.io.Serializable; + +@Data +public class ApikeyEnableEditor implements Serializable { + + private Long id; + + private Boolean enable = false; +} diff --git a/sdk/api/api-permissions/src/main/java/io/dataease/api/permissions/apikey/vo/ApiKeyVO.java b/sdk/api/api-permissions/src/main/java/io/dataease/api/permissions/apikey/vo/ApiKeyVO.java new file mode 100644 index 0000000000..04edcc568f --- /dev/null +++ b/sdk/api/api-permissions/src/main/java/io/dataease/api/permissions/apikey/vo/ApiKeyVO.java @@ -0,0 +1,22 @@ +package io.dataease.api.permissions.apikey.vo; + +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.ToStringSerializer; +import lombok.Data; + +import java.io.Serializable; + +@Data +public class ApiKeyVO implements Serializable { + + @JsonSerialize(using= ToStringSerializer.class) + private Long id; + + private String accessKey; + + private String accessSecret; + + private Boolean enable; + + private Long createTime; +} diff --git a/sdk/api/api-permissions/src/main/java/io/dataease/api/permissions/embedded/api/EmbeddedApi.java b/sdk/api/api-permissions/src/main/java/io/dataease/api/permissions/embedded/api/EmbeddedApi.java index 7f2ffb2eb9..0ebe6015ab 100644 --- a/sdk/api/api-permissions/src/main/java/io/dataease/api/permissions/embedded/api/EmbeddedApi.java +++ b/sdk/api/api-permissions/src/main/java/io/dataease/api/permissions/embedded/api/EmbeddedApi.java @@ -27,4 +27,7 @@ public interface EmbeddedApi { @PostMapping("/reset") void reset(@RequestBody EmbeddedResetRequest request); + + @GetMapping("/domainList") + List domainList(); } diff --git a/sdk/api/api-permissions/src/main/java/io/dataease/api/permissions/setting/api/PerSettingApi.java b/sdk/api/api-permissions/src/main/java/io/dataease/api/permissions/setting/api/PerSettingApi.java new file mode 100644 index 0000000000..709ea98609 --- /dev/null +++ b/sdk/api/api-permissions/src/main/java/io/dataease/api/permissions/setting/api/PerSettingApi.java @@ -0,0 +1,17 @@ +package io.dataease.api.permissions.setting.api; + +import io.dataease.api.permissions.setting.vo.PerSettingItemVO; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; + +import java.util.List; + +public interface PerSettingApi { + + @GetMapping("/basic/query") + List basicSetting(); + + @PostMapping("/baisc/save") + void saveBasic(@RequestBody List settings); +} diff --git a/sdk/api/api-permissions/src/main/java/io/dataease/api/permissions/setting/vo/PerSettingItemVO.java b/sdk/api/api-permissions/src/main/java/io/dataease/api/permissions/setting/vo/PerSettingItemVO.java new file mode 100644 index 0000000000..03bcfe4a8b --- /dev/null +++ b/sdk/api/api-permissions/src/main/java/io/dataease/api/permissions/setting/vo/PerSettingItemVO.java @@ -0,0 +1,19 @@ +package io.dataease.api.permissions.setting.vo; + +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.io.Serializable; + +@Data 
+@NoArgsConstructor +public class PerSettingItemVO implements Serializable { + + private String pkey; + + private String pval; + + private String type; + + private Integer sort; +} diff --git a/sdk/common/src/main/java/io/dataease/auth/interceptor/CorsConfig.java b/sdk/common/src/main/java/io/dataease/auth/interceptor/CorsConfig.java index 87b5988a67..35e39b1a3a 100644 --- a/sdk/common/src/main/java/io/dataease/auth/interceptor/CorsConfig.java +++ b/sdk/common/src/main/java/io/dataease/auth/interceptor/CorsConfig.java @@ -1,6 +1,7 @@ package io.dataease.auth.interceptor; import io.dataease.constant.AuthConstant; +import jakarta.annotation.Resource; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Configuration; import org.springframework.web.bind.annotation.RestController; @@ -13,12 +14,16 @@ import java.util.List; @Configuration public class CorsConfig implements WebMvcConfigurer { - @Value("#{'${dataease.origin-list}'.split(',')}") + @Resource(name = "deCorsInterceptor") + private CorsInterceptor corsInterceptor; + + @Value("#{'${dataease.origin-list:http://127.0.0.1:8100}'.split(',')}") private List originList; @Override public void addInterceptors(InterceptorRegistry registry) { - registry.addInterceptor(new CorsInterceptor(originList)).addPathPatterns("/**"); + corsInterceptor.addOriginList(originList); + registry.addInterceptor(corsInterceptor).addPathPatterns("/**"); } @Override diff --git a/sdk/common/src/main/java/io/dataease/auth/interceptor/CorsInterceptor.java b/sdk/common/src/main/java/io/dataease/auth/interceptor/CorsInterceptor.java index 2df181c17d..f37bb2eecb 100644 --- a/sdk/common/src/main/java/io/dataease/auth/interceptor/CorsInterceptor.java +++ b/sdk/common/src/main/java/io/dataease/auth/interceptor/CorsInterceptor.java @@ -1,27 +1,73 @@ package io.dataease.auth.interceptor; +import cn.hutool.core.util.ReflectUtil; +import io.dataease.utils.CommonBeanFactory; import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpServletResponse; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.ObjectUtils; import org.apache.commons.lang3.StringUtils; +import org.springframework.stereotype.Component; import org.springframework.web.servlet.HandlerInterceptor; +import java.util.ArrayList; import java.util.List; +@Component("deCorsInterceptor") public class CorsInterceptor implements HandlerInterceptor { - private List originList; + private final List originList; + + private final List busiOriginList = new ArrayList<>(); + + private Class aClass; + + private Object bean; + public CorsInterceptor(List originList) { this.originList = originList; } + public void addOriginList(List list) { + List strings = list.stream().filter(item -> !originList.contains(item)).toList(); + originList.addAll(strings); + } + + + public void addOriginList() { + String className = "io.dataease.api.permissions.embedded.api.EmbeddedApi"; + String methodName = "domainList"; + if (ObjectUtils.isEmpty(aClass)) { + try { + aClass = Class.forName(className); + } catch (ClassNotFoundException e) { + return; + } + } + if (ObjectUtils.isEmpty(bean)) { + bean = CommonBeanFactory.getBean(aClass); + } + if (ObjectUtils.isNotEmpty(bean)) { + Object result = ReflectUtil.invoke(bean, methodName); + if (ObjectUtils.isNotEmpty(result)) { + List list = (List) result; + if (CollectionUtils.isNotEmpty(list)) { + List strings = list.stream().filter(item -> !busiOriginList.contains(item)).toList(); + 
busiOriginList.addAll(strings); + } + + } + } + } + @Override public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception { - + addOriginList(); String origin = request.getHeader("Origin"); boolean embedded = StringUtils.startsWithAny(request.getRequestURI(), "/assets/", "/js/"); - if ((StringUtils.isNotBlank(origin) && originList.contains(origin)) || embedded) { + if ((StringUtils.isNotBlank(origin) && originList.contains(origin)) || busiOriginList.contains(origin) || embedded) { response.setHeader("Access-Control-Allow-Origin", embedded ? "*" : origin); response.setHeader("Access-Control-Allow-Credentials", "true"); response.setHeader("Access-Control-Allow-Methods", "GET, POST, PUT, PATCH, DELETE, HEAD, OPTIONS"); diff --git a/sdk/common/src/main/java/io/dataease/constant/AuthConstant.java b/sdk/common/src/main/java/io/dataease/constant/AuthConstant.java index 051bc88d8c..7d32607b18 100644 --- a/sdk/common/src/main/java/io/dataease/constant/AuthConstant.java +++ b/sdk/common/src/main/java/io/dataease/constant/AuthConstant.java @@ -18,6 +18,7 @@ public class AuthConstant { public final static String USER_IMPORT_ERROR_KEY = "USER-IMPORT-ERROR-KEY"; public final static String LINK_TOKEN_KEY = "X-DE-LINK-TOKEN"; + public final static String ASK_TOKEN_KEY = "X-DE-ASK-TOKEN"; public final static String DE_EXECUTE_VERSION = "X-DE-EXECUTE-VERSION"; diff --git a/sdk/common/src/main/java/io/dataease/constant/StaticResourceConstants.java b/sdk/common/src/main/java/io/dataease/constant/StaticResourceConstants.java index 4c8756c8e8..a8d42d9d45 100644 --- a/sdk/common/src/main/java/io/dataease/constant/StaticResourceConstants.java +++ b/sdk/common/src/main/java/io/dataease/constant/StaticResourceConstants.java @@ -16,8 +16,10 @@ public class StaticResourceConstants { public static String WORK_DIR = ensureSuffix(USER_HOME, FILE_SEPARATOR) + "static-resource" + FILE_SEPARATOR; public static String MAP_DIR = ensureSuffix(USER_HOME, FILE_SEPARATOR) + "map"; + public static String CUSTOM_MAP_DIR = ensureSuffix(USER_HOME, FILE_SEPARATOR) + "geo"; public static String MAP_URL = "/map"; + public static String GEO_URL = "/geo"; /** * Upload prefix. 
diff --git a/sdk/common/src/main/java/io/dataease/constant/XpackSettingConstants.java b/sdk/common/src/main/java/io/dataease/constant/XpackSettingConstants.java new file mode 100644 index 0000000000..d8e7585343 --- /dev/null +++ b/sdk/common/src/main/java/io/dataease/constant/XpackSettingConstants.java @@ -0,0 +1,6 @@ +package io.dataease.constant; + +public class XpackSettingConstants { + + public static final String AUTO_CREATE_USER = "basic.autoCreateUser"; +} diff --git a/sdk/common/src/main/java/io/dataease/utils/AesUtils.java b/sdk/common/src/main/java/io/dataease/utils/AesUtils.java new file mode 100644 index 0000000000..1dec907316 --- /dev/null +++ b/sdk/common/src/main/java/io/dataease/utils/AesUtils.java @@ -0,0 +1,36 @@ +package io.dataease.utils; + +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.lang3.StringUtils; + +import javax.crypto.BadPaddingException; +import javax.crypto.Cipher; +import javax.crypto.IllegalBlockSizeException; +import javax.crypto.spec.IvParameterSpec; +import javax.crypto.spec.SecretKeySpec; + +import static java.nio.charset.StandardCharsets.UTF_8; + +public class AesUtils { + + public static String aesDecrypt(String src, String secretKey, String iv) { + if (StringUtils.isBlank(secretKey)) { + throw new RuntimeException("secretKey is empty"); + } + try { + byte[] raw = secretKey.getBytes(UTF_8); + SecretKeySpec secretKeySpec = new SecretKeySpec(raw, "AES"); + Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5Padding"); + IvParameterSpec iv1 = new IvParameterSpec(iv.getBytes()); + cipher.init(Cipher.DECRYPT_MODE, secretKeySpec, iv1); + byte[] encrypted1 = Base64.decodeBase64(src); + byte[] original = cipher.doFinal(encrypted1); + return new String(original, UTF_8); + } catch (BadPaddingException | IllegalBlockSizeException e) { + // 解密的原字符串为非加密字符串,则直接返回原字符串 + return src; + } catch (Exception e) { + throw new RuntimeException("decrypt error,please check parameters", e); + } + } +} diff --git a/sdk/common/src/main/java/io/dataease/utils/HttpClientUtil.java b/sdk/common/src/main/java/io/dataease/utils/HttpClientUtil.java index 9fd9e1e168..6f6f0125e2 100755 --- a/sdk/common/src/main/java/io/dataease/utils/HttpClientUtil.java +++ b/sdk/common/src/main/java/io/dataease/utils/HttpClientUtil.java @@ -148,7 +148,7 @@ public class HttpClientUtil { public static String post(String url, String json, HttpClientConfig config) { CloseableHttpClient httpClient = null; try { - buildHttpClient(url); + httpClient = buildHttpClient(url); HttpPost httpPost = new HttpPost(url); if (config == null) { config = new HttpClientConfig(); diff --git a/sdk/common/src/main/java/io/dataease/utils/SystemSettingUtils.java b/sdk/common/src/main/java/io/dataease/utils/SystemSettingUtils.java new file mode 100644 index 0000000000..27676b29b3 --- /dev/null +++ b/sdk/common/src/main/java/io/dataease/utils/SystemSettingUtils.java @@ -0,0 +1,14 @@ +package io.dataease.utils; + +import cn.hutool.core.collection.ListUtil; +import io.dataease.constant.XpackSettingConstants; + +import java.util.List; + +public class SystemSettingUtils { + + public static boolean xpackSetting(String pkey) { + List xpackSettingList = ListUtil.toList(XpackSettingConstants.AUTO_CREATE_USER); + return xpackSettingList.contains(pkey); + } +} diff --git a/sdk/common/src/main/java/io/dataease/utils/WhitelistUtils.java b/sdk/common/src/main/java/io/dataease/utils/WhitelistUtils.java index 0284021338..955cb8e9a4 100644 --- a/sdk/common/src/main/java/io/dataease/utils/WhitelistUtils.java +++ 
b/sdk/common/src/main/java/io/dataease/utils/WhitelistUtils.java @@ -19,6 +19,8 @@ public class WhitelistUtils { "/swagger-resources", "/doc.html", "/panel.html", + "/lark/info", + "/lark/token", "/setting/authentication/status", "/"); @@ -34,6 +36,7 @@ public class WhitelistUtils { || StringUtils.startsWithAny(requestURI, "/static-resource/") || StringUtils.startsWithAny(requestURI, "/share/proxyInfo") || StringUtils.startsWithAny(requestURI, "/xpackComponent/content/") + || StringUtils.startsWithAny(requestURI, "/geo/") || StringUtils.startsWithAny(requestURI, "/map/"); } }