Bug fixes; add list export feature

This commit is contained in:
daiqy88
2025-11-25 14:47:47 +08:00
parent 52f627e7fd
commit 25c44a7cf9
3 changed files with 217 additions and 27 deletions

View File

@@ -18,6 +18,9 @@ public interface FlowMapper {
@Delete("DELETE FROM simulation_flow_template WHERE uuid=#{uuid}")
int deleteFlowTemplate(@Param("uuid") String uuid);
@Update("UPDATE simulation_flow_template SET templateName=#{newName} WHERE templateName=#{oldName}")
int updateFlowTemplateName(@Param("oldName") String oldName,@Param("newName") String newName);
@Select("SELECT * FROM simulation_flow_template WHERE ${condition}")
List<SimulationFlowTemplate> queryFlowTemplateByCondition(@Param("condition") String condition);
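
Note on the new condition query: `${condition}` is spliced into the SQL statement as raw text (unlike `#{}`, which binds a prepared-statement parameter), so callers must build the condition from values the server controls. Below is a minimal caller sketch, not code from this commit; the service class and method names are hypothetical.

// Hypothetical caller sketch (not part of this commit); assumes Spring injection.
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

@Service
public class FlowTemplateService {

    @Autowired
    private FlowMapper flowMapper;

    // Because ${condition} is concatenated into the SQL text, build it only from
    // trusted values; here a single name filter with quotes escaped for MySQL.
    public List<SimulationFlowTemplate> findByName(String templateName) {
        String condition = "templateName = '" + templateName.replace("'", "''") + "'";
        return flowMapper.queryFlowTemplateByCondition(condition);
    }
}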

View File

@@ -1,5 +1,5 @@
server:
port: 7103
port: 7107
spring:
application:
@@ -7,45 +7,40 @@ spring:
datasource:
username: root
password: mysql
jdbc-url: jdbc:mysql://192.168.2.166:3306/sdm_base_line?useUnicode=true&characterEncoding=utf-8&useSSL=true&serverTimezone=Asia/Shanghai
jdbc-url: jdbc:mysql://192.168.65.161:3306/spdm_baseline?useUnicode=true&characterEncoding=utf-8&useSSL=true&serverTimezone=Asia/Shanghai
driver-class-name: com.mysql.cj.jdbc.Driver
hikari:
# Maximum number of connections the pool can hold. Suggested value: CPU cores * 2 + effective disk I/O count; a common rule of thumb is 10-20.
maximum-pool-size: 20
# Minimum number of connections the pool keeps while idle.
minimum-idle: 5
# Maximum time (ms) a connection may stay idle before the pool may evict it.
idle-timeout: 60000 # 1 min
# Maximum lifetime (ms) of a connection from the moment it is created. HikariCP's default of 30 minutes is a very reasonable setting.
max-lifetime: 1800000 # 30 min (Hikari default)
# Maximum time (ms) the application waits when trying to obtain a connection from the pool. Suggested value: 30-60 seconds.
connection-timeout: 30000 # 30s
maximum-pool-size: 450 # maximum number of pool connections (critical!)
minimum-idle: 50 # minimum idle connections (keep in line with the maximum to avoid frequent create/destroy churn)
idle-timeout: 300000 # idle connection timeout, 5 minutes
max-lifetime: 600000 # maximum connection lifetime, 10 minutes
connection-timeout: 30000 # connection acquisition timeout, 30 seconds, to avoid blocking threads
master:
username: root
password: mysql
jdbc-url: jdbc:mysql://192.168.2.166:3306/sdm_base_line?useUnicode=true&characterEncoding=utf-8&useSSL=true&serverTimezone=Asia/Shanghai
jdbc-url: jdbc:mysql://192.168.65.161:3306/spdm_baseline?useUnicode=true&characterEncoding=utf-8&useSSL=true&serverTimezone=Asia/Shanghai
driver-class-name: com.mysql.cj.jdbc.Driver
slave:
username: root
password: mysql
jdbc-url: jdbc:mysql://192.168.2.166:3306/sdm_base_line?useUnicode=true&characterEncoding=utf-8&useSSL=true&serverTimezone=Asia/Shanghai
jdbc-url: jdbc:mysql://192.168.65.161:3306/spdm_baseline?useUnicode=true&characterEncoding=utf-8&useSSL=true&serverTimezone=Asia/Shanghai
driver-class-name: com.mysql.cj.jdbc.Driver
enable: true
cloud:
nacos:
discovery:
server-addr: 192.168.2.166:8848
# server-addr: 127.0.0.1:8848
server-addr: 192.168.65.161:8848
group: DAI_GROUP
# server-addr: 127.0.0.1:8848
enabled: true
namespace: 3
# username: nacos
# password: ENC(+QKYnI6gAYu1SbLaZQTkZA==)
# username: nacos
# password: ENC(+QKYnI6gAYu1SbLaZQTkZA==)
data:
redis:
# Redis has 16 databases (shards) by default; this sets which one to use (default 0)
database: 0
# Redis server address; fill in your own server address
host: 192.168.2.166
host: 192.168.2.161
# Redis port, default 6379
port: 6379
# Redis connection timeout, 10 seconds
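
For reference, the new Hikari settings above correspond roughly to the following programmatic configuration. This is a sketch against the standard com.zaxxer.hikari API, not code from this commit.

import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;

public class DataSourceSketch {
    public static HikariDataSource build() {
        HikariConfig config = new HikariConfig();
        config.setJdbcUrl("jdbc:mysql://192.168.65.161:3306/spdm_baseline?useUnicode=true&characterEncoding=utf-8&useSSL=true&serverTimezone=Asia/Shanghai");
        config.setUsername("root");
        config.setPassword("mysql");
        config.setDriverClassName("com.mysql.cj.jdbc.Driver");
        config.setMaximumPoolSize(450);      // maximum pool size
        config.setMinimumIdle(50);           // minimum idle connections
        config.setIdleTimeout(300_000);      // 5 minutes
        config.setMaxLifetime(600_000);      // 10 minutes
        config.setConnectionTimeout(30_000); // 30 seconds
        return new HikariDataSource(config);
    }
}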

View File

@@ -228,6 +228,23 @@ public class ExcelUtil {
}
}
/**
* Get the Excel header columns
* @param exportExcelFormats
* @return
*/
private static List<HeadVO> getExcelHeader(List<ExportExcelFormat> exportExcelFormats)
{
List<HeadVO> excelHeader = new ArrayList<>();
for(ExportExcelFormat exportExcelFormat : exportExcelFormats)
{
HeadVO headVO = HeadVO.builder().build();
headVO.setKey(exportExcelFormat.getTitle());
excelHeader.add(headVO);
}
return excelHeader;
}
/**
* Export an Excel file without merged cells
* @param dataArray
@@ -238,13 +255,7 @@ public class ExcelUtil {
ExcelSheet excelSheet = new ExcelSheet();
excelSheet.setSheetName("export sheet1");
//get the Excel header
List<HeadVO> excelHeader = new ArrayList<>();
for(ExportExcelFormat exportExcelFormat : exportExcelFormats)
{
HeadVO headVO = HeadVO.builder().build();
headVO.setKey(exportExcelFormat.getTitle());
excelHeader.add(headVO);
}
List<HeadVO> excelHeader = getExcelHeader(exportExcelFormats);
excelSheet.setHeads(excelHeader);
//get the Excel row data
@@ -273,4 +284,185 @@ public class ExcelUtil {
exportExcel(excelSheets,response);
}
static class ParaseData
{
List<String> keyValues = new ArrayList<>(); // cell values matched for this node, in column order
int lines; // number of leaf rows covered by this subtree (propagated up via increaseLine)
ParaseData parent;
List<ParaseData> children = new ArrayList<>();
public void increaseLine()
{
lines++;
if(parent!=null)
{
parent.increaseLine();
}
}
public void addchildren(ParaseData paraseData)
{
paraseData.parent=this;
children.add(paraseData);
}
}
/**
* Parse Excel field data from a JSON object
* @param jsonObject
* @param exportExcelFormats
* @param parent
* @return
*/
private static ParaseData paraseJsonObject(JSONObject jsonObject,List<ExportExcelFormat> exportExcelFormats,ParaseData parent)
{
List<ExportExcelFormat> excelFormats = exportExcelFormats.subList(0,exportExcelFormats.size());
Iterator<ExportExcelFormat> iterator = excelFormats.iterator();
boolean bMatch = false;
ParaseData paraseData = null;
while (iterator.hasNext()) {
ExportExcelFormat format = iterator.next();
String key = format.getKey();
if(jsonObject.containsKey(key))
{
if(!bMatch)
{
bMatch = true;
paraseData = new ParaseData();
if(parent != null)
{
parent.addchildren(paraseData);
}
paraseData.increaseLine();
}
String value = jsonObject.getString(key);
JSONObject dictObj = format.getDictData();
if(dictObj != null)
{
value = dictObj.getString(value);
}
paraseData.keyValues.add(value);
iterator.remove();
}
else
{
JSONArray children = jsonObject.getJSONArray("children");
if(children != null)
{
for(int i=0;i<children.size();i++)
{
JSONObject child = children.getJSONObject(i);
ParaseData parentData = paraseData;
if(parentData == null)
parentData = parent;
paraseJsonObject(child,exportExcelFormats,parentData);
}
}
}
}
return paraseData;
}
/**
* Arrange the ParaseData tree into Excel columns
* @param paraseData
* @param paraseDataMap
* @param beginColumn
* @param beginRow
*/
private static void combineParaseData(ParaseData paraseData,Map<Integer,List<ExcelCellValue>> paraseDataMap,int beginColumn,int beginRow)
{
for(int index=0;index<paraseData.keyValues.size();index++)
{
String key = paraseData.keyValues.get(index);
int columnIndex = beginColumn+index;
List<ExcelCellValue> cellValues = paraseDataMap.get(columnIndex);
if(cellValues==null)
{
cellValues = new ArrayList<>();
paraseDataMap.put(columnIndex,cellValues);
}
boolean isMerged = paraseData.lines > 1;
if(isMerged)
{
for (int rowIndex = 0; rowIndex < paraseData.lines; rowIndex++) {
ExcelCellValue cellValue = new ExcelCellValue();
cellValue.setValue(key);
cellValue.setMerge(true);
cellValue.setFirstRow(beginRow+rowIndex);
cellValue.setLastRow(beginRow+paraseData.lines-1);
cellValues.add(cellValue);
}
}
else
{
ExcelCellValue cellValue = new ExcelCellValue();
cellValue.setValue(key);
cellValue.setMerge(false);
cellValues.add(cellValue);
}
}
int rowNum = 0;
for(ParaseData paseData : paraseData.children)
{
combineParaseData(paseData,paraseDataMap,beginColumn+paraseData.keyValues.size(),rowNum);
if(paseData.lines>1)
{
rowNum += paraseData.lines;
}
else
{
rowNum++;
}
}
}
/**
* Export an Excel file with merged cells
* @param dataArray
* @param exportExcelFormats
* @param response
*/
public static void exportExcelWithMerge(JSONArray dataArray,List<ExportExcelFormat> exportExcelFormats,HttpServletResponse response)
{
ExcelSheet excelSheet = new ExcelSheet();
excelSheet.setSheetName("export sheet1");
List<HeadVO> excelHeader = getExcelHeader(exportExcelFormats);
excelSheet.setHeads(excelHeader);
Map<Integer,List<ExcelCellValue>> paraseDataMap = new HashMap<>();
int rowNum = 0;
for(int index=0;index<dataArray.size();index++)
{
JSONObject jsonObject = dataArray.getJSONObject(index);
ParaseData data = paraseJsonObject(jsonObject,exportExcelFormats,null);
combineParaseData(data,paraseDataMap,0,rowNum);
if(data.lines>1)
{
rowNum += data.lines;
}
else
{
rowNum++;
}
}
int columnSize = paraseDataMap.size();
int rowIndex = 0;
LOOP: while(true)
{
RowValue rowValue = new RowValue();
for (int i = 0; i < columnSize; i++) {
List<ExcelCellValue> cellValues = paraseDataMap.get(i);
if(rowIndex >= cellValues.size()) // stop once a column has no more rows
break LOOP;
rowValue.getCells().add(cellValues.get(rowIndex));
}
excelSheet.getRowValues().add(rowValue);
rowIndex++;
}
List<ExcelSheet> excelSheets = new ArrayList<>();
excelSheets.add(excelSheet);
exportExcel(excelSheets,response);
}
}
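
A minimal call sketch for the new merged-cell export. Per paraseJsonObject above, each record supplies values under the keys declared in ExportExcelFormat.getKey(), with nested records carried in a "children" array; a parent's cells are intended to span the rows produced by its children. The ExportExcelFormat setters and the Fastjson/servlet import paths below are assumptions, not confirmed by this commit.

// Hypothetical usage sketch; ExcelUtil and ExportExcelFormat come from this
// project (package imports omitted), and setKey/setTitle are assumed setters.
import java.util.ArrayList;
import java.util.List;
import com.alibaba.fastjson.JSON;       // may be fastjson2 in the actual project
import com.alibaba.fastjson.JSONArray;
import jakarta.servlet.http.HttpServletResponse; // or javax.servlet, depending on the Boot version

public class FlowTemplateExportDemo {
    public static void export(HttpServletResponse response) {
        // One parent record with two children; the parent's cells are meant to
        // be merged across the child rows by exportExcelWithMerge.
        JSONArray dataArray = JSON.parseArray(
            "[{\"templateName\":\"demo\",\"children\":"
          + "[{\"stepName\":\"step-1\"},{\"stepName\":\"step-2\"}]}]");

        List<ExportExcelFormat> formats = new ArrayList<>();
        ExportExcelFormat name = new ExportExcelFormat();
        name.setKey("templateName");   // JSON key to read (assumed setter)
        name.setTitle("Template");     // Excel header text (assumed setter)
        ExportExcelFormat step = new ExportExcelFormat();
        step.setKey("stepName");
        step.setTitle("Step");
        formats.add(name);
        formats.add(step);

        ExcelUtil.exportExcelWithMerge(dataArray, formats, response);
    }
}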