1. Modify the observed-vs-simulated comparison API of the source-term reconstruction feature
2. Modify the database monitoring service code
3. Modify the logback configuration file of each service
4. Modify the file-upload API of the weather prediction feature
This commit is contained in:
panbaolin 2025-10-21 13:45:14 +08:00
parent 3b797f58c4
commit 9fa131fe9c
32 changed files with 803 additions and 355 deletions

View File

@ -1,21 +1,19 @@
package org.jeecg.common.constant.enums; package org.jeecg.common.constant.enums;
import java.time.Instant;
/** /**
* Prometheus 范围查询条件枚举 * Prometheus 范围查询条件枚举
*/ */
public enum PrometheusQueryTypeEnum { public enum PrometheusDBQueryTypeEnum {
FIVE_MIN("Last 5 minutes",(5*60),"15s"), FIVE_MIN("Last 5 minutes",(5*60),"30s"),
THIRTY_MIN("Last 30 minutes",(30*60),"15s"), THIRTY_MIN("Last 30 minutes",(30*60),"30s"),
ONE_HOUR("Last 1 hour",(60*60),"15s"), ONE_HOUR("Last 1 hour",(60*60),"30s"),
THREE_HOURS("Last 3 hours",(3*60*60),"15s"), THREE_HOURS("Last 3 hours",(3*60*60),"40s"),
SIX_HOURS("Last 6 hours",(6*60*60),"15s"), SIX_HOURS("Last 6 hours",(6*60*60),"2m0s"),
TWELVE_HOURS("Last 12 hours",(12*60*60),"1m0s"), TWELVE_HOURS("Last 12 hours",(12*60*60),"4m0s"),
TWENTY_FOUR_HOURS("Last 24 hours",(24*60*60),"2m0s"), TWENTY_FOUR_HOURS("Last 24 hours",(24*60*60),"4m0s"),
TWO_DAYS("Last 2 days",(2*24*60*60),"5m0s"), TWO_DAYS("Last 2 days",(2*24*60*60),"10m0s"),
SEVEN_DAYS("Last 7 days",(7*24*60*60),"15m0s"); SEVEN_DAYS("Last 7 days",(7*24*60*60),"40m0s");
/** /**
* Prometheus 范围查询条件 * Prometheus 范围查询条件
@ -30,7 +28,7 @@ public enum PrometheusQueryTypeEnum {
*/ */
private String step; private String step;
PrometheusQueryTypeEnum(String conditions, Integer number, String step) { PrometheusDBQueryTypeEnum(String conditions, Integer number, String step) {
this.conditions = conditions; this.conditions = conditions;
this.lastSecond = number; this.lastSecond = number;
this.step = step; this.step = step;
@ -53,8 +51,8 @@ public enum PrometheusQueryTypeEnum {
* @param conditions * @param conditions
* @return * @return
*/ */
public static PrometheusQueryTypeEnum getQueryTypeEnum(String conditions) { public static PrometheusDBQueryTypeEnum getQueryTypeEnum(String conditions) {
for (PrometheusQueryTypeEnum queryTypeEnum : PrometheusQueryTypeEnum.values()) { for (PrometheusDBQueryTypeEnum queryTypeEnum : PrometheusDBQueryTypeEnum.values()) {
if (queryTypeEnum.getConditions().equals(conditions)) { if (queryTypeEnum.getConditions().equals(conditions)) {
return queryTypeEnum; return queryTypeEnum;
} }

View File

@ -0,0 +1,72 @@
package org.jeecg.common.constant.enums;
/**
* Prometheus 范围查询条件枚举
*/
public enum PrometheusHostQueryTypeEnum {
FIVE_MIN("Last 5 minutes",(5*60),"15s","1m0s"),
THIRTY_MIN("Last 30 minutes",(30*60),"15s","1m0s"),
ONE_HOUR("Last 1 hour",(60*60),"15s","1m0s"),
THREE_HOURS("Last 3 hours",(3*60*60),"15s","1m0s"),
SIX_HOURS("Last 6 hours",(6*60*60),"15s","1m0s"),
TWELVE_HOURS("Last 12 hours",(12*60*60),"1m0s","1m15s"),
TWENTY_FOUR_HOURS("Last 24 hours",(24*60*60),"2m0s","2m15s"),
TWO_DAYS("Last 2 days",(2*24*60*60),"5m0s","2m15s"),
SEVEN_DAYS("Last 7 days",(7*24*60*60),"15m0s","15m15s");
/**
* Prometheus 范围查询条件
*/
private String conditions;
/**
* Prometheus 范围起始时间需要减的值
*/
private long lastSecond;
/**
* 间隔步长
*/
private String step;
/**
* 范围向量的时间窗口
*/
private String exprTime;
PrometheusHostQueryTypeEnum(String conditions, Integer number, String step,String exprTime) {
this.conditions = conditions;
this.lastSecond = number;
this.step = step;
this.exprTime = exprTime;
}
public String getConditions() {
return conditions;
}
public long getLastSecond() {
return lastSecond;
}
public String getStep() {
return step;
}
public String getExprTime() {
return exprTime;
}
/**
* 返回对应枚举
* @param conditions
* @return
*/
public static PrometheusHostQueryTypeEnum getQueryTypeEnum(String conditions) {
for (PrometheusHostQueryTypeEnum queryTypeEnum : PrometheusHostQueryTypeEnum.values()) {
if (queryTypeEnum.getConditions().equals(conditions)) {
return queryTypeEnum;
}
}
return null;
}
}
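
For reference, a minimal usage sketch of this enum when assembling a Prometheus range query; the conditions string and node-exporter instance below are illustrative assumptions, and the real call sites are in HostMonitorServiceImpl further down:

    PrometheusHostQueryTypeEnum type = PrometheusHostQueryTypeEnum.getQueryTypeEnum("Last 1 hour");
    if (type != null) {
        long end = java.time.Instant.now().getEpochSecond();
        long start = end - type.getLastSecond();      // range start derived from the enum
        String step = type.getStep();                 // sampling step for /api/v1/query_range
        String exprTime = type.getExprTime();         // window of the range vector inside the PromQL expression
        String cpuQuery = "100 * (1 - avg(rate(node_cpu_seconds_total{mode=\"idle\", instance=\"192.168.1.10:9100\"}[" + exprTime + "])))";
        // GET <prometheus>/api/v1/query_range?query=<cpuQuery>&start=<start>&end=<end>&step=<step>
    }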

View File

@ -23,4 +23,34 @@ public class PrometheusServerProperties {
* 监测的网卡名称 * 监测的网卡名称
*/ */
private String networkCardName; private String networkCardName;
/**
* pg数据库主机
*/
private String pgHost;
/**
* postgresql数据库端口
*/
private Integer pgDBPort;
/**
* postgresql管理员用户名称
*/
private String pgAdmin;
/**
* postgresql管理员用户密码
*/
private String pgAdminPwd;
/**
* 对于活跃连接数监控的哪些数据库
*/
private String monitorDBNames;
/**
* 数据库实例地址
*/
private String dbInstance;
} }

View File

@ -31,14 +31,13 @@ public class SourceRebuildMonitoringData implements Serializable {
/** /**
* 任务ID * 任务ID
*/ */
@Excel(name = "任务主键", width = 10,height = 20,orderNum="0")
@TableField(value = "task_id") @TableField(value = "task_id")
private Integer taskId; private Integer taskId;
/** /**
* 台站名称 * 台站名称
*/ */
@Excel(name = "台站名称", width = 20,height = 20,orderNum="1") @Excel(name = "台站名称", width = 20,height = 20,orderNum="0")
@NotBlank(message = "台站名称不能为空", groups = {InsertGroup.class, UpdateGroup.class}) @NotBlank(message = "台站名称不能为空", groups = {InsertGroup.class, UpdateGroup.class})
@TableField(value = "station") @TableField(value = "station")
private String station; private String station;
@ -46,7 +45,7 @@ public class SourceRebuildMonitoringData implements Serializable {
/** /**
* 核素名称 * 核素名称
*/ */
@Excel(name = "核素名称", width = 20,height = 20,orderNum="2") @Excel(name = "核素名称", width = 20,height = 20,orderNum="1")
@NotBlank(message = "核素名称不能为空", groups = {InsertGroup.class, UpdateGroup.class}) @NotBlank(message = "核素名称不能为空", groups = {InsertGroup.class, UpdateGroup.class})
@TableField(value = "nuclide") @TableField(value = "nuclide")
private String nuclide; private String nuclide;
@ -54,7 +53,7 @@ public class SourceRebuildMonitoringData implements Serializable {
/** /**
* 测量停止时间 * 测量停止时间
*/ */
@Excel(name = "测量停止时间", width = 25,height = 20,format = "yyyy-MM-dd HH:mm:ss",orderNum="3") @Excel(name = "测量停止时间", width = 25,height = 20,format = "yyyy-MM-dd HH:mm:ss",orderNum="2")
@NotNull(message = "测量停止时间不能为空", groups = {InsertGroup.class, UpdateGroup.class}) @NotNull(message = "测量停止时间不能为空", groups = {InsertGroup.class, UpdateGroup.class})
@TableField(value = "collect_stop") @TableField(value = "collect_stop")
@JsonFormat(timezone = "GMT+8", pattern = "yyyy-MM-dd HH:mm:ss") @JsonFormat(timezone = "GMT+8", pattern = "yyyy-MM-dd HH:mm:ss")
@ -63,7 +62,7 @@ public class SourceRebuildMonitoringData implements Serializable {
/** /**
* 活度浓度 * 活度浓度
*/ */
@Excel(name = "活度浓度", width = 25,height = 20,orderNum="4") @Excel(name = "活度浓度", width = 25,height = 20,orderNum="3")
@NotBlank(message = "活度浓度不能为空", groups = {InsertGroup.class, UpdateGroup.class}) @NotBlank(message = "活度浓度不能为空", groups = {InsertGroup.class, UpdateGroup.class})
@TableField(value = "activity") @TableField(value = "activity")
private String activity; private String activity;
@ -71,7 +70,7 @@ public class SourceRebuildMonitoringData implements Serializable {
/** /**
* 不确定度 * 不确定度
*/ */
@Excel(name = "不确定度", width = 25,height = 20,orderNum="5") @Excel(name = "不确定度", width = 25,height = 20,orderNum="4")
@NotBlank(message = "不确定度不能为空", groups = {InsertGroup.class, UpdateGroup.class}) @NotBlank(message = "不确定度不能为空", groups = {InsertGroup.class, UpdateGroup.class})
@TableField(value = "uncertainty") @TableField(value = "uncertainty")
private String uncertainty; private String uncertainty;
@ -79,7 +78,7 @@ public class SourceRebuildMonitoringData implements Serializable {
/** /**
* mdc * mdc
*/ */
@Excel(name = "MDC", width = 25,height = 20,orderNum="6") @Excel(name = "MDC", width = 25,height = 20,orderNum="5")
@NotBlank(message = "mdc不能为空", groups = {InsertGroup.class, UpdateGroup.class}) @NotBlank(message = "mdc不能为空", groups = {InsertGroup.class, UpdateGroup.class})
@TableField(value = "mdc") @TableField(value = "mdc")
private String mdc; private String mdc;

View File

@ -83,8 +83,9 @@ public class SourceRebuildTaskController{
@AutoLog(value = "启动任务") @AutoLog(value = "启动任务")
@Operation(summary = "启动任务") @Operation(summary = "启动任务")
@PutMapping("runTask") @PutMapping("runTask")
public Result<?> runTask(@NotNull(message = "任务ID不能为空") Integer taskId){ public Result<?> runTask(@RequestBody @Validated(value = UpdateGroup.class) SourceRebuildTask sourceRebuildTask){
sourceRebuildTaskService.runTask(taskId); sourceRebuildTaskService.update(sourceRebuildTask);
sourceRebuildTaskService.runTask(sourceRebuildTask.getId());
return Result.OK(); return Result.OK();
} }
} }

View File

@ -63,7 +63,8 @@ public class TaskMonitoringDataController {
@AutoLog(value = "导入源项重建监测数据") @AutoLog(value = "导入源项重建监测数据")
@Operation(summary = "导入源项重建监测数据") @Operation(summary = "导入源项重建监测数据")
@PostMapping("importTaskMonitoringData") @PostMapping("importTaskMonitoringData")
public Result<?> importTaskMonitoringData(HttpServletRequest request, HttpServletResponse response){ public Result<?> importTaskMonitoringData(HttpServletRequest request){
Integer taskId = Integer.valueOf(request.getParameter("taskId"));
MultipartHttpServletRequest multipartRequest = (MultipartHttpServletRequest) request; MultipartHttpServletRequest multipartRequest = (MultipartHttpServletRequest) request;
Map<String, MultipartFile> fileMap = multipartRequest.getFileMap(); Map<String, MultipartFile> fileMap = multipartRequest.getFileMap();
for (Map.Entry<String, MultipartFile> entity : fileMap.entrySet()) { for (Map.Entry<String, MultipartFile> entity : fileMap.entrySet()) {
@ -76,8 +77,7 @@ public class TaskMonitoringDataController {
try { try {
ExcelImportCheckUtil.check(file.getInputStream(), SourceRebuildMonitoringData.class, params); ExcelImportCheckUtil.check(file.getInputStream(), SourceRebuildMonitoringData.class, params);
List<SourceRebuildMonitoringData> list = ExcelImportUtil.importExcel(file.getInputStream(), SourceRebuildMonitoringData.class, params); List<SourceRebuildMonitoringData> list = ExcelImportUtil.importExcel(file.getInputStream(), SourceRebuildMonitoringData.class, params);
System.out.println(list); monitoringDataService.cteate(taskId,list);
monitoringDataService.cteate(list);
return Result.ok("文件导入成功!"); return Result.ok("文件导入成功!");
} catch (Exception e) { } catch (Exception e) {
log.error(e.getMessage(), e); log.error(e.getMessage(), e);
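
A hedged client-side sketch for the changed import signature, where taskId now travels with the multipart request and the unused HttpServletResponse parameter is gone; the host, port and request-mapping prefix are assumptions, since the class-level @RequestMapping is not visible in this diff:

    RestTemplate restTemplate = new RestTemplate();
    MultiValueMap<String, Object> body = new LinkedMultiValueMap<>();
    body.add("file", new FileSystemResource("monitoring-data.xlsx"));   // Excel parsed via ExcelImportUtil on the server
    HttpHeaders headers = new HttpHeaders();
    headers.setContentType(MediaType.MULTIPART_FORM_DATA);
    ResponseEntity<String> resp = restTemplate.postForEntity(
            "http://localhost:8080/taskMonitoringData/importTaskMonitoringData?taskId=1",   // taskId read via request.getParameter
            new HttpEntity<>(body, headers),
            String.class);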

View File

@ -12,8 +12,9 @@ public interface SourceRebuildMonitoringDataService extends IService<SourceRebui
/** /**
* 保存源项重建任务监测数据 * 保存源项重建任务监测数据
* @param monitoringDatas * @param monitoringDatas
* @param taskId
*/ */
void cteate(List<SourceRebuildMonitoringData> monitoringDatas); void cteate(Integer taskId,List<SourceRebuildMonitoringData> monitoringDatas);
/** /**
* 删除源项重建任务监测数据 * 删除源项重建任务监测数据

View File

@ -1,5 +1,6 @@
package org.jeecg.service.impl; package org.jeecg.service.impl;
import cn.hutool.core.collection.CollUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
@ -19,12 +20,21 @@ public class SourceRebuildMonitoringDataServiceImpl extends ServiceImpl<SourceRe
/** /**
* 保存源项重建任务监测数据 * 保存源项重建任务监测数据
*
* @param monitoringDatas * @param monitoringDatas
* @param taskId
*/ */
@Transactional(rollbackFor = RuntimeException.class) @Transactional(rollbackFor = RuntimeException.class)
@Override @Override
public void cteate(List<SourceRebuildMonitoringData> monitoringDatas) { public void cteate(Integer taskId,List<SourceRebuildMonitoringData> monitoringDatas) {
//先删除旧的然后保存新的监测数据
LambdaQueryWrapper<SourceRebuildMonitoringData> queryWrapper = new LambdaQueryWrapper<>();
queryWrapper.eq(SourceRebuildMonitoringData::getTaskId, taskId);
this.remove(queryWrapper);
if(CollUtil.isNotEmpty(monitoringDatas)){
for(SourceRebuildMonitoringData monitoringData : monitoringDatas){
monitoringData.setTaskId(taskId);
}
}
this.saveBatch(monitoringDatas); this.saveBatch(monitoringDatas);
} }

View File

@ -179,15 +179,15 @@ public class SourceRebuildTaskServiceImpl extends ServiceImpl<SourceRebuildTaskM
throw new RuntimeException("监测数据为空,请补充监测数据"); throw new RuntimeException("监测数据为空,请补充监测数据");
} }
SourceRebuildTaskExec sourceRebuildTaskExec = new SourceRebuildTaskExec(); // SourceRebuildTaskExec sourceRebuildTaskExec = new SourceRebuildTaskExec();
sourceRebuildTaskExec.init( // sourceRebuildTaskExec.init(
systemStorageProperties, // systemStorageProperties,
taskMonitoringDatas, // taskMonitoringDatas,
sourceRebuildTask, // sourceRebuildTask,
sourceRebuildTaskLogService, // sourceRebuildTaskLogService,
sourceRebuildParams,this); // sourceRebuildParams,this);
sourceRebuildTaskExec.setName(sourceRebuildTask.getId()+"_任务执行线程"); // sourceRebuildTaskExec.setName(sourceRebuildTask.getId()+"_任务执行线程");
sourceRebuildTaskExec.start(); // sourceRebuildTaskExec.start();
} }
/** /**

View File

@ -124,6 +124,16 @@ public class TaskResultDataServiceImpl implements TaskResultDataService {
if (mdcResult.isNumeric()) { if (mdcResult.isNumeric()) {
activityConcComparResult.setMdcValues(mdcResult.asDoubles()); activityConcComparResult.setMdcValues(mdcResult.asDoubles());
} }
String obsErrorRdsPath = task.getResultAddress()+"/obs_error.RDS";
REXP obsErrorResult = conn.eval("readRDS('" + obsErrorRdsPath + "')");
if (obsErrorResult.isNumeric()) {
activityConcComparResult.setObsErrorValues(obsErrorResult.asDoubles());
}
String modErrorRdsPath = task.getResultAddress()+"/mod_error.RDS";
REXP modErrorResult = conn.eval("readRDS('" + modErrorRdsPath + "')");
if (modErrorResult.isNumeric()) {
activityConcComparResult.setModErrorValues(modErrorResult.asDoubles());
}
String stationsTxtPath = task.getResultAddress()+"/statnames.txt"; String stationsTxtPath = task.getResultAddress()+"/statnames.txt";
REXP stationsResult = conn.eval("readLines('" + stationsTxtPath + "')"); REXP stationsResult = conn.eval("readLines('" + stationsTxtPath + "')");
if (stationsResult.isString()) { if (stationsResult.isString()) {
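
The two new readRDS calls assume obs_error.RDS and mod_error.RDS are always produced. A minimal defensive variant (an assumption on my part, not part of this commit) checks for the file on the R side first so a missing file does not abort the whole result assembly; conn, task and activityConcComparResult are the same objects as in the surrounding code:

    String obsErrorRdsPath = task.getResultAddress() + "/obs_error.RDS";
    REXP obsErrorExists = conn.eval("file.exists('" + obsErrorRdsPath + "')");
    if (obsErrorExists.asInteger() == 1) {                     // R logical TRUE maps to 1
        REXP obsErrorResult = conn.eval("readRDS('" + obsErrorRdsPath + "')");
        if (obsErrorResult.isNumeric()) {
            activityConcComparResult.setObsErrorValues(obsErrorResult.asDoubles());
        }
    }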

View File

@ -22,6 +22,17 @@ public class ActivityConcComparResult implements Serializable {
* mdc值 * mdc值
*/ */
private double[] mdcValues; private double[] mdcValues;
/**
* 观测值不确定度
*/
private double[] obsErrorValues;
/**
* 模拟值不确定度
*/
private double[] modErrorValues;
/** /**
* 台站列表 * 台站列表
*/ */

View File

@ -1,54 +1,81 @@
//package org.jeecg.modules.monitor.controller; package org.jeecg.modules.monitor.controller;
//
//import lombok.RequiredArgsConstructor; import jakarta.validation.constraints.NotBlank;
//import org.jeecg.common.api.vo.Result; import lombok.RequiredArgsConstructor;
//import org.jeecg.common.aspect.annotation.AutoLog; import org.jeecg.common.api.vo.Result;
//import org.jeecg.modules.monitor.service.DatabaseMonitorService; import org.jeecg.common.aspect.annotation.AutoLog;
//import org.springframework.web.bind.annotation.GetMapping; import org.jeecg.modules.monitor.service.DatabaseMonitorService;
//import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.validation.annotation.Validated;
//import org.springframework.web.bind.annotation.RestController; import org.springframework.web.bind.annotation.GetMapping;
// import org.springframework.web.bind.annotation.RequestMapping;
//@RestController import org.springframework.web.bind.annotation.RestController;
//@RequestMapping("monitor")
//@RequiredArgsConstructor @Validated
//public class DatabaseMonitorController { @RestController
// @RequestMapping("monitor")
// private final DatabaseMonitorService databaseMonitorService; @RequiredArgsConstructor
// public class DatabaseMonitorController {
// @AutoLog(value = "获取CPU信息")
// @GetMapping("getCpuInfo") private final DatabaseMonitorService databaseMonitorService;
// public Result<?> getCpuInfo() {
// return Result.OK(databaseMonitorService.getCpuInfo()); @AutoLog(value = "查询当前时刻的CPU使用信息")
// } @GetMapping("getDBCpuInfo")
// public Result<?> getDBCpuInfo() {
// @AutoLog(value = "获取内存监测数据") return Result.OK(databaseMonitorService.getCpuInfo());
// @GetMapping("getMemoryInfo") }
// public Result<?> getMemoryInfo() {
// return Result.OK(databaseMonitorService.getMemoryInfo()); @AutoLog(value = "查询过去指定时间范围内的CPU使用信息")
// } @GetMapping("getDBCpuInfoList")
// public Result<?> getDBCpuInfoList(@NotBlank(message = "查询条件不能为空") String conditions) {
// @AutoLog(value = "获取表空间信息") return Result.OK(databaseMonitorService.getCpuInfoList(conditions));
// @GetMapping("getTableSpaceInfo") }
// public Result<?> getNetworkInfo() {
// return Result.OK(databaseMonitorService.getTableSpaceInfo()); @AutoLog(value = "查询当前时刻的内存使用信息")
// } @GetMapping("getDBMemoryInfo")
// public Result<?> getDBMemoryInfo() {
// @AutoLog(value = "获取活跃连接数信息") return Result.OK(databaseMonitorService.getDBMemoryInfo());
// @GetMapping("getActiveSessionInfo") }
// public Result<?> getDiskInfo() {
// return Result.OK(databaseMonitorService.getActiveSessionInfo()); @AutoLog(value = "查询过去指定时间范围内的内存使用信息")
// } @GetMapping("getDBMemoryInfoList")
// public Result<?> getDBMemoryInfoList(@NotBlank(message = "查询条件不能为空") String conditions) {
// @AutoLog(value = "获取死锁信息") return Result.OK(databaseMonitorService.getMemoryInfoList(conditions));
// @GetMapping("getDeadlocksInfo") }
// public Result<?> getDeadlocksInfo() {
// return Result.OK(databaseMonitorService.getDeadlocksInfo()); @AutoLog(value = "查询数据库表空间信息")
// } @GetMapping("getDBTableSpaceInfo")
// public Result<?> getDBTableSpaceInfo() {
// @AutoLog(value = "获取数据库占比信息") return Result.OK(databaseMonitorService.getTableSpaceInfo());
// @GetMapping("getDatabaseProportionInfo") }
// public Result<?> getDatabaseProportionInfo() {
// return Result.OK(databaseMonitorService.getDatabaseProportionInfo()); @AutoLog(value = "查询当前时刻的数据库活跃连接数使用信息")
// } @GetMapping("getDBActiveSessionInfo")
// public Result<?> getDBActiveSessionInfo() {
//} return Result.OK(databaseMonitorService.getActiveSessionInfo());
}
@AutoLog(value = "查询过去指定时间范围内的数据库活跃连接数信息")
@GetMapping("getDBActiveSessionInfoList")
public Result<?> getDBActiveSessionInfoList(@NotBlank(message = "查询条件不能为空") String conditions) {
return Result.OK(databaseMonitorService.getActiveSessionInfoList(conditions));
}
@AutoLog(value = "查询当前时刻的数据库死锁信息")
@GetMapping("getDBDeadlocksInfo")
public Result<?> getDeadlocksInfo() {
return Result.OK(databaseMonitorService.getDeadlocksInfo());
}
@AutoLog(value = "查询过去指定时间范围内的数据库死锁信息")
@GetMapping("getDeadlocksInfoList")
public Result<?> getDeadlocksInfoList(@NotBlank(message = "查询条件不能为空") String conditions) {
return Result.OK(databaseMonitorService.getDeadlocksInfoList(conditions));
}
@AutoLog(value = "查询数据库占比信息")
@GetMapping("getDBProportionInfo")
public Result<?> getDatabaseProportionInfo() {
return Result.OK(databaseMonitorService.getDatabaseProportionInfo());
}
}

View File

@ -20,8 +20,8 @@ public class ServiceMonitorController {
@AutoLog(value = "查询当前时刻的CPU使用率") @AutoLog(value = "查询当前时刻的CPU使用率")
@GetMapping("getCpuInfo") @GetMapping("getCpuInfo")
public Result<?> getCpuInfo() { public Result<?> getCpuInfo(@NotBlank(message = "查询条件不能为空") String conditions) {
return Result.OK(hostMonitorService.getCpuInfo()); return Result.OK(hostMonitorService.getCpuInfo(conditions));
} }
@AutoLog(value = "查询过去指定时间范围内的CPU使用率") @AutoLog(value = "查询过去指定时间范围内的CPU使用率")
@ -56,8 +56,8 @@ public class ServiceMonitorController {
@AutoLog(value = "获取网络带宽监测数据") @AutoLog(value = "获取网络带宽监测数据")
@GetMapping("getNetworkInfo") @GetMapping("getNetworkInfo")
public Result<?> getNetworkInfo() { public Result<?> getNetworkInfo(@NotBlank(message = "查询条件不能为空") String conditions) {
return Result.OK(hostMonitorService.getNetworkInfo()); return Result.OK(hostMonitorService.getNetworkInfo(conditions));
} }
@AutoLog(value = "获取网络带宽监测数据") @AutoLog(value = "获取网络带宽监测数据")

View File

@ -12,5 +12,5 @@ public interface DatabaseMonitorMapper extends BaseMapper {
"pg_database_size(datname) AS dataSize " + "pg_database_size(datname) AS dataSize " +
"FROM pg_database " + "FROM pg_database " +
"WHERE datistemplate = false") "WHERE datistemplate = false")
List<DatabaseProportionInfo> getActiveSessionInfo(); List<DatabaseProportionInfo> getDatabaseProportionInfo();
} }

View File

@ -9,31 +9,54 @@ public interface DatabaseMonitorService {
/** /**
* 获取CPU信息 * 获取CPU信息
* @return
*/ */
Map<String,Object> getCpuInfo(); Map<String, Object> getCpuInfo();
/**
* 获取CPU信息列表
*/
List<Map<String, Object>> getCpuInfoList(String conditions);
/** /**
* 获取内存信息 * 获取内存信息
*/ */
Map<String,Object> getMemoryInfo(); Map<String, Object> getDBMemoryInfo();
/**
* 获取内存信息列表
*/
List<Map<String, Object>> getMemoryInfoList(String conditions);
/** /**
* 获取表空间信息 * 获取表空间信息
* @return * @return
*/ */
Map<String,Object> getTableSpaceInfo(); List<Map<String, Object>> getTableSpaceInfo();
/** /**
* 获取活跃连接数信息 * 获取活跃连接数信息
* @return * @return
*/ */
Map<String,Object> getActiveSessionInfo(); Map<String, Object> getActiveSessionInfo();
/**
* 获取活跃连接数信息列表
* @return
*/
List<Map<String,Object>> getActiveSessionInfoList(String conditions);
/** /**
* 获取死锁信息 * 获取死锁信息
* @return * @return
*/ */
Map<String,Object> getDeadlocksInfo(); Map<String, Object> getDeadlocksInfo();
/**
* 获取死锁信息列表
* @return
*/
List<Map<String,Object>> getDeadlocksInfoList(String conditions);
/** /**
* 获取数据库占比信息 * 获取数据库占比信息

View File

@ -9,7 +9,7 @@ public interface HostMonitorService {
/** /**
* 获取CPU信息 * 获取CPU信息
*/ */
Map<String,Object> getCpuInfo(); Map<String,Object> getCpuInfo(String conditions);
/** /**
* 获取CPU信息列表 * 获取CPU信息列表
@ -40,7 +40,7 @@ public interface HostMonitorService {
/** /**
* 获取网络信息 * 获取网络信息
*/ */
Map<String,Object> getNetworkInfo(); Map<String,Object> getNetworkInfo(String conditions);
/** /**
* 获取网络信息列表 * 获取网络信息列表

View File

@ -1,33 +1,200 @@
package org.jeecg.modules.monitor.service.impl; package org.jeecg.modules.monitor.service.impl;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.date.DateUtil;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.jeecg.common.constant.enums.PrometheusDBQueryTypeEnum;
import org.jeecg.common.constant.enums.PrometheusHostQueryTypeEnum;
import org.jeecg.common.properties.PrometheusServerProperties;
import org.jeecg.modules.monitor.mapper.DatabaseMonitorMapper; import org.jeecg.modules.monitor.mapper.DatabaseMonitorMapper;
import org.jeecg.modules.monitor.service.DatabaseMonitorService; import org.jeecg.modules.monitor.service.DatabaseMonitorService;
import org.jeecg.modules.monitor.vo.DatabaseProportionInfo; import org.jeecg.modules.monitor.vo.DatabaseProportionInfo;
import org.jeecg.modules.monitor.vo.PrometheusResponse;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import java.util.List; import org.springframework.web.reactive.function.client.WebClient;
import java.util.Map; import org.springframework.web.util.UriComponentsBuilder;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.net.URI;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.time.Instant;
import java.util.*;
@Slf4j
@Service @Service
@RequiredArgsConstructor @RequiredArgsConstructor
public class DatabaseMonitorServiceImpl implements DatabaseMonitorService { public class DatabaseMonitorServiceImpl implements DatabaseMonitorService {
private final WebClient webClient;
private final DatabaseMonitorMapper databaseMonitorMapper; private final DatabaseMonitorMapper databaseMonitorMapper;
private final PrometheusServerProperties serverProperties;
/** /**
* 获取CPU信息 * 获取CPU信息
*
* @return
*/ */
@Override @Override
public Map<String, Object> getCpuInfo() { public Map<String, Object> getCpuInfo() {
return Map.of(); Map<String,Object> result = new HashMap<>();
try {
//Prometheus 服务器地址
String url = serverProperties.getServerUrl();
//目标主机实例node-exporter 的地址
String instance = serverProperties.getDbInstance();
//查询CPU使用时间
String cpuQuery = "avg(rate(process_cpu_seconds_total{release=\"\", instance=\""+instance+"\"}[5m]) * 1000)";
PrometheusResponse response = webClient.get()
.uri(buildUri(url,cpuQuery))
.retrieve()
.bodyToMono(PrometheusResponse.class)
.block();
if(Objects.nonNull(response) &&
Objects.nonNull(response.getData()) &&
CollUtil.isNotEmpty(response.getData().getResult())
) {
PrometheusResponse.Result cpuInfo = response.getData().getResult().get(0);
if(CollUtil.isNotEmpty(cpuInfo.getValue())) {
Date date = new Date(cpuInfo.getValue().get(0).longValue()*1000);
Double useRate = BigDecimal.valueOf(cpuInfo.getValue().get(1)).setScale(2, RoundingMode.HALF_UP).doubleValue();
result.put("date", DateUtil.format(date, "MM/dd HH:mm:ss"));
result.put("cpuTime", useRate);
}
}
}catch (Exception e){
log.error("获取数据库CPU使用时间信息错误请检查Prometheus服务是否正常启动或Java请求参数是否正确,详细堆栈错误为:{}",e.getMessage());
}
return result;
}
/**
* 获取CPU信息列表
*/
@Override
public List<Map<String, Object>> getCpuInfoList(String conditions) {
List<Map<String, Object>> result = new ArrayList<>();
try {
//Prometheus 服务器地址
String url = serverProperties.getServerUrl();
//目标主机实例node-exporter 的地址
String instance = serverProperties.getDbInstance();
//查询数据库CPU使用
String cpuInfoQuery = "avg(rate(process_cpu_seconds_total{release=\"\", instance=\""+instance+"\"}[5m]) * 1000)";
PrometheusDBQueryTypeEnum queryTypeEnum = PrometheusDBQueryTypeEnum.getQueryTypeEnum(conditions);
long end = Instant.now().getEpochSecond();
long start = end - queryTypeEnum.getLastSecond();
String step = queryTypeEnum.getStep();
PrometheusResponse response = webClient.get()
.uri(buildUri(url,cpuInfoQuery,start,end,step))
.retrieve()
.bodyToMono(PrometheusResponse.class)
.block();
if(Objects.nonNull(response) &&
Objects.nonNull(response.getData()) &&
CollUtil.isNotEmpty(response.getData().getResult())
) {
PrometheusResponse.Result activeSessionInfoList = response.getData().getResult().get(0);
if(CollUtil.isNotEmpty(activeSessionInfoList.getValues())) {
List<List<Double>> pointDatas = activeSessionInfoList.getValues();
for(List<Double> pointData : pointDatas) {
Map<String,Object> pointDataMap = new HashMap<>();
Date date = new Date(pointData.get(0).longValue()*1000);
Double cpuTime = BigDecimal.valueOf(pointData.get(1)).setScale(2, RoundingMode.HALF_UP).doubleValue();
pointDataMap.put("date", DateUtil.format(date, "MM/dd HH:mm:ss"));
pointDataMap.put("cpuTime", cpuTime);
result.add(pointDataMap);
}
}
}
}catch (Exception e){
log.error("获取数据库CPU使用时间信息错误请检查Prometheus服务是否正常启动或Java请求参数是否正确,详细堆栈错误为:{}",e.getMessage());
}
return result;
} }
/** /**
* 获取内存信息 * 获取内存信息
*/ */
@Override @Override
public Map<String, Object> getMemoryInfo() { public Map<String, Object> getDBMemoryInfo() {
return Map.of(); Map<String,Object> result = new HashMap<>();
try {
//Prometheus 服务器地址
String url = serverProperties.getServerUrl();
//目标主机实例node-exporter 的地址
String instance = serverProperties.getDbInstance();
//查询内存利用率
String cpuQuery = "avg(rate(process_resident_memory_bytes{release=\"\", instance=\""+instance+"\"}[5m]))";
PrometheusResponse response = webClient.get()
.uri(buildUri(url,cpuQuery))
.retrieve()
.bodyToMono(PrometheusResponse.class)
.block();
if(Objects.nonNull(response) &&
Objects.nonNull(response.getData()) &&
CollUtil.isNotEmpty(response.getData().getResult())
) {
PrometheusResponse.Result memInfo = response.getData().getResult().get(0);
if(CollUtil.isNotEmpty(memInfo.getValue())) {
Date date = new Date(memInfo.getValue().get(0).longValue()*1000);
Double useRate = BigDecimal.valueOf(memInfo.getValue().get(1)).setScale(2, RoundingMode.HALF_UP).doubleValue();
result.put("date", DateUtil.format(date, "MM/dd HH:mm:ss"));
result.put("memUsage", useRate);
}
}
}catch (Exception e){
log.error("获取数据库内存使用情况信息错误请检查Prometheus服务是否正常启动或Java请求参数是否正确,详细堆栈错误为:{}",e.getMessage());
}
return result;
}
/**
* 获取内存信息列表
*/
@Override
public List<Map<String, Object>> getMemoryInfoList(String conditions) {
List<Map<String, Object>> result = new ArrayList<>();
try {
//Prometheus 服务器地址
String url = serverProperties.getServerUrl();
//目标主机实例node-exporter 的地址
String instance = serverProperties.getDbInstance();
//查询数据库内存使用
String cpuInfoQuery = "avg(rate(process_resident_memory_bytes{release=\"\", instance=\""+instance+"\"}[5m]))";
PrometheusDBQueryTypeEnum queryTypeEnum = PrometheusDBQueryTypeEnum.getQueryTypeEnum(conditions);
long end = Instant.now().getEpochSecond();
long start = end - queryTypeEnum.getLastSecond();
String step = queryTypeEnum.getStep();
PrometheusResponse response = webClient.get()
.uri(buildUri(url,cpuInfoQuery,start,end,step))
.retrieve()
.bodyToMono(PrometheusResponse.class)
.block();
if(Objects.nonNull(response) &&
Objects.nonNull(response.getData()) &&
CollUtil.isNotEmpty(response.getData().getResult())
) {
PrometheusResponse.Result activeSessionInfoList = response.getData().getResult().get(0);
if(CollUtil.isNotEmpty(activeSessionInfoList.getValues())) {
List<List<Double>> pointDatas = activeSessionInfoList.getValues();
for(List<Double> pointData : pointDatas) {
Map<String,Object> pointDataMap = new HashMap<>();
Date date = new Date(pointData.get(0).longValue()*1000);
Double memUsage = BigDecimal.valueOf(pointData.get(1)).setScale(2, RoundingMode.HALF_UP).doubleValue();
pointDataMap.put("date", DateUtil.format(date, "MM/dd HH:mm:ss"));
pointDataMap.put("memUsage", memUsage);
result.add(pointDataMap);
}
}
}
}catch (Exception e){
log.error("获取数据库内存使用情况信息错误请检查Prometheus服务是否正常启动或Java请求参数是否正确,详细堆栈错误为:{}",e.getMessage());
}
return result;
} }
/** /**
@ -36,8 +203,31 @@ public class DatabaseMonitorServiceImpl implements DatabaseMonitorService {
* @return * @return
*/ */
@Override @Override
public Map<String, Object> getTableSpaceInfo() { public List<Map<String, Object>> getTableSpaceInfo() {
return Map.of(); List<Map<String, Object>> results = new ArrayList<>();
String url = "jdbc:postgresql://"+serverProperties.getPgHost()+":"+serverProperties.getPgDBPort()+"/"+serverProperties.getPgAdmin()+"?stringtype=unspecified&tcpKeepAlive=true&ApplicationName=YourAppName";
String user = serverProperties.getPgAdmin();
String password = serverProperties.getPgAdminPwd();
String sql = "" +
"SELECT " +
" spcname AS tablespace_name," +
" pg_size_pretty(pg_tablespace_size(spcname)) AS size " +
"FROM pg_tablespace " +
"ORDER BY pg_tablespace_size(spcname) DESC";
try (Connection conn = DriverManager.getConnection(url, user, password);
Statement stmt = conn.createStatement();
ResultSet rs = stmt.executeQuery(sql)) {
while (rs.next()) {
Map<String, Object> row = new HashMap<>();
row.put("tablespaceName", rs.getString("tablespace_name"));
row.put("size", rs.getString("size"));
results.add(row);
}
} catch (Exception e) {
e.printStackTrace();
}
return results;
} }
/** /**
@ -47,7 +237,85 @@ public class DatabaseMonitorServiceImpl implements DatabaseMonitorService {
*/ */
@Override @Override
public Map<String, Object> getActiveSessionInfo() { public Map<String, Object> getActiveSessionInfo() {
return Map.of(); Map<String,Object> result = new HashMap<>();
try {
//Prometheus 服务器地址
String url = serverProperties.getServerUrl();
//目标主机实例node-exporter 的地址
String instance = serverProperties.getDbInstance();
//监测的数据库
String monitorDBNames = serverProperties.getMonitorDBNames();
//查询活跃连接数
String activeSessionQuery = "pg_stat_activity_count{datname=~\"("+monitorDBNames+")\", instance=~\""+instance+"\", state=\"active\"} !=0";
PrometheusResponse response = webClient.get()
.uri(buildUri(url,activeSessionQuery))
.retrieve()
.bodyToMono(PrometheusResponse.class)
.block();
if(Objects.nonNull(response) &&
Objects.nonNull(response.getData()) &&
CollUtil.isNotEmpty(response.getData().getResult())
) {
PrometheusResponse.Result activeSessionInfo = response.getData().getResult().get(0);
if(CollUtil.isNotEmpty(activeSessionInfo.getValue())) {
Date date = new Date(activeSessionInfo.getValue().get(0).longValue()*1000);
Double useRate = BigDecimal.valueOf(activeSessionInfo.getValue().get(1)).setScale(2, RoundingMode.HALF_UP).doubleValue();
result.put("date", DateUtil.format(date, "MM/dd HH:mm:ss"));
result.put("activeSessions", useRate);
}
}
}catch (Exception e){
log.error("获取数据库活跃连接数信息错误请检查Prometheus服务是否正常启动或Java请求参数是否正确,详细堆栈错误为:{}",e.getMessage());
}
return result;
}
/**
* 获取活跃连接数信息列表
* @return
*/
@Override
public List<Map<String,Object>> getActiveSessionInfoList(String conditions) {
List<Map<String, Object>> result = new ArrayList<>();
try {
//Prometheus 服务器地址
String url = serverProperties.getServerUrl();
//目标主机实例node-exporter 的地址
String instance = serverProperties.getDbInstance();
//监测的数据库
String monitorDBNames = serverProperties.getMonitorDBNames();
//查询数据库活跃连接数
String activeSessionQuery = "pg_stat_activity_count{datname=~\"("+monitorDBNames+")\", instance=~\""+instance+"\", state=\"active\"} !=0";
PrometheusDBQueryTypeEnum queryTypeEnum = PrometheusDBQueryTypeEnum.getQueryTypeEnum(conditions);
long end = Instant.now().getEpochSecond();
long start = end - queryTypeEnum.getLastSecond();
String step = queryTypeEnum.getStep();
PrometheusResponse response = webClient.get()
.uri(buildUri(url,activeSessionQuery,start,end,step))
.retrieve()
.bodyToMono(PrometheusResponse.class)
.block();
if(Objects.nonNull(response) &&
Objects.nonNull(response.getData()) &&
CollUtil.isNotEmpty(response.getData().getResult())
) {
PrometheusResponse.Result activeSessionInfoList = response.getData().getResult().get(0);
if(CollUtil.isNotEmpty(activeSessionInfoList.getValues())) {
List<List<Double>> pointDatas = activeSessionInfoList.getValues();
for(List<Double> pointData : pointDatas) {
Map<String,Object> pointDataMap = new HashMap<>();
Date date = new Date(pointData.get(0).longValue()*1000);
Double useRate = BigDecimal.valueOf(pointData.get(1)).setScale(2, RoundingMode.HALF_UP).doubleValue();
pointDataMap.put("date", DateUtil.format(date, "MM/dd HH:mm:ss"));
pointDataMap.put("activeSessions", useRate);
result.add(pointDataMap);
}
}
}
}catch (Exception e){
log.error("获取数据库活跃连接数信息错误请检查Prometheus服务是否正常启动或Java请求参数是否正确,详细堆栈错误为:{}",e.getMessage());
}
return result;
} }
/** /**
@ -57,8 +325,83 @@ public class DatabaseMonitorServiceImpl implements DatabaseMonitorService {
*/ */
@Override @Override
public Map<String, Object> getDeadlocksInfo() { public Map<String, Object> getDeadlocksInfo() {
//irate(pg_stat_database_deadlocks{instance="192.168.186.143:9187", datname=~"postgres"}[5m]) Map<String,Object> result = new HashMap<>();
return Map.of(); try {
//Prometheus 服务器地址
String url = serverProperties.getServerUrl();
//目标主机实例node-exporter 的地址
String instance = serverProperties.getDbInstance();
//查询数据库死锁信息
String deadlocksQuery = "irate(pg_stat_database_deadlocks{instance=\""+instance+"\", datname=~\"postgres\"}[5m])";
PrometheusResponse response = webClient.get()
.uri(buildUri(url,deadlocksQuery))
.retrieve()
.bodyToMono(PrometheusResponse.class)
.block();
if(Objects.nonNull(response) &&
Objects.nonNull(response.getData()) &&
CollUtil.isNotEmpty(response.getData().getResult())
) {
PrometheusResponse.Result deadlocksInfo = response.getData().getResult().get(0);
if(CollUtil.isNotEmpty(deadlocksInfo.getValue())) {
Date date = new Date(deadlocksInfo.getValue().get(0).longValue()*1000);
Double useRate = BigDecimal.valueOf(deadlocksInfo.getValue().get(1)).setScale(2, RoundingMode.HALF_UP).doubleValue();
result.put("date", DateUtil.format(date, "MM/dd HH:mm:ss"));
result.put("count", useRate);
}
}
}catch (Exception e){
log.error("获取数据库死锁信息错误请检查Prometheus服务是否正常启动或Java请求参数是否正确,详细堆栈错误为:{}",e.getMessage());
}
return result;
}
/**
* 获取死锁信息
*
* @return
*/
@Override
public List<Map<String,Object>> getDeadlocksInfoList(String conditions) {
//
List<Map<String, Object>> result = new ArrayList<>();
try {
//Prometheus 服务器地址
String url = serverProperties.getServerUrl();
//目标主机实例node-exporter 的地址
String instance = serverProperties.getDbInstance();
//查询数据库死锁信息
String deadlocksQuery = "irate(pg_stat_database_deadlocks{instance=\""+instance+"\", datname=~\"postgres\"}[5m])";
PrometheusDBQueryTypeEnum queryTypeEnum = PrometheusDBQueryTypeEnum.getQueryTypeEnum(conditions);
long end = Instant.now().getEpochSecond();
long start = end - queryTypeEnum.getLastSecond();
String step = queryTypeEnum.getStep();
PrometheusResponse response = webClient.get()
.uri(buildUri(url,deadlocksQuery,start,end,step))
.retrieve()
.bodyToMono(PrometheusResponse.class)
.block();
if(Objects.nonNull(response) &&
Objects.nonNull(response.getData()) &&
CollUtil.isNotEmpty(response.getData().getResult())
) {
PrometheusResponse.Result activeSessionInfoList = response.getData().getResult().get(0);
if(CollUtil.isNotEmpty(activeSessionInfoList.getValues())) {
List<List<Double>> pointDatas = activeSessionInfoList.getValues();
for(List<Double> pointData : pointDatas) {
Map<String,Object> pointDataMap = new HashMap<>();
Date date = new Date(pointData.get(0).longValue()*1000);
Double count = BigDecimal.valueOf(pointData.get(1)).setScale(2, RoundingMode.HALF_UP).doubleValue();
pointDataMap.put("date", DateUtil.format(date, "MM/dd HH:mm:ss"));
pointDataMap.put("count", count);
result.add(pointDataMap);
}
}
}
}catch (Exception e){
log.error("获取数据库死锁信息错误请检查Prometheus服务是否正常启动或Java请求参数是否正确,详细堆栈错误为:{}",e.getMessage());
}
return result;
} }
/** /**
@ -67,6 +410,41 @@ public class DatabaseMonitorServiceImpl implements DatabaseMonitorService {
*/ */
@Override @Override
public List<DatabaseProportionInfo> getDatabaseProportionInfo() { public List<DatabaseProportionInfo> getDatabaseProportionInfo() {
return databaseMonitorMapper.getActiveSessionInfo(); return databaseMonitorMapper.getDatabaseProportionInfo();
}
/**
* 构建URI
* @param url
* @param query
* @return
*/
private URI buildUri(String url, String query){
URI uri = UriComponentsBuilder.fromHttpUrl(url + "/api/v1/query")
.queryParam("query", query)
.build()
.toUri();
return uri;
}
/**
* 构建URI
* @param url
* @param query
* @return
*/
private URI buildUri(String url,String query,Long start,Long end,String step){
String uriAddr = String.format(
"%s/api/v1/query_range?query=%s&start=%d&end=%d&step=%s",
url,
query,
start,
end,
step
);
URI uri = UriComponentsBuilder.fromHttpUrl(uriAddr)
.build()
.toUri();
return uri;
} }
} }
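
The second buildUri overload interpolates the PromQL expression into the query_range URL via String.format, so braces, quotes and spaces reach the URI machinery unescaped. A hedged alternative sketch (not part of the commit) that percent-encodes the expression up front, making the request independent of how the URI builder quotes illegal characters:

    // requires java.net.URLEncoder and java.nio.charset.StandardCharsets
    private URI buildRangeUri(String url, String query, Long start, Long end, String step) {
        String encodedQuery = URLEncoder.encode(query, StandardCharsets.UTF_8);   // escapes { } " and spaces
        String uriAddr = String.format(
                "%s/api/v1/query_range?query=%s&start=%d&end=%d&step=%s",
                url, encodedQuery, start, end, step);
        return URI.create(uriAddr);
    }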

View File

@ -4,7 +4,7 @@ import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.date.DateUtil; import cn.hutool.core.date.DateUtil;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.jeecg.common.constant.enums.PrometheusQueryTypeEnum; import org.jeecg.common.constant.enums.PrometheusHostQueryTypeEnum;
import org.jeecg.common.properties.PrometheusServerProperties; import org.jeecg.common.properties.PrometheusServerProperties;
import org.jeecg.modules.monitor.service.HostMonitorService; import org.jeecg.modules.monitor.service.HostMonitorService;
import org.jeecg.modules.monitor.vo.PrometheusResponse; import org.jeecg.modules.monitor.vo.PrometheusResponse;
@ -32,7 +32,7 @@ public class HostMonitorServiceImpl implements HostMonitorService {
* 获取CPU信息 * 获取CPU信息
*/ */
@Override @Override
public Map<String,Object> getCpuInfo() { public Map<String,Object> getCpuInfo(String conditions) {
Map<String,Object> result = new HashMap<>(); Map<String,Object> result = new HashMap<>();
try { try {
//Prometheus 服务器地址 //Prometheus 服务器地址
@ -40,7 +40,9 @@ public class HostMonitorServiceImpl implements HostMonitorService {
//目标主机实例node-exporter 的地址 //目标主机实例node-exporter 的地址
String instance = serverProperties.getInstance(); String instance = serverProperties.getInstance();
//查询CPU利用率 //查询CPU利用率
String cpuQuery = "100 * (1 - avg(rate(node_cpu_seconds_total{mode=\"idle\", instance=\""+instance+"\"}[15s])))"; PrometheusHostQueryTypeEnum queryTypeEnum = PrometheusHostQueryTypeEnum.getQueryTypeEnum(conditions);
String exprTime = queryTypeEnum.getExprTime();
String cpuQuery = "100 * (1 - avg(rate(node_cpu_seconds_total{mode=\"idle\", instance=\""+instance+"\"}["+exprTime+"])))";
PrometheusResponse response = webClient.get() PrometheusResponse response = webClient.get()
.uri(buildUri(url,cpuQuery)) .uri(buildUri(url,cpuQuery))
.retrieve() .retrieve()
@ -76,11 +78,12 @@ public class HostMonitorServiceImpl implements HostMonitorService {
//目标主机实例node-exporter 的地址 //目标主机实例node-exporter 的地址
String instance = serverProperties.getInstance(); String instance = serverProperties.getInstance();
//查询CPU利用率 //查询CPU利用率
String cpuQuery = "100 * (1 - avg(rate(node_cpu_seconds_total{mode=\"idle\", instance=\""+instance+"\"}[15s])))"; PrometheusHostQueryTypeEnum queryTypeEnum = PrometheusHostQueryTypeEnum.getQueryTypeEnum(conditions);
PrometheusQueryTypeEnum queryTypeEnum = PrometheusQueryTypeEnum.getQueryTypeEnum(conditions);
long end = Instant.now().getEpochSecond(); long end = Instant.now().getEpochSecond();
long start = end - queryTypeEnum.getLastSecond(); long start = end - queryTypeEnum.getLastSecond();
String step = queryTypeEnum.getStep(); String step = queryTypeEnum.getStep();
String exprTime = queryTypeEnum.getExprTime();
String cpuQuery = "100 * (1 - avg(rate(node_cpu_seconds_total{mode=\"idle\", instance=\""+instance+"\"}["+exprTime+"])))";
PrometheusResponse response = webClient.get() PrometheusResponse response = webClient.get()
.uri(buildUri(url,cpuQuery,start,end,step)) .uri(buildUri(url,cpuQuery,start,end,step))
.retrieve() .retrieve()
@ -228,7 +231,7 @@ public class HostMonitorServiceImpl implements HostMonitorService {
String instance = serverProperties.getInstance(); String instance = serverProperties.getInstance();
//使用率 //使用率
String usageRateQuery = "(1 - (node_memory_MemAvailable_bytes{instance=\""+instance+"\"} / node_memory_MemTotal_bytes{instance=\""+instance+"\"})) * 100"; String usageRateQuery = "(1 - (node_memory_MemAvailable_bytes{instance=\""+instance+"\"} / node_memory_MemTotal_bytes{instance=\""+instance+"\"})) * 100";
PrometheusQueryTypeEnum queryTypeEnum = PrometheusQueryTypeEnum.getQueryTypeEnum(conditions); PrometheusHostQueryTypeEnum queryTypeEnum = PrometheusHostQueryTypeEnum.getQueryTypeEnum(conditions);
long end = Instant.now().getEpochSecond(); long end = Instant.now().getEpochSecond();
long start = end - queryTypeEnum.getLastSecond(); long start = end - queryTypeEnum.getLastSecond();
String step = queryTypeEnum.getStep(); String step = queryTypeEnum.getStep();
@ -296,15 +299,17 @@ public class HostMonitorServiceImpl implements HostMonitorService {
* 获取网络信息 * 获取网络信息
*/ */
@Override @Override
public Map<String,Object> getNetworkInfo() { public Map<String,Object> getNetworkInfo(String conditions) {
Map<String,Object> result = new HashMap<>(); Map<String,Object> result = new HashMap<>();
try { try {
//Prometheus 服务器地址 //Prometheus 服务器地址
String url = serverProperties.getServerUrl(); String url = serverProperties.getServerUrl();
//目标主机实例node-exporter 的地址 //目标主机实例node-exporter 的地址
String instance = serverProperties.getInstance(); String instance = serverProperties.getInstance();
PrometheusHostQueryTypeEnum queryTypeEnum = PrometheusHostQueryTypeEnum.getQueryTypeEnum(conditions);
String exprTime = queryTypeEnum.getExprTime();
//接收带宽 (Kbps) //接收带宽 (Kbps)
String receiveKbpsQuery = "rate(node_network_receive_bytes_total{instance=\"" + instance + "\", device=\""+serverProperties.getNetworkCardName()+"\"}[15s]) * 8 / 1000"; String receiveKbpsQuery = "rate(node_network_receive_bytes_total{instance=\"" + instance + "\", device=\""+serverProperties.getNetworkCardName()+"\"}["+exprTime+"]) * 8 / 1000";
PrometheusResponse receiveKbpsResponse = webClient.get() PrometheusResponse receiveKbpsResponse = webClient.get()
.uri(buildUri(url,receiveKbpsQuery)) .uri(buildUri(url,receiveKbpsQuery))
.retrieve() .retrieve()
@ -324,7 +329,7 @@ public class HostMonitorServiceImpl implements HostMonitorService {
} }
//发送带宽 (Kbps) //发送带宽 (Kbps)
String transmitKbpsQuery = "rate(node_network_transmit_bytes_total{instance=\"" + instance + "\", device=\""+serverProperties.getNetworkCardName()+"\"}[15s]) * 8 / 1000"; String transmitKbpsQuery = "rate(node_network_transmit_bytes_total{instance=\"" + instance + "\", device=\""+serverProperties.getNetworkCardName()+"\"}["+exprTime+"]) * 8 / 1000";
PrometheusResponse transmitKbpsResponse = webClient.get() PrometheusResponse transmitKbpsResponse = webClient.get()
.uri(buildUri(url,transmitKbpsQuery)) .uri(buildUri(url,transmitKbpsQuery))
.retrieve() .retrieve()
@ -362,12 +367,13 @@ public class HostMonitorServiceImpl implements HostMonitorService {
//目标主机实例node-exporter 的地址 //目标主机实例node-exporter 的地址
String instance = serverProperties.getInstance(); String instance = serverProperties.getInstance();
//构建查询参数 //构建查询参数
PrometheusQueryTypeEnum queryTypeEnum = PrometheusQueryTypeEnum.getQueryTypeEnum(conditions); PrometheusHostQueryTypeEnum queryTypeEnum = PrometheusHostQueryTypeEnum.getQueryTypeEnum(conditions);
long end = Instant.now().getEpochSecond(); long end = Instant.now().getEpochSecond();
long start = end - queryTypeEnum.getLastSecond(); long start = end - queryTypeEnum.getLastSecond();
String step = queryTypeEnum.getStep(); String step = queryTypeEnum.getStep();
String exprTime = queryTypeEnum.getExprTime();
//接收带宽 (Kbps) //接收带宽 (Kbps)
String receiveKbpsQuery = "rate(node_network_receive_bytes_total{instance=\"" + instance + "\", device=\""+serverProperties.getNetworkCardName()+"\"}[15s]) * 8 / 1000"; String receiveKbpsQuery = "rate(node_network_receive_bytes_total{instance=\"" + instance + "\", device=\""+serverProperties.getNetworkCardName()+"\"}["+exprTime+"]) * 8 / 1000";
PrometheusResponse receiveKbpsResponse = webClient.get() PrometheusResponse receiveKbpsResponse = webClient.get()
.uri(buildUri(url,receiveKbpsQuery,start,end,step)) .uri(buildUri(url,receiveKbpsQuery,start,end,step))
.retrieve() .retrieve()
@ -394,7 +400,7 @@ public class HostMonitorServiceImpl implements HostMonitorService {
} }
//发送带宽 (Kbps) //发送带宽 (Kbps)
String transmitKbpsQuery = "rate(node_network_transmit_bytes_total{instance=\"" + instance + "\", device=\""+serverProperties.getNetworkCardName()+"\"}[15s]) * 8 / 1000"; String transmitKbpsQuery = "rate(node_network_transmit_bytes_total{instance=\"" + instance + "\", device=\""+serverProperties.getNetworkCardName()+"\"}["+exprTime+"]) * 8 / 1000";
PrometheusResponse transmitKbpsResponse = webClient.get() PrometheusResponse transmitKbpsResponse = webClient.get()
.uri(buildUri(url,transmitKbpsQuery,start,end,step)) .uri(buildUri(url,transmitKbpsQuery,start,end,step))
.retrieve() .retrieve()
@ -493,10 +499,4 @@ public class HostMonitorServiceImpl implements HostMonitorService {
.toUri(); .toUri();
return uri; return uri;
} }
//&start=%d&end=%d&step=%s
public static void main(String[] args) {
Date date = new Date(1758868629*1000L);
System.out.println(DateUtil.format(date, "yyyy-MM-dd HH:mm:ss"));
}
} }

View File

@ -65,8 +65,7 @@ public class SysBaseApiImpl implements ISysBaseAPI {
if (oConvertUtils.isEmpty(username)) { if (oConvertUtils.isEmpty(username)) {
return null; return null;
} }
LoginUser user = authUserService.getEncodeUserInfo(username); return authUserService.getEncodeUserInfo(username);
return user;
} }
@Override @Override

View File

@ -43,7 +43,7 @@ public class WeatherDataController {
@AutoLog(value = "分页查询气象文件数据") @AutoLog(value = "分页查询气象文件数据")
@Operation(summary = "分页查询气象文件数据") @Operation(summary = "分页查询气象文件数据")
@GetMapping("page") @GetMapping("page")
public Result<?> page(PageRequest pageRequest, String fileExt, Integer dataSource, public Result<?> page(PageRequest pageRequest, String fileExt, String dataSource,
@DateTimeFormat(iso = DateTimeFormat.ISO.DATE) LocalDate startDate, @DateTimeFormat(iso = DateTimeFormat.ISO.DATE) LocalDate startDate,
@DateTimeFormat(iso = DateTimeFormat.ISO.DATE) LocalDate endDate) { @DateTimeFormat(iso = DateTimeFormat.ISO.DATE) LocalDate endDate) {
IPage<WeatherData> page = weatherDataService.page(pageRequest,fileExt,dataSource,startDate,endDate); IPage<WeatherData> page = weatherDataService.page(pageRequest,fileExt,dataSource,startDate,endDate);
@ -67,9 +67,10 @@ public class WeatherDataController {
public Result<?> uploadFile(FileVo fileVo){ public Result<?> uploadFile(FileVo fileVo){
if (!fileVo.getFileExt().equals(WeatherFileSuffixEnum.GRIB.getValue()) && !fileVo.getFileExt().equals(WeatherFileSuffixEnum.GRIB2.getValue())){ if (!fileVo.getFileExt().equals(WeatherFileSuffixEnum.GRIB.getValue()) && !fileVo.getFileExt().equals(WeatherFileSuffixEnum.GRIB2.getValue())){
throw new RuntimeException("不支持当前上传的文件类型!"); throw new RuntimeException("不支持当前上传的文件类型!");
}else{
FileUploadResultVo resultVo = weatherDataService.uploadFile(fileVo);
return Result.ok(resultVo);
} }
FileUploadResultVo resultVo = weatherDataService.uploadFile(fileVo);
return Result.ok(resultVo);
} }
/** /**
@ -113,36 +114,34 @@ public class WeatherDataController {
public static void main(String[] args) { public static void main(String[] args) {
//reftime_ISO //reftime_ISO
String filePath = "F:\\工作\\五木\\放射性核素监测数据综合分析及氙本底源解析系统\\其他资料\\气象数据\\中国CRA40再分析数据\\CRA40\\20250524\\CRA40_AVO_2025052418_GLB_0P25_HOUR_V1_0_0.grib2"; String filePath = "F:\\工作\\五木\\放射性核素监测数据综合分析及氙本底源解析系统\\其他资料\\气象数据\\盘古模型预测数据\\panguweather_2025073118.grib";
String filePath1 = "F:\\工作\\五木\\放射性核素监测数据综合分析及氙本底源解析系统\\其他资料\\气象数据\\中国CRA40再分析数据\\GRAPES\\2024110100\\Z_NAFP_C_BABJ_20241101000000_P_NWPC-GRAPES-GFS-HNEHE-00000.grib2"; try {
String filePath2 = "F:\\工作\\五木\\放射性核素监测数据综合分析及氙本底源解析系统\\其他资料\\气象数据\\盘古模型预测数据\\panguweather_2025073106.grib"; String md5 = calculateMD5(filePath);
// try { System.out.println("MD5: " + md5);
// String md5 = calculateMD5(filePath); } catch (IOException e) {
// System.out.println("MD5: " + md5); e.printStackTrace();
// } catch (IOException e) { }
// e.printStackTrace(); // try (NetcdfFile ncFile = NetcdfFile.open(filePath2)) {
// }
try (NetcdfFile ncFile = NetcdfFile.open(filePath2)) {
// Variable variable = ncFile.findVariable("reftime_ISO"); // Variable variable = ncFile.findVariable("reftime_ISO");
// if (variable != null) { // if (variable != null) {
// Array data = variable.read(); // Array data = variable.read();
// System.out.println(variable.getFullName()); // System.out.println(variable.getFullName());
// System.out.println(data.getObject(0)); // System.out.println(data.getObject(0));
// } // }
int index = 0; // int index = 0;
for (Variable variable : ncFile.getVariables()) { // for (Variable variable : ncFile.getVariables()) {
if (variable != null) { // if (variable != null) {
Array data = variable.read(); // Array data = variable.read();
System.out.println(variable.getFullName()); // System.out.println(variable.getFullName());
System.out.println(data); // System.out.println(data);
if (index == 7) { // if (index == 7) {
break; // break;
} // }
index++; // index++;
} // }
} // }
}catch (Exception e){ // }catch (Exception e){
//
} // }
} }
} }

View File

@ -27,7 +27,7 @@ public interface WeatherDataService {
* @param endDate * @param endDate
* @return * @return
*/ */
IPage<WeatherData> page(PageRequest pageRequest, String fileExt, Integer dataSource, LocalDate startDate, LocalDate endDate); IPage<WeatherData> page(PageRequest pageRequest, String fileExt, String dataSource, LocalDate startDate, LocalDate endDate);
/** /**
* 验证文件是否存在 * 验证文件是否存在

View File

@ -1,5 +1,6 @@
package org.jeecg.service.impl; package org.jeecg.service.impl;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.io.FileUtil; import cn.hutool.core.io.FileUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.core.metadata.IPage;
@ -46,6 +47,7 @@ import java.time.LocalDateTime;
import java.time.ZoneId; import java.time.ZoneId;
import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatter;
import java.util.*; import java.util.*;
import java.util.stream.Collectors;
import static org.jeecg.common.constant.LatLonSizeConstants.*; import static org.jeecg.common.constant.LatLonSizeConstants.*;
@ -161,7 +163,7 @@ public class WeatherDataServiceImpl extends ServiceImpl<WeatherDataMapper, Weath
* @return * @return
*/ */
@Override @Override
public IPage<WeatherData> page(PageRequest pageRequest, String fileExt, Integer dataSource, LocalDate startDate, LocalDate endDate) { public IPage<WeatherData> page(PageRequest pageRequest, String fileExt, String dataSource, LocalDate startDate, LocalDate endDate) {
LocalDateTime startTime = null; LocalDateTime startTime = null;
if(Objects.nonNull(startDate)){ if(Objects.nonNull(startDate)){
startTime = LocalDateTime.of(startDate.getYear(), startDate.getMonth(), startDate.getDayOfMonth(), 0, 0, 0); startTime = LocalDateTime.of(startDate.getYear(), startDate.getMonth(), startDate.getDayOfMonth(), 0, 0, 0);
@ -171,7 +173,14 @@ public class WeatherDataServiceImpl extends ServiceImpl<WeatherDataMapper, Weath
endTime = LocalDateTime.of(endDate.getYear(), endDate.getMonth(), endDate.getDayOfMonth(), 23, 59, 59); endTime = LocalDateTime.of(endDate.getYear(), endDate.getMonth(), endDate.getDayOfMonth(), 23, 59, 59);
} }
LambdaQueryWrapper<WeatherData> queryWrapper = new LambdaQueryWrapper<>(); LambdaQueryWrapper<WeatherData> queryWrapper = new LambdaQueryWrapper<>();
queryWrapper.eq(Objects.nonNull(dataSource),WeatherData::getDataSource, dataSource); List<Integer> dataSources = null;
if (StringUtils.isNotBlank(dataSource)) {
dataSources = Arrays.stream(dataSource.split(",")).map(Integer::parseInt).collect(Collectors.toList());
}else {
dataSources = new ArrayList<>();
dataSources.add(Integer.parseInt(fileExt));
}
queryWrapper.in(CollUtil.isNotEmpty(dataSources),WeatherData::getDataSource,dataSources);
queryWrapper.between((Objects.nonNull(startTime) && Objects.nonNull(endTime)),WeatherData::getDataStartTime,startTime,endTime); queryWrapper.between((Objects.nonNull(startTime) && Objects.nonNull(endTime)),WeatherData::getDataStartTime,startTime,endTime);
queryWrapper.eq(StringUtils.isNotBlank(fileExt),WeatherData::getFileExt, fileExt); queryWrapper.eq(StringUtils.isNotBlank(fileExt),WeatherData::getFileExt, fileExt);
queryWrapper.select(WeatherData::getId,WeatherData::getFileName,WeatherData::getFileSize,WeatherData::getDataSource,WeatherData::getFileExt,WeatherData::getDataStartTime,WeatherData::getFilePath); queryWrapper.select(WeatherData::getId,WeatherData::getFileName,WeatherData::getFileSize,WeatherData::getDataSource,WeatherData::getFileExt,WeatherData::getDataStartTime,WeatherData::getFilePath);
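
The fallback branch above parses fileExt as an integer when dataSource is blank, which will fail with NumberFormatException whenever the extension is not a numeric code. A hedged variant (an assumption about the intended behaviour, not part of the commit) that simply skips the dataSource filter when no numeric codes are supplied:

    List<Integer> dataSources = new ArrayList<>();
    if (StringUtils.isNotBlank(dataSource)) {
        for (String part : dataSource.split(",")) {
            try {
                dataSources.add(Integer.parseInt(part.trim()));
            } catch (NumberFormatException ignored) {
                // ignore values that are not numeric data-source codes
            }
        }
    }
    queryWrapper.in(CollUtil.isNotEmpty(dataSources), WeatherData::getDataSource, dataSources);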
@ -249,6 +258,7 @@ public class WeatherDataServiceImpl extends ServiceImpl<WeatherDataMapper, Weath
this.transactionManager.commit(transactionStatus); this.transactionManager.commit(transactionStatus);
}catch (Exception e) { }catch (Exception e) {
flag = false; flag = false;
e.printStackTrace();
} }
if(fileVo.isFileShare()) { if(fileVo.isFileShare()) {
@ -268,12 +278,13 @@ public class WeatherDataServiceImpl extends ServiceImpl<WeatherDataMapper, Weath
File dataFile = new File(storagePath); File dataFile = new File(storagePath);
if(dataFile.exists() && dataFile.length()>0){ if(dataFile.exists() && dataFile.length()>0){
//获取文件数据开始日期 //获取文件数据开始日期
String reftime = NcUtil.getReftime(dataFile.getAbsolutePath()); // String reftime = "2025-10-16 00:00:00";//NcUtil.getReftime(dataFile.getAbsolutePath());
if(StringUtils.isBlank(reftime)) { // if(StringUtils.isBlank(reftime)) {
throw new JeecgFileUploadException("解析气象文件起始时间数据异常,此文件可能损坏"); // throw new JeecgFileUploadException("解析气象文件起始时间数据异常,此文件可能损坏");
} // }
Instant instant = Instant.parse(reftime); // Instant instant = Instant.parse(reftime);
LocalDateTime utcDateTime = LocalDateTime.ofInstant(instant, ZoneId.of("UTC")); // LocalDateTime utcDateTime = LocalDateTime.ofInstant(instant, ZoneId.of("UTC"));
LocalDateTime utcDateTime = LocalDateTime.now();
queryResult.setDataStartTime(utcDateTime); queryResult.setDataStartTime(utcDateTime);
//计算文件大小M //计算文件大小M
BigDecimal divideVal = new BigDecimal("1024"); BigDecimal divideVal = new BigDecimal("1024");

View File

@ -42,7 +42,7 @@ spring:
#Sentinel配置 #Sentinel配置
sentinel: sentinel:
transport: transport:
dashboard: jeecg-boot-sentinel:9000 dashboard: stas-sentinel:9000
# 支持链路限流 # 支持链路限流
web-context-unify: false web-context-unify: false
filter: filter:

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false"> <configuration debug="false">
<!--定义日志文件的存储地址 --> <!--定义日志文件的存储地址 -->
<property name="LOG_HOME" value="../logs" /> <property name="LOG_HOME" value="logs" />
<!--<property name="COLOR_PATTERN" value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta( %replace(%caller{1}){'\t|Caller.{1}0|\r\n', ''})- %gray(%msg%xEx%n)" />--> <!--<property name="COLOR_PATTERN" value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta( %replace(%caller{1}){'\t|Caller.{1}0|\r\n', ''})- %gray(%msg%xEx%n)" />-->
<!-- 控制台输出 --> <!-- 控制台输出 -->
@ -17,7 +17,7 @@
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy"> <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名 --> <!--日志文件输出的文件名 -->
<FileNamePattern>${LOG_HOME}/jeecg-gateway-%d{yyyy-MM-dd}.%i.log</FileNamePattern> <FileNamePattern>${LOG_HOME}/stas-gateway-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<!--Number of days to keep log files -->
<MaxHistory>30</MaxHistory>
<maxFileSize>10MB</maxFileSize>

View File

@ -0,0 +1,44 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
<!--Define the log file storage location -->
<property name="LOG_HOME" value="logs" />
<!--<property name="COLOR_PATTERN" value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta( %replace(%caller{1}){'\t|Caller.{1}0|\r\n', ''})- %gray(%msg%xEx%n)" />-->
<!-- Console output -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!--Formatted output: %d is the date, %thread is the thread name, %-5level pads the level to 5 characters, %msg is the log message, %n is a newline
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50}:%L - %msg%n</pattern>-->
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %highlight(%-5level) %cyan(%logger{50}:%L) - %msg%n</pattern>
</encoder>
</appender>
<!-- Generate a new log file each day -->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--Name of the output log file -->
<FileNamePattern>${LOG_HOME}/stas-data-analyze-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<!--Number of days to keep log files -->
<MaxHistory>30</MaxHistory>
<maxFileSize>10MB</maxFileSize>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!--Formatted output: %d is the date, %thread is the thread name, %-5level pads the level to 5 characters, %msg is the log message, %n is a newline -->
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50}:%L - %msg%n</pattern>
</encoder>
</appender>
<!--mybatis log configure -->
<logger name="com.apache.ibatis" level="TRACE" />
<logger name="java.sql.Connection" level="DEBUG" />
<logger name="java.sql.Statement" level="DEBUG" />
<logger name="java.sql.PreparedStatement" level="DEBUG" />
<!-- Log output level -->
<root level="INFO">
<appender-ref ref="STDOUT" />
<appender-ref ref="FILE" />
</root>
</configuration>

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false"> <configuration debug="false">
<!--定义日志文件的存储地址 --> <!--定义日志文件的存储地址 -->
<property name="LOG_HOME" value="../logs" /> <property name="LOG_HOME" value="logs" />
<!--<property name="COLOR_PATTERN" value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta( %replace(%caller{1}){'\t|Caller.{1}0|\r\n', ''})- %gray(%msg%xEx%n)" />--> <!--<property name="COLOR_PATTERN" value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta( %replace(%caller{1}){'\t|Caller.{1}0|\r\n', ''})- %gray(%msg%xEx%n)" />-->
<!-- 控制台输出 --> <!-- 控制台输出 -->
@ -17,7 +17,7 @@
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy"> <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名 --> <!--日志文件输出的文件名 -->
<FileNamePattern>${LOG_HOME}/jeecg-system-%d{yyyy-MM-dd}.%i.log</FileNamePattern> <FileNamePattern>${LOG_HOME}/stas-event-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<!--Number of days to keep log files -->
<MaxHistory>30</MaxHistory>
<maxFileSize>10MB</maxFileSize>
@ -28,37 +28,6 @@
</encoder>
</appender>
<!-- Start of HTML-format error log -->
<appender name="HTML" class="ch.qos.logback.core.FileAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<!--Set the log level: filter out INFO logs and output only ERROR logs-->
<level>ERROR</level>
</filter>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="ch.qos.logback.classic.html.HTMLLayout">
<pattern>%p%d%msg%M%F{32}%L</pattern>
</layout>
</encoder>
<file>${LOG_HOME}/error-log.html</file>
</appender>
<!-- End of HTML-format error log -->
<!-- Start of daily HTML-format log -->
<appender name="FILE_HTML" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--Name of the output log file -->
<FileNamePattern>${LOG_HOME}/jeecg-system-%d{yyyy-MM-dd}.%i.html</FileNamePattern>
<!--Number of days to keep log files -->
<MaxHistory>30</MaxHistory>
<MaxFileSize>10MB</MaxFileSize>
</rollingPolicy>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="ch.qos.logback.classic.html.HTMLLayout">
<pattern>%p%d%msg%M%F{32}%L</pattern>
</layout>
</encoder>
</appender>
<!-- End of daily HTML-format log -->
<!--mybatis log configure -->
<logger name="com.apache.ibatis" level="TRACE" />
@ -70,8 +39,6 @@
<root level="INFO"> <root level="INFO">
<appender-ref ref="STDOUT" /> <appender-ref ref="STDOUT" />
<appender-ref ref="FILE" /> <appender-ref ref="FILE" />
<appender-ref ref="HTML" />
<appender-ref ref="FILE_HTML" />
</root>
</configuration>

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false"> <configuration debug="false">
<!--定义日志文件的存储地址 --> <!--定义日志文件的存储地址 -->
<property name="LOG_HOME" value="../logs" /> <property name="LOG_HOME" value="logs" />
<!--<property name="COLOR_PATTERN" value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta( %replace(%caller{1}){'\t|Caller.{1}0|\r\n', ''})- %gray(%msg%xEx%n)" />--> <!--<property name="COLOR_PATTERN" value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta( %replace(%caller{1}){'\t|Caller.{1}0|\r\n', ''})- %gray(%msg%xEx%n)" />-->
<!-- 控制台输出 --> <!-- 控制台输出 -->
@ -17,7 +17,7 @@
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy"> <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名 --> <!--日志文件输出的文件名 -->
<FileNamePattern>${LOG_HOME}/jeecg-system-%d{yyyy-MM-dd}.%i.log</FileNamePattern> <FileNamePattern>${LOG_HOME}/stas-source-build-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<!--Number of days to keep log files -->
<MaxHistory>30</MaxHistory>
<maxFileSize>10MB</maxFileSize>
@ -28,37 +28,6 @@
</encoder>
</appender>
<!-- Start of HTML-format error log -->
<appender name="HTML" class="ch.qos.logback.core.FileAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<!--Set the log level: filter out INFO logs and output only ERROR logs-->
<level>ERROR</level>
</filter>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="ch.qos.logback.classic.html.HTMLLayout">
<pattern>%p%d%msg%M%F{32}%L</pattern>
</layout>
</encoder>
<file>${LOG_HOME}/error-log.html</file>
</appender>
<!-- End of HTML-format error log -->
<!-- Start of daily HTML-format log -->
<appender name="FILE_HTML" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--Name of the output log file -->
<FileNamePattern>${LOG_HOME}/jeecg-system-%d{yyyy-MM-dd}.%i.html</FileNamePattern>
<!--Number of days to keep log files -->
<MaxHistory>30</MaxHistory>
<MaxFileSize>10MB</MaxFileSize>
</rollingPolicy>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="ch.qos.logback.classic.html.HTMLLayout">
<pattern>%p%d%msg%M%F{32}%L</pattern>
</layout>
</encoder>
</appender>
<!-- End of daily HTML-format log -->
<!--mybatis log configure -->
<logger name="com.apache.ibatis" level="TRACE" />
@ -70,8 +39,6 @@
<root level="INFO"> <root level="INFO">
<appender-ref ref="STDOUT" /> <appender-ref ref="STDOUT" />
<appender-ref ref="FILE" /> <appender-ref ref="FILE" />
<appender-ref ref="HTML" />
<appender-ref ref="FILE_HTML" />
</root>
</configuration>

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false"> <configuration debug="false">
<!--定义日志文件的存储地址 --> <!--定义日志文件的存储地址 -->
<property name="LOG_HOME" value="../logs" /> <property name="LOG_HOME" value="logs" />
<!--<property name="COLOR_PATTERN" value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta( %replace(%caller{1}){'\t|Caller.{1}0|\r\n', ''})- %gray(%msg%xEx%n)" />--> <!--<property name="COLOR_PATTERN" value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta( %replace(%caller{1}){'\t|Caller.{1}0|\r\n', ''})- %gray(%msg%xEx%n)" />-->
<!-- 控制台输出 --> <!-- 控制台输出 -->
@ -17,7 +17,7 @@
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy"> <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名 --> <!--日志文件输出的文件名 -->
<FileNamePattern>${LOG_HOME}/jeecg-system-%d{yyyy-MM-dd}.%i.log</FileNamePattern> <FileNamePattern>${LOG_HOME}/stas-sync-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<!--Number of days to keep log files -->
<MaxHistory>30</MaxHistory>
<maxFileSize>10MB</maxFileSize>
@ -28,37 +28,6 @@
</encoder>
</appender>
<!-- Start of HTML-format error log -->
<appender name="HTML" class="ch.qos.logback.core.FileAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<!--Set the log level: filter out INFO logs and output only ERROR logs-->
<level>ERROR</level>
</filter>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="ch.qos.logback.classic.html.HTMLLayout">
<pattern>%p%d%msg%M%F{32}%L</pattern>
</layout>
</encoder>
<file>${LOG_HOME}/error-log.html</file>
</appender>
<!-- End of HTML-format error log -->
<!-- Start of daily HTML-format log -->
<appender name="FILE_HTML" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--Name of the output log file -->
<FileNamePattern>${LOG_HOME}/jeecg-system-%d{yyyy-MM-dd}.%i.html</FileNamePattern>
<!--Number of days to keep log files -->
<MaxHistory>30</MaxHistory>
<MaxFileSize>10MB</MaxFileSize>
</rollingPolicy>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="ch.qos.logback.classic.html.HTMLLayout">
<pattern>%p%d%msg%M%F{32}%L</pattern>
</layout>
</encoder>
</appender>
<!-- End of daily HTML-format log -->
<!--mybatis log configure -->
<logger name="com.apache.ibatis" level="TRACE" />
@ -70,8 +39,6 @@
<root level="INFO"> <root level="INFO">
<appender-ref ref="STDOUT" /> <appender-ref ref="STDOUT" />
<appender-ref ref="FILE" /> <appender-ref ref="FILE" />
<appender-ref ref="HTML" />
<appender-ref ref="FILE_HTML" />
</root>
</configuration>

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false"> <configuration debug="false">
<!--定义日志文件的存储地址 --> <!--定义日志文件的存储地址 -->
<property name="LOG_HOME" value="../logs" /> <property name="LOG_HOME" value="logs" />
<!--<property name="COLOR_PATTERN" value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta( %replace(%caller{1}){'\t|Caller.{1}0|\r\n', ''})- %gray(%msg%xEx%n)" />--> <!--<property name="COLOR_PATTERN" value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta( %replace(%caller{1}){'\t|Caller.{1}0|\r\n', ''})- %gray(%msg%xEx%n)" />-->
<!-- 控制台输出 --> <!-- 控制台输出 -->
@ -17,7 +17,7 @@
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy"> <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名 --> <!--日志文件输出的文件名 -->
<FileNamePattern>${LOG_HOME}/jeecg-system-%d{yyyy-MM-dd}.%i.log</FileNamePattern> <FileNamePattern>${LOG_HOME}/stas-system-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<!--Number of days to keep log files -->
<MaxHistory>30</MaxHistory>
<maxFileSize>10MB</maxFileSize>
@ -28,37 +28,6 @@
</encoder>
</appender>
<!-- Start of HTML-format error log -->
<appender name="HTML" class="ch.qos.logback.core.FileAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<!--Set the log level: filter out INFO logs and output only ERROR logs-->
<level>ERROR</level>
</filter>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="ch.qos.logback.classic.html.HTMLLayout">
<pattern>%p%d%msg%M%F{32}%L</pattern>
</layout>
</encoder>
<file>${LOG_HOME}/error-log.html</file>
</appender>
<!-- End of HTML-format error log -->
<!-- Start of daily HTML-format log -->
<appender name="FILE_HTML" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--Name of the output log file -->
<FileNamePattern>${LOG_HOME}/jeecg-system-%d{yyyy-MM-dd}.%i.html</FileNamePattern>
<!--Number of days to keep log files -->
<MaxHistory>30</MaxHistory>
<MaxFileSize>10MB</MaxFileSize>
</rollingPolicy>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="ch.qos.logback.classic.html.HTMLLayout">
<pattern>%p%d%msg%M%F{32}%L</pattern>
</layout>
</encoder>
</appender>
<!-- End of daily HTML-format log -->
<!--mybatis log configure -->
<logger name="com.apache.ibatis" level="TRACE" />
@ -70,8 +39,6 @@
<root level="INFO"> <root level="INFO">
<appender-ref ref="STDOUT" /> <appender-ref ref="STDOUT" />
<appender-ref ref="FILE" /> <appender-ref ref="FILE" />
<appender-ref ref="HTML" />
<appender-ref ref="FILE_HTML" />
</root>
</configuration>

View File

@ -1,5 +1,5 @@
server:
port: 8002 port: 8003
spring:
application:

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false"> <configuration debug="false">
<!--定义日志文件的存储地址 --> <!--定义日志文件的存储地址 -->
<property name="LOG_HOME" value="../logs" /> <property name="LOG_HOME" value="logs" />
<!--<property name="COLOR_PATTERN" value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta( %replace(%caller{1}){'\t|Caller.{1}0|\r\n', ''})- %gray(%msg%xEx%n)" />--> <!--<property name="COLOR_PATTERN" value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta( %replace(%caller{1}){'\t|Caller.{1}0|\r\n', ''})- %gray(%msg%xEx%n)" />-->
<!-- 控制台输出 --> <!-- 控制台输出 -->
@ -17,7 +17,7 @@
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy"> <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名 --> <!--日志文件输出的文件名 -->
<FileNamePattern>${LOG_HOME}/jeecg-system-%d{yyyy-MM-dd}.%i.log</FileNamePattern> <FileNamePattern>${LOG_HOME}/stas-weather-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<!--Number of days to keep log files -->
<MaxHistory>30</MaxHistory>
<maxFileSize>10MB</maxFileSize>
@ -28,37 +28,6 @@
</encoder>
</appender>
<!-- Start of HTML-format error log -->
<appender name="HTML" class="ch.qos.logback.core.FileAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<!--Set the log level: filter out INFO logs and output only ERROR logs-->
<level>ERROR</level>
</filter>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="ch.qos.logback.classic.html.HTMLLayout">
<pattern>%p%d%msg%M%F{32}%L</pattern>
</layout>
</encoder>
<file>${LOG_HOME}/error-log.html</file>
</appender>
<!-- End of HTML-format error log -->
<!-- Start of daily HTML-format log -->
<appender name="FILE_HTML" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--Name of the output log file -->
<FileNamePattern>${LOG_HOME}/jeecg-system-%d{yyyy-MM-dd}.%i.html</FileNamePattern>
<!--Number of days to keep log files -->
<MaxHistory>30</MaxHistory>
<MaxFileSize>10MB</MaxFileSize>
</rollingPolicy>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="ch.qos.logback.classic.html.HTMLLayout">
<pattern>%p%d%msg%M%F{32}%L</pattern>
</layout>
</encoder>
</appender>
<!-- End of daily HTML-format log -->
<!--mybatis log configure -->
<logger name="com.apache.ibatis" level="TRACE" />
@ -70,8 +39,6 @@
<root level="INFO"> <root level="INFO">
<appender-ref ref="STDOUT" /> <appender-ref ref="STDOUT" />
<appender-ref ref="FILE" /> <appender-ref ref="FILE" />
<appender-ref ref="HTML" />
<appender-ref ref="FILE_HTML" />
</root>
</configuration>

View File

@ -535,7 +535,7 @@
<!--Current environment-->
<profile.name>dev</profile.name>
<!--Nacos server address-->
<config.server-addr>jeecg-boot-nacos:8848</config.server-addr> <config.server-addr>stas-nacos:8848</config.server-addr>
<!--Nacos config center namespace, used to support multiple environments. The ID must be used here, not the name; empty by default-->
<config.namespace></config.namespace>
<!--Nacos config group name-->
@ -565,7 +565,7 @@
<!--Current environment-->
<profile.name>test</profile.name>
<!--Nacos server address-->
<config.server-addr>jeecg-boot-nacos:8848</config.server-addr> <config.server-addr>stas-nacos:8848</config.server-addr>
<!--Nacos config center namespace, used to support multiple environments. The ID must be used here, not the name; empty by default-->
<config.namespace></config.namespace>
<!--Nacos config group name-->
@ -583,7 +583,7 @@
<!--Current environment-->
<profile.name>prod</profile.name>
<!--Nacos server address-->
<config.server-addr>jeecg-boot-nacos:8848</config.server-addr> <config.server-addr>stas-nacos:8848</config.server-addr>
<!--Nacos config center namespace, used to support multiple environments. The ID must be used here, not the name; empty by default-->
<config.namespace></config.namespace>
<!--Nacos config group name-->