Compare commits
119 Commits
4b98a910c7
...
PromdataPa
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
fd0a52368a | ||
|
|
650cc6f1b5 | ||
|
|
ff1c53ea40 | ||
|
|
b79655ccdc | ||
|
|
1f12197a91 | ||
|
|
d0455fb032 | ||
|
|
b2f6f7d2d0 | ||
|
|
ff439bb831 | ||
|
|
3980d66b49 | ||
| b16d910051 | |||
|
|
7e3a8e12d0 | ||
|
|
1d728d991e | ||
|
|
96de50285f | ||
|
|
225ec71ac3 | ||
|
|
90e72af72c | ||
|
|
d0bc646c22 | ||
|
|
90b31f9926 | ||
|
|
61748b8959 | ||
|
|
217510c07d | ||
|
|
e4b97be54e | ||
|
|
43d2b80fb2 | ||
|
|
9845f2fe5c | ||
|
|
607d71d1ca | ||
|
|
d7b6d3aebb | ||
|
|
98fdeca33b | ||
|
|
646696b8a0 | ||
|
|
58c5d36022 | ||
|
|
5fedaa299b | ||
|
|
c42d512dbd | ||
|
|
5baffb7e05 | ||
|
|
1ce174bd93 | ||
|
|
7fdac71062 | ||
|
|
6b61104641 | ||
|
|
40eeb0b9dd | ||
|
|
62747f0fcf | ||
|
|
b2f14528a9 | ||
|
|
cc3c67eae9 | ||
|
|
542258a271 | ||
|
|
a35dac78f8 | ||
|
|
e8b60ce28b | ||
|
|
d4d2927963 | ||
|
|
97e87409b5 | ||
|
|
dddf9dba65 | ||
|
|
4e953c01fc | ||
|
|
90c7bd80b1 | ||
|
|
322621a97b | ||
|
|
058a6c73a1 | ||
|
|
84972cdaeb | ||
|
|
28432c9c23 | ||
|
|
afe7361e06 | ||
|
|
2a8cb32d47 | ||
|
|
d7d650c5f9 | ||
|
|
469ef9e448 | ||
|
|
e8e23c5af8 | ||
|
|
a103c7dbf5 | ||
|
|
0c217963bb | ||
|
|
73807eaaaf | ||
|
|
d5b70edd11 | ||
|
|
d67815c7b6 | ||
|
|
8c25f1735d | ||
|
|
5e40c19ef1 | ||
|
|
6b9de37bf9 | ||
|
|
46ef8131c7 | ||
|
|
e91dcc8c02 | ||
|
|
7c1f0d4e63 | ||
|
|
464c3193d1 | ||
|
|
aed9147074 | ||
|
|
9e827c9831 | ||
|
|
34a10e3cd2 | ||
|
|
144b9b817d | ||
|
|
b2c37b8fe3 | ||
|
|
f997b6236c | ||
|
|
d557588b47 | ||
|
|
5238167212 | ||
|
|
dc1a8a1a44 | ||
|
|
d595397f08 | ||
|
|
dc865c6d9d | ||
|
|
a9fe0f219a | ||
|
|
035ebd8d40 | ||
|
|
d7f8db89a3 | ||
|
|
2149aa0208 | ||
|
|
e7b8000808 | ||
|
|
484a7a766f | ||
|
|
e55e6e8af6 | ||
|
|
236a548f58 | ||
|
|
0cf10a7e8a | ||
|
|
6b82cfb561 | ||
|
|
e66905e57f | ||
|
|
672ea11598 | ||
|
|
bea8ed607e | ||
|
|
37444eb6f4 | ||
|
|
0f4d3a2986 | ||
|
|
af83f42d26 | ||
|
|
2fc84f999c | ||
|
|
316e0e1b7e | ||
|
|
e50f95c325 | ||
|
|
b3580c15cc | ||
|
|
ded8d1b18d | ||
|
|
3d4b926b16 | ||
|
|
755bd45a0b | ||
|
|
f6fa253a11 | ||
|
|
4e8cce52ea | ||
|
|
c6e6c91e77 | ||
|
|
e9ca358eb1 | ||
|
|
a1703e72be | ||
|
|
f3f49f2c8e | ||
|
|
79779d6fcf | ||
|
|
e2dbf06601 | ||
|
|
0914881d26 | ||
|
|
e77bdbcc9e | ||
|
|
286eb1687d | ||
|
|
f4d7f129dd | ||
|
|
75736c0c4c | ||
| e9ecf164ee | |||
|
|
4f04227976 | ||
|
|
415334ad73 | ||
|
|
ba712f1907 | ||
|
|
d0bd05409d | ||
|
|
50d74916a6 |
13
.env.example
13
.env.example
@@ -1,10 +1,9 @@
|
||||
# PromdataPanel Environment Configuration
|
||||
# Note: Database and Cache settings will be automatically configured upon visiting /init.html
|
||||
|
||||
# Server Binding
|
||||
HOST=0.0.0.0
|
||||
PORT=3000
|
||||
REFRESH_INTERVAL=5000
|
||||
|
||||
# Valkey/Redis Cache Configuration
|
||||
VALKEY_HOST=localhost
|
||||
VALKEY_PORT=6379
|
||||
VALKEY_PASSWORD=
|
||||
VALKEY_DB=dashboard
|
||||
VALKEY_TTL=30
|
||||
# Aggregation interval in milliseconds (default 5s)
|
||||
REFRESH_INTERVAL=5000
|
||||
|
||||
94
Install.sh
94
Install.sh
@@ -1,94 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Data Visualization Display Wall - Systemd Installer
|
||||
# Requirements: Node.js, NPM, Systemd (Linux)
|
||||
|
||||
# Colors for output
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
BLUE='\033[0;34m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
echo -e "${BLUE}=== Data Visualization Display Wall Installer ===${NC}"
|
||||
|
||||
# 1. Check permissions
|
||||
if [ "$EUID" -ne 0 ]; then
|
||||
echo -e "${RED}Please run as root (sudo ./Install.sh)${NC}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# 2. Get current directory and user
|
||||
PROJECT_DIR=$(pwd)
|
||||
REAL_USER=${SUDO_USER:-$USER}
|
||||
USER_HOME=$(getent passwd "$REAL_USER" | cut -d: -f6)
|
||||
|
||||
echo -e "Project Directory: ${GREEN}$PROJECT_DIR${NC}"
|
||||
echo -e "Running User: ${GREEN}$REAL_USER${NC}"
|
||||
|
||||
# 3. Check for dependencies
|
||||
if ! command -v node &> /dev/null; then
|
||||
echo -e "${RED}Node.js is not installed. Please install Node.js first.${NC}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! command -v npm &> /dev/null; then
|
||||
echo -e "${RED}NPM is not installed. Please install NPM first.${NC}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# 4. Install NPM dependencies
|
||||
echo -e "${BLUE}Installing dependencies...${NC}"
|
||||
# Run npm install as the real user to avoid permission issues in node_modules
|
||||
sudo -u "$REAL_USER" npm install
|
||||
|
||||
if [ $? -ne 0 ]; then
|
||||
echo -e "${RED}NPM install failed.${NC}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# 5. Create Systemd Service File
|
||||
SERVICE_FILE="/etc/systemd/system/data-wall.service"
|
||||
NODE_PATH=$(command -v node)
|
||||
|
||||
echo -e "${BLUE}Creating systemd service at $SERVICE_FILE...${NC}"
|
||||
|
||||
cat <<EOF > "$SERVICE_FILE"
|
||||
[Unit]
|
||||
Description=Data Visualization Display Wall
|
||||
After=network.target mysql.service
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=$REAL_USER
|
||||
WorkingDirectory=$PROJECT_DIR
|
||||
ExecStart=$NODE_PATH server/index.js
|
||||
Restart=always
|
||||
RestartSec=10
|
||||
StandardOutput=syslog
|
||||
StandardError=syslog
|
||||
SyslogIdentifier=data-wall
|
||||
# Pass environment via .env file injection for flexibility
|
||||
EnvironmentFile=-$PROJECT_DIR/.env
|
||||
Environment=NODE_ENV=production
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
|
||||
# 6. Reload Systemd and Start
|
||||
echo -e "${BLUE}Reloading systemd and starting service...${NC}"
|
||||
systemctl daemon-reload
|
||||
systemctl enable data-wall
|
||||
systemctl stop data-wall # Stop if already running
|
||||
systemctl start data-wall
|
||||
|
||||
# 7. Check Status
|
||||
if systemctl is-active --quiet data-wall; then
|
||||
echo -e "${GREEN}SUCCESS: Service is now running.${NC}"
|
||||
echo -e "You can access the dashboard at http://localhost:3000"
|
||||
echo -e "View logs with: ${BLUE}journalctl -u data-wall -f${NC}"
|
||||
else
|
||||
echo -e "${RED}FAILED: Service failed to start. Check logs with 'journalctl -u data-wall -xe'${NC}"
|
||||
fi
|
||||
|
||||
echo -e "${BLUE}================================================${NC}"
|
||||
112
README.md
112
README.md
@@ -1,78 +1,70 @@
|
||||
# 数据可视化展示大屏
|
||||
# PromdataPanel
|
||||
|
||||
多源 Prometheus 服务器监控展示大屏,支持对接多个 Prometheus 实例,实时展示所有服务器的 CPU、内存、磁盘、网络等关键指标。
|
||||
多源 Prometheus 服务器监控展示大屏。支持对接多个 Prometheus 实例,实时聚合展示所有服务器的 CPU、内存、磁盘、带宽等关键指标,并提供可视化节点分布图。
|
||||
|
||||
## 功能特性
|
||||
|
||||
- 🔌 **多数据源管理** - MySQL 存储配置,支持对接多个 Prometheus 实例
|
||||
- 📊 **NodeExporter 数据查询** - 自动聚合所有 Prometheus 中的 NodeExporter 数据
|
||||
- 🌐 **网络流量统计** - 24 小时网络流量趋势图,总流量统计
|
||||
- ⚡ **实时带宽监控** - 所有服务器网络带宽求和,实时显示
|
||||
- 💻 **资源使用概览** - CPU、内存、磁盘的总使用率和详细统计
|
||||
- 🖥️ **服务器列表** - 所有服务器的详细指标一览表
|
||||
- 🔌 **多数据源管理** - 支持对接多个 Prometheus 实例(Node_Exporter / BlackboxExporter)
|
||||
- 📊 **指标自动聚合** - 自动汇总所有数据源的 NodeExporter 指标,实时计算全网负载
|
||||
- 🌐 **网络流量统计** - 24 小时流量趋势图,实时带宽(Rx/Tx)求和显示
|
||||
- 🗺️ **节点分布可视化** - 自动识别服务器地理位置,并在全球地图上展示实时连接状态与延迟
|
||||
- ⚡ **毫秒级实时性** - 深度优化查询逻辑,支持 5s 采集频率的实时动态展示
|
||||
- 📱 **响应式与美学设计** - 现代 UI/UX 体验,支持暗色模式,极致性能优化
|
||||
|
||||
## 快速开始
|
||||
## 快速安装
|
||||
|
||||
### 1. 环境要求
|
||||
### 方式一:一键脚本安装 (推荐)
|
||||
|
||||
- Node.js >= 16
|
||||
- MySQL >= 5.7
|
||||
|
||||
### 2. 配置
|
||||
|
||||
复制环境变量文件并修改:
|
||||
在 Linux 服务器上,您可以使用以下脚本一键完成下载、环境检测、依赖安装并将其注册为 Systemd 系统服务:
|
||||
|
||||
```bash
|
||||
cp .env.example .env
|
||||
# 下载安装最新版本 (默认 v0.1.0)
|
||||
VERSION=v0.1.0 curl -sSL https://git.littlediary.cn/CN-JS-HuiBai/PromdataPanel/raw/branch/main/install.sh | bash
|
||||
```
|
||||
|
||||
编辑 `.env` 文件,配置 MySQL 连接信息:
|
||||
### 方式二:手动安装
|
||||
|
||||
```env
|
||||
MYSQL_HOST=localhost
|
||||
MYSQL_PORT=3306
|
||||
MYSQL_USER=root
|
||||
MYSQL_PASSWORD=your_password
|
||||
MYSQL_DATABASE=display_wall
|
||||
PORT=3000
|
||||
#### 1. 环境要求
|
||||
- **Node.js** >= 18
|
||||
- **MySQL** >= 8.0
|
||||
- **Valkey** >= 7.0 (或 Redis >= 6.0)
|
||||
|
||||
#### 2. 配置与启动
|
||||
1. 克隆代码库:`git clone https://git.littlediary.cn/CN-JS-HuiBai/PromdataPanel.git`
|
||||
2. 复制配置文件:`cp .env.example .env`
|
||||
3. 安装依赖:`npm install --production`
|
||||
4. 启动服务:`npm start`
|
||||
|
||||
#### 3. 系统初始化
|
||||
首次运行后,访问 `http://your-ip:3000/init.html`,按照引导完成 MySQL 数据库和 Valkey 缓存的连接。
|
||||
|
||||
## 使用指引
|
||||
|
||||
### 1. 添加 Prometheus 数据源
|
||||
点击页面右上角的 ⚙️ 按钮进入设置,添加并测试您的 Prometheus HTTP 地址。
|
||||
|
||||
### 2. Prometheus 采集配置
|
||||
建议在 `prometheus.yml` 中设置采集周期为 `5s` 以实现平滑的实时动态效果:
|
||||
|
||||
```yaml
|
||||
global:
|
||||
scrape_interval: 5s
|
||||
|
||||
scrape_configs:
|
||||
- job_name: 'nodes'
|
||||
static_configs:
|
||||
- targets: ['your-server-ip:9100']
|
||||
```
|
||||
|
||||
### 3. 初始化数据库
|
||||
|
||||
```bash
|
||||
npm run init-db
|
||||
```
|
||||
|
||||
### 4. 安装依赖并启动
|
||||
|
||||
```bash
|
||||
npm install
|
||||
npm run dev
|
||||
```
|
||||
|
||||
访问 `http://localhost:3000` 即可看到展示大屏。
|
||||
|
||||
### 5. 配置 Prometheus 数据源
|
||||
|
||||
点击右上角的 ⚙️ 按钮,添加你的 Prometheus 地址(如 `http://prometheus.example.com:9090`)。
|
||||
|
||||
## 技术栈
|
||||
|
||||
- **后端**: Node.js + Express
|
||||
- **数据库**: MySQL (mysql2)
|
||||
- **数据源**: Prometheus HTTP API
|
||||
- **前端**: 原生 HTML/CSS/JavaScript
|
||||
- **图表**: 自定义 Canvas 渲染
|
||||
- **Runtime**: Node.js
|
||||
- **Framework**: Express.js
|
||||
- **Database**: MySQL 8.0+
|
||||
- **Caching**: Valkey / Redis
|
||||
- **Visualization**: ECharts / Canvas
|
||||
- **Frontend**: Vanilla JS / CSS3
|
||||
|
||||
## API 接口
|
||||
## LICENSE
|
||||
|
||||
| 方法 | 路径 | 说明 |
|
||||
|------|------|------|
|
||||
| GET | `/api/sources` | 获取所有数据源 |
|
||||
| POST | `/api/sources` | 添加数据源 |
|
||||
| PUT | `/api/sources/:id` | 更新数据源 |
|
||||
| DELETE | `/api/sources/:id` | 删除数据源 |
|
||||
| POST | `/api/sources/test` | 测试数据源连接 |
|
||||
| GET | `/api/metrics/overview` | 获取聚合指标概览 |
|
||||
| GET | `/api/metrics/network-history` | 获取24h网络流量历史 |
|
||||
| GET | `/api/metrics/cpu-history` | 获取CPU使用率历史 |
|
||||
MIT License
|
||||
|
||||
197
install.sh
Normal file
197
install.sh
Normal file
@@ -0,0 +1,197 @@
|
||||
#!/bin/bash
|
||||
|
||||
# PromdataPanel - Multi-Prometheus Monitoring Dashboard Installer
|
||||
# This script handles OS detection, Node.js installation, project setup, and Systemd configuration.
|
||||
|
||||
# Colors for output
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
BLUE='\033[0;34m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
# 0. Configuration
|
||||
VERSION=${VERSION:-"v0.1.0"}
|
||||
DOWNLOAD_URL="https://git.littlediary.cn/CN-JS-HuiBai/PromdataPanel/archive/${VERSION}.zip"
|
||||
MIN_NODE_VERSION=18
|
||||
|
||||
echo -e "${BLUE}================================================${NC}"
|
||||
echo -e "${BLUE} PromdataPanel Auto-Installer ${NC}"
|
||||
echo -e "${BLUE} Version: ${VERSION} ${NC}"
|
||||
echo -e "${BLUE}================================================${NC}"
|
||||
|
||||
# 1. OS Detection
|
||||
detect_os() {
|
||||
if [ -f /etc/os-release ]; then
|
||||
. /etc/os-release
|
||||
OS_ID=$ID
|
||||
OS_VER=$VERSION_ID
|
||||
else
|
||||
echo -e "${RED}Error: Cannot detect operating system type (/etc/os-release missing).${NC}"
|
||||
exit 1
|
||||
fi
|
||||
echo -e "Detected OS: ${GREEN}${OS_ID} ${OS_VER}${NC}"
|
||||
}
|
||||
|
||||
# 2. Node.js Installation/Verification
|
||||
install_node() {
|
||||
echo -e "${BLUE}Verifying Node.js environment...${NC}"
|
||||
|
||||
NODE_INSTALLED=false
|
||||
if command -v node &> /dev/null; then
|
||||
CURRENT_NODE_VER=$(node -v | cut -d'v' -f2 | cut -d'.' -f1)
|
||||
if [ "$CURRENT_NODE_VER" -ge "$MIN_NODE_VERSION" ]; then
|
||||
echo -e "${GREEN}Node.js v$(node -v) is already installed.${NC}"
|
||||
NODE_INSTALLED=true
|
||||
else
|
||||
echo -e "${YELLOW}Existing Node.js version (v$(node -v)) is too old (Requires >= $MIN_NODE_VERSION).${NC}"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "$NODE_INSTALLED" = false ]; then
|
||||
echo -e "${BLUE}Installing Node.js 20.x...${NC}"
|
||||
case "$OS_ID" in
|
||||
ubuntu|debian|raspbian)
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y ca-certificates curl gnupg
|
||||
curl -fsSL https://deb.nodesource.com/setup_20.x | sudo -E bash -
|
||||
sudo apt-get install -y nodejs
|
||||
;;
|
||||
centos|rhel|almalinux|rocky)
|
||||
curl -fsSL https://rpm.nodesource.com/setup_20.x | sudo bash -
|
||||
sudo yum install -y nodejs
|
||||
;;
|
||||
fedora)
|
||||
curl -fsSL https://rpm.nodesource.com/setup_20.x | sudo bash -
|
||||
sudo dnf install -y nodejs
|
||||
;;
|
||||
*)
|
||||
echo -e "${RED}Unsupported OS for automatic Node.js installation: $OS_ID${NC}"
|
||||
echo -e "Please install Node.js >= 18 manually.${NC}"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
}
|
||||
|
||||
# 3. Download and Extract (If needed)
|
||||
if [ ! -f "server/index.js" ]; then
|
||||
echo -e "${YELLOW}Project files not found. Starting download...${NC}"
|
||||
|
||||
if ! command -v curl &> /dev/null; then
|
||||
echo -e "${BLUE}Installing curl...${NC}"
|
||||
[ "$OS_ID" = "ubuntu" ] || [ "$OS_ID" = "debian" ] && sudo apt-get install -y curl
|
||||
[ "$OS_ID" = "centos" ] || [ "$OS_ID" = "rhel" ] && sudo yum install -y curl
|
||||
fi
|
||||
|
||||
if ! command -v unzip &> /dev/null; then
|
||||
echo -e "${BLUE}Installing unzip...${NC}"
|
||||
[ "$OS_ID" = "ubuntu" ] || [ "$OS_ID" = "debian" ] && sudo apt-get install -y unzip
|
||||
[ "$OS_ID" = "centos" ] || [ "$OS_ID" = "rhel" ] && sudo yum install -y unzip
|
||||
fi
|
||||
|
||||
TEMP_ZIP="promdatapanel_${VERSION}.zip"
|
||||
echo -e "${BLUE}Downloading ${DOWNLOAD_URL}...${NC}"
|
||||
curl -L "$DOWNLOAD_URL" -o "$TEMP_ZIP"
|
||||
|
||||
if [ $? -ne 0 ]; then
|
||||
echo -e "${RED}Download failed.${NC}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo -e "${BLUE}Extracting files...${NC}"
|
||||
unzip -q "$TEMP_ZIP"
|
||||
|
||||
EXTRACTED_DIR=$(ls -d */ | grep -E "^PromdataPanel" | head -n 1)
|
||||
if [ -d "$EXTRACTED_DIR" ]; then
|
||||
cd "$EXTRACTED_DIR" || exit 1
|
||||
else
|
||||
EXTRACTED_DIR=$(ls -d */ | head -n 1)
|
||||
[ -d "$EXTRACTED_DIR" ] && cd "$EXTRACTED_DIR" || exit 1
|
||||
fi
|
||||
rm "../$TEMP_ZIP" 2>/dev/null || rm "$TEMP_ZIP" 2>/dev/null
|
||||
fi
|
||||
|
||||
# 4. Initialize Setup
|
||||
# Permission check
|
||||
if [ "$EUID" -eq 0 ]; then
|
||||
REAL_USER=${SUDO_USER:-$USER}
|
||||
else
|
||||
REAL_USER=$USER
|
||||
fi
|
||||
|
||||
detect_os
|
||||
install_node
|
||||
|
||||
PROJECT_DIR=$(pwd)
|
||||
echo -e "Project Directory: ${GREEN}$PROJECT_DIR${NC}"
|
||||
echo -e "Running User: ${GREEN}$REAL_USER${NC}"
|
||||
|
||||
# Check for .env file
|
||||
if [ ! -f ".env" ]; then
|
||||
if [ -f ".env.example" ]; then
|
||||
echo -e "${BLUE}Creating .env from .env.example...${NC}"
|
||||
cp .env.example .env
|
||||
fi
|
||||
fi
|
||||
|
||||
# 5. Install Dependencies
|
||||
echo -e "${BLUE}Installing NPM dependencies...${NC}"
|
||||
npm install --production
|
||||
|
||||
if [ $? -ne 0 ]; then
|
||||
echo -e "${RED}NPM install failed.${NC}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# 6. Create Systemd Service File
|
||||
SERVICE_FILE="/etc/systemd/system/promdatapanel.service"
|
||||
NODE_PATH=$(command -v node)
|
||||
|
||||
echo -e "${BLUE}Creating systemd service at $SERVICE_FILE...${NC}"
|
||||
sudo bash -c "cat <<EOF > '$SERVICE_FILE'
|
||||
[Unit]
|
||||
Description=PromdataPanel Monitoring Dashboard
|
||||
After=network.target mysql.service redis-server.service valkey-server.service
|
||||
Wants=mysql.service
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=$REAL_USER
|
||||
WorkingDirectory=$PROJECT_DIR
|
||||
ExecStart=$NODE_PATH server/index.js
|
||||
Restart=always
|
||||
RestartSec=10
|
||||
StandardOutput=syslog
|
||||
StandardError=syslog
|
||||
SyslogIdentifier=promdatapanel
|
||||
EnvironmentFile=-$PROJECT_DIR/.env
|
||||
Environment=NODE_ENV=production
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF"
|
||||
|
||||
# 7. Reload and Start
|
||||
echo -e "${BLUE}Reloading systemd and restarting service...${NC}"
|
||||
sudo systemctl daemon-reload
|
||||
sudo systemctl enable promdatapanel
|
||||
sudo systemctl restart promdatapanel
|
||||
|
||||
# 8. Check Status
|
||||
echo -e "${BLUE}Checking service status...${NC}"
|
||||
sleep 2
|
||||
if sudo systemctl is-active --quiet promdatapanel; then
|
||||
echo -e "${GREEN}SUCCESS: PromdataPanel is now running.${NC}"
|
||||
PORT=$(grep "^PORT=" .env | cut -d'=' -f2)
|
||||
PORT=${PORT:-3000}
|
||||
IP_ADDR=$(hostname -I | awk '{print $1}')
|
||||
echo -e "Dashboard URL: ${YELLOW}http://${IP_ADDR}:${PORT}${NC}"
|
||||
else
|
||||
echo -e "${RED}FAILED: Service failed to start.${NC}"
|
||||
echo -e "Check logs with: ${BLUE}journalctl -u promdatapanel -xe${NC}"
|
||||
fi
|
||||
|
||||
echo -e "${BLUE}================================================${NC}"
|
||||
echo -e "${GREEN}Installation completed!${NC}"
|
||||
echo -e "${BLUE}================================================${NC}"
|
||||
24
package-lock.json
generated
24
package-lock.json
generated
@@ -13,7 +13,8 @@
|
||||
"dotenv": "^16.4.0",
|
||||
"express": "^4.21.0",
|
||||
"ioredis": "^5.10.1",
|
||||
"mysql2": "^3.11.0"
|
||||
"mysql2": "^3.11.0",
|
||||
"ws": "^8.20.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@ioredis/commands": {
|
||||
@@ -1215,6 +1216,27 @@
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/ws": {
|
||||
"version": "8.20.0",
|
||||
"resolved": "https://registry.npmjs.org/ws/-/ws-8.20.0.tgz",
|
||||
"integrity": "sha512-sAt8BhgNbzCtgGbt2OxmpuryO63ZoDk/sqaB/znQm94T4fCEsy/yV+7CdC1kJhOU9lboAEU7R3kquuycDoibVA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=10.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"bufferutil": "^4.0.1",
|
||||
"utf-8-validate": ">=5.0.2"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"bufferutil": {
|
||||
"optional": true
|
||||
},
|
||||
"utf-8-validate": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"name": "data-visualization-display-wall",
|
||||
"name": "promdatapanel",
|
||||
"version": "1.0.0",
|
||||
"description": "Data Visualization Display Wall - Multi-Prometheus Monitoring Dashboard",
|
||||
"main": "server/index.js",
|
||||
@@ -14,6 +14,7 @@
|
||||
"dotenv": "^16.4.0",
|
||||
"express": "^4.21.0",
|
||||
"ioredis": "^5.10.1",
|
||||
"mysql2": "^3.11.0"
|
||||
"mysql2": "^3.11.0",
|
||||
"ws": "^8.20.0"
|
||||
}
|
||||
}
|
||||
|
||||
1368
public/css/style.css
1368
public/css/style.css
File diff suppressed because it is too large
Load Diff
@@ -4,14 +4,15 @@
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<meta name="description" content="多源Prometheus服务器监控展示大屏 - 实时CPU、内存、磁盘、网络统计">
|
||||
<title>数据可视化展示大屏</title>
|
||||
<meta name="description" content="LDNET-GA">
|
||||
<title>LDNET-GA</title>
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
||||
<link
|
||||
href="https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700;800;900&family=JetBrains+Mono:wght@400;500;600&display=swap"
|
||||
rel="stylesheet">
|
||||
<link rel="stylesheet" href="/css/style.css">
|
||||
<script src="https://cdn.jsdelivr.net/npm/echarts@5.4.3/dist/echarts.min.js"></script>
|
||||
<script>
|
||||
// Prevent theme flicker
|
||||
(function () {
|
||||
@@ -66,13 +67,6 @@
|
||||
</div>
|
||||
<h1 class="logo-text" id="logoText">数据可视化展示大屏</h1>
|
||||
</div>
|
||||
<div class="header-meta">
|
||||
<span class="server-count" id="serverCount">
|
||||
<span class="dot dot-pulse"></span>
|
||||
<span id="serverCountText">0 台服务器</span>
|
||||
</span>
|
||||
<span class="source-count" id="sourceCount">0 个数据源</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="header-right">
|
||||
|
||||
@@ -128,7 +122,7 @@
|
||||
</svg>
|
||||
</div>
|
||||
<div class="stat-card-content">
|
||||
<span class="stat-card-label">服务器总数</span>
|
||||
<span class="stat-card-label" id="totalServersLabel">服务器总数</span>
|
||||
<span class="stat-card-value" id="totalServers">0</span>
|
||||
</div>
|
||||
</div>
|
||||
@@ -187,9 +181,12 @@
|
||||
</svg>
|
||||
</div>
|
||||
<div class="stat-card-content">
|
||||
<span class="stat-card-label">实时总带宽</span>
|
||||
<span class="stat-card-value" id="totalBandwidth">0 B/s</span>
|
||||
<span class="stat-card-sub" id="bandwidthDetail">↓ 0 ↑ 0</span>
|
||||
<span class="stat-card-label">实时带宽 (MB/s ↑/↓)</span>
|
||||
<div class="stat-card-value-group">
|
||||
<span class="stat-card-value" id="totalBandwidthTx">0.00</span>
|
||||
<span class="stat-card-separator">/</span>
|
||||
<span class="stat-card-value" id="totalBandwidthRx">0.00</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
@@ -208,10 +205,12 @@
|
||||
</h2>
|
||||
</div>
|
||||
<div class="chart-legend">
|
||||
<span class="legend-item"><span class="legend-dot legend-rx"></span>接收 (RX)</span>
|
||||
<span class="legend-item"><span class="legend-dot legend-tx"></span>发送 (TX)</span>
|
||||
<span class="legend-item" id="legendP95" style="cursor: pointer;" title="点击切换 P95 线显示/隐藏">
|
||||
<span class="legend-dot legend-p95"></span>95计费 (P95)
|
||||
<span class="legend-item" id="legendRx" style="cursor: pointer;" title="点击切换 接收 (RX) 显示/隐藏"><span
|
||||
class="legend-dot legend-rx"></span>接收 (RX)</span>
|
||||
<span class="legend-item" id="legendTx" style="cursor: pointer;" title="点击切换 发送 (TX) 显示/隐藏"><span
|
||||
class="legend-dot legend-tx"></span>发送 (TX)</span>
|
||||
<span class="legend-item disabled" id="legendP95" style="cursor: pointer;" title="点击切换 P95 线显示/隐藏">
|
||||
<span class="legend-dot legend-p95"></span>95计费 (<span id="p95LabelText">上行</span>)
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
@@ -228,7 +227,7 @@
|
||||
<span class="traffic-value" id="traffic24hTx">0 B</span>
|
||||
</div>
|
||||
<div class="traffic-stat traffic-stat-p95">
|
||||
<span class="traffic-label">95计费带宽</span>
|
||||
<span class="traffic-label">95计费 (上行)</span>
|
||||
<span class="traffic-value" id="trafficP95">0 B/s</span>
|
||||
</div>
|
||||
<div class="traffic-stat traffic-stat-total">
|
||||
@@ -241,6 +240,43 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Global Traffic 3D Globe -->
|
||||
<div class="chart-card globe-card" id="globeCard">
|
||||
<div class="chart-card-header">
|
||||
<h2 class="chart-title">
|
||||
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.5" class="chart-title-icon">
|
||||
<circle cx="12" cy="12" r="10" />
|
||||
<path
|
||||
d="M2 12h20M12 2a15.3 15.3 0 0 1 4 10 15.3 15.3 0 0 1-4 10 15.3 15.3 0 0 1-4-10 15.3 15.3 0 0 1 4-10z" />
|
||||
</svg>
|
||||
全球服务器分布
|
||||
</h2>
|
||||
<div class="chart-header-actions">
|
||||
<button class="btn-icon" id="btnExpandGlobe" title="放大显示">
|
||||
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"
|
||||
style="width: 18px; height: 18px;">
|
||||
<path d="M15 3h6v6M9 21H3v-6M21 3l-7 7M3 21l7-7" />
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
<div class="globe-body" id="globeContainer"></div>
|
||||
<div class="chart-footer">
|
||||
<div class="traffic-stat">
|
||||
<span class="traffic-label">全球节点总数</span>
|
||||
<span class="traffic-value" id="globeTotalNodes">0</span>
|
||||
</div>
|
||||
<div class="traffic-stat">
|
||||
<span class="traffic-label">覆盖地区/国家</span>
|
||||
<span class="traffic-value" id="globeTotalRegions">0</span>
|
||||
</div>
|
||||
<div class="traffic-stat">
|
||||
<span class="traffic-label">实时活跃状态</span>
|
||||
<span class="traffic-value" style="color: var(--accent-emerald);">Active</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<!-- Server List -->
|
||||
@@ -256,19 +292,40 @@
|
||||
</svg>
|
||||
服务器详情
|
||||
</h2>
|
||||
<div class="chart-header-right">
|
||||
<div class="search-box">
|
||||
<input type="search" id="serverSearchFilter" name="q-filter-server" placeholder="检索服务器名称..."
|
||||
autocomplete="one-time-code" spellcheck="false">
|
||||
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round"
|
||||
stroke-linejoin="round" class="search-icon">
|
||||
<circle cx="11" cy="11" r="8"></circle>
|
||||
<line x1="21" y1="21" x2="16.65" y2="16.65"></line>
|
||||
</svg>
|
||||
</div>
|
||||
<button id="btnResetSort" class="btn-icon-sm" title="重置筛选与排序">
|
||||
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round"
|
||||
stroke-linejoin="round">
|
||||
<path d="M3 12a9 9 0 1 0 9-9 9.75 9.75 0 0 0-6.74 2.74L3 8"></path>
|
||||
<path d="M3 3v5h5"></path>
|
||||
</svg>
|
||||
</button>
|
||||
<select id="sourceFilter" class="source-select">
|
||||
<option value="all">所有数据源</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<div class="server-table-wrap">
|
||||
<table class="server-table" id="serverTable">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>状态</th>
|
||||
<th>Job / 实例</th>
|
||||
<th>数据源</th>
|
||||
<th>CPU</th>
|
||||
<th>内存</th>
|
||||
<th>磁盘</th>
|
||||
<th>网络 ↓</th>
|
||||
<th>网络 ↑</th>
|
||||
<th class="sortable active" data-sort="up">状态 <span class="sort-icon"></span></th>
|
||||
<th class="sortable" data-sort="job">Job / 实例 <span class="sort-icon"></span></th>
|
||||
<th class="sortable" data-sort="source">数据源 <span class="sort-icon"></span></th>
|
||||
<th class="sortable" data-sort="cpu">CPU <span class="sort-icon"></span></th>
|
||||
<th class="sortable" data-sort="mem">内存 <span class="sort-icon"></span></th>
|
||||
<th class="sortable" data-sort="disk">磁盘 <span class="sort-icon"></span></th>
|
||||
<th class="sortable" data-sort="netRx">网络 ↓ <span class="sort-icon"></span></th>
|
||||
<th class="sortable" data-sort="netTx">网络 ↑ <span class="sort-icon"></span></th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody id="serverTableBody">
|
||||
@@ -278,6 +335,21 @@
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
<div class="pagination-footer">
|
||||
<div class="page-size-selector">
|
||||
<span>每页显示</span>
|
||||
<select id="pageSizeSelect" class="source-select">
|
||||
<option value="10">10</option>
|
||||
<option value="20" selected>20</option>
|
||||
<option value="50">50</option>
|
||||
<option value="100">100</option>
|
||||
</select>
|
||||
<span>条</span>
|
||||
</div>
|
||||
<div class="pagination-controls" id="paginationControls">
|
||||
<!-- Pagination buttons will be injected here -->
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</main>
|
||||
@@ -289,6 +361,8 @@
|
||||
<div class="modal-tabs">
|
||||
<button class="modal-tab active" data-tab="prom">数据源管理</button>
|
||||
<button class="modal-tab" data-tab="site">大屏设置</button>
|
||||
<button class="modal-tab" data-tab="latency">延迟线路管理</button>
|
||||
<button class="modal-tab" data-tab="auth">账号安全</button>
|
||||
</div>
|
||||
<button class="modal-close" id="modalClose">×</button>
|
||||
</div>
|
||||
@@ -299,13 +373,21 @@
|
||||
<div class="add-source-form" id="addSourceForm">
|
||||
<h3>添加数据源</h3>
|
||||
<div class="form-row">
|
||||
<div class="form-group">
|
||||
<div class="form-group" style="flex: 0.8;">
|
||||
<label for="sourceType">类型</label>
|
||||
<select id="sourceType"
|
||||
style="padding: 10px 14px; background: var(--bg-input); border: 1px solid var(--border-color); border-radius: var(--radius-sm); color: var(--text-primary); outline: none;">
|
||||
<option value="prometheus">Prometheus</option>
|
||||
<option value="blackbox">Blackbox Exporter</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="form-group" style="flex: 1;">
|
||||
<label for="sourceName">名称</label>
|
||||
<input type="text" id="sourceName" placeholder="例:生产环境" autocomplete="off">
|
||||
</div>
|
||||
<div class="form-group form-group-wide">
|
||||
<label for="sourceUrl">Prometheus URL</label>
|
||||
<input type="url" id="sourceUrl" placeholder="http://prometheus.example.com:9090" autocomplete="off">
|
||||
<label for="sourceUrl">URL 地址</label>
|
||||
<input type="url" id="sourceUrl" placeholder="http://1.2.3.4:9090" autocomplete="off">
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-row">
|
||||
@@ -313,6 +395,15 @@
|
||||
<label for="sourceDesc">描述 (可选)</label>
|
||||
<input type="text" id="sourceDesc" placeholder="数据源描述" autocomplete="off">
|
||||
</div>
|
||||
<div class="form-group" id="serverSourceOption"
|
||||
style="display: flex; align-items: flex-end; padding-bottom: 8px;">
|
||||
<label
|
||||
style="display: flex; align-items: center; gap: 8px; cursor: pointer; font-size: 0.85rem; color: var(--text-secondary); white-space: nowrap;">
|
||||
<input type="checkbox" id="isServerSource" checked
|
||||
style="width: 16px; height: 16px; accent-color: var(--accent-indigo);">
|
||||
<span>用于服务器展示</span>
|
||||
</label>
|
||||
</div>
|
||||
<div class="form-actions">
|
||||
<button class="btn btn-test" id="btnTest">测试连接</button>
|
||||
<button class="btn btn-add" id="btnAdd">添加</button>
|
||||
@@ -346,21 +437,177 @@
|
||||
<label for="logoUrlInput">Logo URL (图片链接,为空则显示默认图标)</label>
|
||||
<input type="url" id="logoUrlInput" placeholder="https://example.com/logo.png">
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="defaultThemeInput">默认主题</label>
|
||||
<div class="settings-section" style="margin-top: 25px; border-top: 1px solid var(--border-color); padding-top: 20px;">
|
||||
<h4 style="font-size: 0.85rem; color: var(--accent-indigo); margin-bottom: 15px; text-transform: uppercase; letter-spacing: 0.5px;">界面外观 (Appearance)</h4>
|
||||
<div class="form-group">
|
||||
<label for="defaultThemeInput">色彩主题模式</label>
|
||||
<select id="defaultThemeInput"
|
||||
style="padding: 10px 14px; background: var(--bg-input); border: 1px solid var(--border-color); border-radius: var(--radius-sm); color: var(--text-primary); width: 100%;">
|
||||
<option value="auto">跟随系统主题 (Sync with OS)</option>
|
||||
<option value="dark">强制深色模式 (Always Dark)</option>
|
||||
<option value="light">强制浅色模式 (Always Light)</option>
|
||||
</select>
|
||||
<p style="font-size: 0.72rem; color: var(--text-muted); margin-top: 6px;">选择“跟随系统”后,应用将自动同步您操作系统或浏览器的黑暗/白天模式设置。</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="show95BandwidthInput">24h趋势图默认显示 95计费线</label>
|
||||
<select id="show95BandwidthInput"
|
||||
style="padding: 10px 14px; background: var(--bg-input); border: 1px solid var(--border-color); border-radius: var(--radius-sm); color: var(--text-primary);">
|
||||
<option value="dark">默认夜间模式</option>
|
||||
<option value="light">默认白天模式</option>
|
||||
<option value="auto">跟随浏览器/系统</option>
|
||||
<option value="1">显示</option>
|
||||
<option value="0">不显示</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="p95TypeSelect">95带宽计费统计类型</label>
|
||||
<select id="p95TypeSelect"
|
||||
style="padding: 10px 14px; background: var(--bg-input); border: 1px solid var(--border-color); border-radius: var(--radius-sm); color: var(--text-primary);">
|
||||
<option value="tx">仅统计上行 (TX)</option>
|
||||
<option value="rx">仅统计下行 (RX)</option>
|
||||
<option value="both">统计上行+下行 (Sum)</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="form-actions" style="margin-top: 25px; display: flex; justify-content: flex-end;">
|
||||
<button class="btn btn-add" id="btnSaveSiteSettings">保存设置</button>
|
||||
<button class="btn btn-add" id="btnSaveSiteSettings">保存基础设置</button>
|
||||
</div>
|
||||
<div class="form-message" id="siteSettingsMessage"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Latency Routes Tab -->
|
||||
<div class="tab-content" id="tab-latency">
|
||||
<div class="latency-settings-form">
|
||||
<h3>Blackbox 延迟连线管理</h3>
|
||||
<div class="latency-routes-manager">
|
||||
<!-- Add Route Form -->
|
||||
<div class="add-route-mini-form"
|
||||
style="background: rgba(255,255,255,0.02); padding: 15px; border-radius: 8px; margin-bottom: 20px; border: 1px solid var(--border-color);">
|
||||
<div class="form-row">
|
||||
<div class="form-group" style="flex: 1.5;">
|
||||
<label>探测用服务器</label>
|
||||
<select id="routeSourceSelect"
|
||||
style="padding: 10px 14px; background: var(--bg-input); border: 1px solid var(--border-color); border-radius: var(--radius-sm); color: var(--text-primary);">
|
||||
<option value="">-- 选择数据源 --</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label>起航点</label>
|
||||
<input type="text" id="routeSourceInput" placeholder="例:China">
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label>目的地</label>
|
||||
<input type="text" id="routeDestInput" placeholder="例:United States">
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-row" style="margin-top: 10px; align-items: flex-end;">
|
||||
<div class="form-group" style="flex: 2;">
|
||||
<label>Blackbox 探测目标 (IP 或 域名)</label>
|
||||
<input type="text" id="routeTargetInput" placeholder="例:1.1.1.1 或 google.com">
|
||||
</div>
|
||||
<div class="form-actions" style="padding-bottom: 0; display: flex; gap: 8px;">
|
||||
<button class="btn btn-add" id="btnAddRoute" style="padding: 10px 24px;">添加线路</button>
|
||||
<button class="btn btn-test" id="btnCancelEditRoute"
|
||||
style="display: none; padding: 10px 15px; background: rgba(0,0,0,0.3);">取消</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Routes List -->
|
||||
<div class="latency-routes-list-container">
|
||||
<h4
|
||||
style="font-size: 0.75rem; color: var(--text-muted); text-transform: uppercase;; margin-bottom: 10px;">
|
||||
已配置线路</h4>
|
||||
<div id="latencyRoutesList" class="latency-routes-list"
|
||||
style="display: flex; flex-direction: column; gap: 10px;">
|
||||
<!-- Routes will be injected here -->
|
||||
<div class="route-empty"
|
||||
style="text-align: center; padding: 20px; color: var(--text-muted); font-size: 0.85rem; background: rgba(0,0,0,0.1); border-radius: 8px;">
|
||||
暂无线路</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Account Security Tab -->
|
||||
<div class="tab-content" id="tab-auth">
|
||||
<div class="security-settings-form">
|
||||
<h3>修改登录密码</h3>
|
||||
<div class="form-group">
|
||||
<label for="oldPassword">当前密码</label>
|
||||
<input type="password" id="oldPassword" placeholder="请输入当前旧密码">
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="newPassword">新密码</label>
|
||||
<input type="password" id="newPassword" placeholder="请输入要设置的新密码">
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="confirmNewPassword">确认新密码</label>
|
||||
<input type="password" id="confirmNewPassword" placeholder="请再次确认新密码">
|
||||
</div>
|
||||
<div class="form-actions" style="margin-top: 25px; display: flex; justify-content: flex-end;">
|
||||
<button class="btn btn-add" id="btnChangePassword">提交修改</button>
|
||||
</div>
|
||||
<div class="form-message" id="changePasswordMessage"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Server Detail Modal -->
|
||||
<div class="modal-overlay" id="serverDetailModal">
|
||||
<div class="modal" style="max-width: 800px; width: 95%;">
|
||||
<div class="modal-header">
|
||||
<div style="display: flex; flex-direction: column;">
|
||||
<h2 id="serverDetailTitle" style="margin-bottom: 0;">服务器详情</h2>
|
||||
</div>
|
||||
<button class="modal-close" id="serverDetailClose">×</button>
|
||||
</div>
|
||||
<div class="modal-body" id="serverDetailBody" style="padding: 0;">
|
||||
<div id="detailLoading" style="text-align: center; padding: 40px; display: none;">
|
||||
<div class="dot dot-pulse"
|
||||
style="display: inline-block; width: 12px; height: 12px; background: var(--accent-indigo);"></div>
|
||||
<span style="margin-left: 10px; color: var(--text-secondary);">正在从数据源读取详情...</span>
|
||||
</div>
|
||||
<div class="detail-container" id="detailContainer">
|
||||
<!-- Metric Items are injected here -->
|
||||
<div class="detail-metrics-list" id="detailMetricsList"></div>
|
||||
|
||||
<div class="detail-partitions-container metric-item" id="detailPartitionsContainer" style="display: none;">
|
||||
<div class="metric-item-header" id="partitionHeader">
|
||||
<div class="metric-label-group">
|
||||
<span class="metric-label">磁盘分区详情 (已挂载)</span>
|
||||
<span class="metric-value" id="partitionSummary">读取中...</span>
|
||||
</div>
|
||||
<svg class="chevron-icon" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
|
||||
<polyline points="6 9 12 15 18 9"></polyline>
|
||||
</svg>
|
||||
</div>
|
||||
<div class="metric-item-content" id="partitionContent">
|
||||
<div class="detail-partitions-list" id="detailPartitionsList"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="detail-info-grid" id="detailInfoGrid">
|
||||
<div class="info-item">
|
||||
<span class="info-label">CPU 核心总数</span>
|
||||
<span class="info-value" id="detailCpuCores">0 核心</span>
|
||||
</div>
|
||||
<div class="info-item">
|
||||
<span class="info-label">物理内存总量</span>
|
||||
<span class="info-value" id="detailMemTotal">0 GB</span>
|
||||
</div>
|
||||
<div class="info-item">
|
||||
<span class="info-label">运行时间 (Uptime)</span>
|
||||
<span class="info-value" id="detailUptime">0天 0小时</span>
|
||||
</div>
|
||||
<div class="info-item">
|
||||
<span class="info-label">硬盘总量统计</span>
|
||||
<span class="info-value" id="detailDiskTotal">0 GB</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -70,6 +70,33 @@
|
||||
justify-content: center;
|
||||
padding: 10px 0;
|
||||
}
|
||||
|
||||
@media (max-width: 480px) {
|
||||
body {
|
||||
align-items: flex-start;
|
||||
padding: 16px 12px;
|
||||
}
|
||||
.init-container {
|
||||
padding: 24px 18px;
|
||||
border-radius: 10px;
|
||||
max-width: 100%;
|
||||
}
|
||||
.init-header h2 {
|
||||
font-size: 18px;
|
||||
}
|
||||
.init-header p {
|
||||
font-size: 12px;
|
||||
}
|
||||
.form-row {
|
||||
flex-direction: column;
|
||||
}
|
||||
.actions {
|
||||
flex-direction: column;
|
||||
}
|
||||
.actions .btn {
|
||||
width: 100%;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
@@ -117,11 +144,34 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="init-header" style="margin: 24px 0 16px 0; text-align: left;">
|
||||
<h3 style="font-size: 16px; color: var(--text-main); margin: 0;">Valkey / Redis 缓存配置 (可选)</h3>
|
||||
</div>
|
||||
|
||||
<div class="form-row">
|
||||
<div class="form-group" style="flex: 2;">
|
||||
<label for="vHost">Valkey 地址</label>
|
||||
<input type="text" id="vHost" value="localhost" placeholder="localhost" autocomplete="off">
|
||||
</div>
|
||||
<div class="form-group" style="flex: 1;">
|
||||
<label for="vPort">端口</label>
|
||||
<input type="number" id="vPort" value="6379" placeholder="6379" autocomplete="off">
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="form-row">
|
||||
<div class="form-group form-group-wide">
|
||||
<label for="vPassword">Valkey 密码</label>
|
||||
<input type="password" id="vPassword" placeholder="留空则无密码">
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="form-message" id="messageBox"></div>
|
||||
|
||||
<div class="actions">
|
||||
<button class="btn btn-test" id="btnTest">测试连接</button>
|
||||
<button class="btn btn-add" id="btnInit">初始化数据库</button>
|
||||
<div class="actions" style="flex-wrap: wrap;">
|
||||
<button class="btn btn-test" id="btnTest" style="flex: 1 1 45%;">测试 MySQL</button>
|
||||
<button class="btn btn-test" id="btnTestValkey" style="flex: 1 1 45%;">测试 Valkey</button>
|
||||
<button class="btn btn-add" id="btnInit" style="flex: 1 1 100%;">确认并初始化系统</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -165,8 +215,8 @@
|
||||
</div>
|
||||
<div class="form-row">
|
||||
<div class="form-group form-group-wide">
|
||||
<label for="promName">数据源名称</label>
|
||||
<input type="text" id="promName" placeholder="例如:生产环境" autocomplete="off">
|
||||
<label for="promSourceName">数据源名称</label>
|
||||
<input type="text" id="promSourceName" name="p-source-name-init" placeholder="例如:生产环境" autocomplete="one-time-code">
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
1567
public/js/app.js
1567
public/js/app.js
File diff suppressed because it is too large
Load Diff
@@ -9,11 +9,18 @@ class AreaChart {
|
||||
this.data = { timestamps: [], rx: [], tx: [] };
|
||||
this.animProgress = 0;
|
||||
this.animFrame = null;
|
||||
this.showP95 = true;
|
||||
this.showP95 = false;
|
||||
this.showRx = true;
|
||||
this.showTx = true;
|
||||
this.p95Type = 'tx'; // 'tx', 'rx', 'both'
|
||||
this.dpr = window.devicePixelRatio || 1;
|
||||
this.padding = { top: 20, right: 16, bottom: 32, left: 56 };
|
||||
|
||||
this._resize = this.resize.bind(this);
|
||||
this.prevMaxVal = 0;
|
||||
this.currentMaxVal = 0;
|
||||
|
||||
// Use debounced resize for performance and safety
|
||||
this._resize = typeof debounce === 'function' ? debounce(this.resize.bind(this), 100) : this.resize.bind(this);
|
||||
window.addEventListener('resize', this._resize);
|
||||
this.resize();
|
||||
}
|
||||
@@ -33,6 +40,21 @@ class AreaChart {
|
||||
setData(data) {
|
||||
if (!data || !data.timestamps) return;
|
||||
|
||||
// Store old data for smooth transition before updating this.data
|
||||
// Only clone if there is data to clone; otherwise use empty set
|
||||
if (this.data && this.data.timestamps && this.data.timestamps.length > 0) {
|
||||
this.prevData = {
|
||||
timestamps: [...this.data.timestamps],
|
||||
rx: [...this.data.rx],
|
||||
tx: [...this.data.tx]
|
||||
};
|
||||
} else {
|
||||
this.prevData = { timestamps: [], rx: [], tx: [] };
|
||||
}
|
||||
|
||||
// Smoothly transition max value context too
|
||||
this.prevMaxVal = this.currentMaxVal || 0;
|
||||
|
||||
// Downsample if data is too dense (target ~1500 points for performance)
|
||||
const MAX_POINTS = 1500;
|
||||
if (data.timestamps.length > MAX_POINTS) {
|
||||
@@ -48,10 +70,24 @@ class AreaChart {
|
||||
this.data = data;
|
||||
}
|
||||
|
||||
// Refresh currentMaxVal target for interpolation in draw()
|
||||
let rawMax = 1024;
|
||||
for (let i = 0; i < this.data.rx.length; i++) {
|
||||
if (this.showRx) rawMax = Math.max(rawMax, this.data.rx[i] || 0);
|
||||
if (this.showTx) rawMax = Math.max(rawMax, this.data.tx[i] || 0);
|
||||
}
|
||||
this.currentMaxVal = rawMax;
|
||||
|
||||
// Calculate P95 (95th percentile)
|
||||
// Common standard: 95th percentile of the peak (max of rx/tx or sum)
|
||||
// We'll use max(rx, tx) at each point which is common for billing
|
||||
const combined = data.rx.map((r, i) => Math.max(r || 0, data.tx[i] || 0));
|
||||
let combined = [];
|
||||
if (this.p95Type === 'tx') {
|
||||
combined = data.tx.map(t => t || 0);
|
||||
} else if (this.p95Type === 'rx') {
|
||||
combined = data.rx.map(r => r || 0);
|
||||
} else {
|
||||
combined = data.tx.map((t, i) => (t || 0) + (data.rx[i] || 0));
|
||||
}
|
||||
|
||||
if (combined.length > 0) {
|
||||
const sorted = [...combined].sort((a, b) => a - b);
|
||||
const p95Idx = Math.floor(sorted.length * 0.95);
|
||||
@@ -103,13 +139,15 @@ class AreaChart {
|
||||
return;
|
||||
}
|
||||
|
||||
// Find max raw value
|
||||
let maxDataVal = 0;
|
||||
for (let i = 0; i < rx.length; i++) {
|
||||
maxDataVal = Math.max(maxDataVal, rx[i] || 0, tx[i] || 0);
|
||||
// Determine consistent unit based on max data value
|
||||
let maxDataVal = 1024;
|
||||
if (this.prevMaxVal && this.animProgress < 1) {
|
||||
// Interpolate the max value context to keep vertical scale smooth
|
||||
maxDataVal = this.prevMaxVal + (this.currentMaxVal - this.prevMaxVal) * (this.animProgress || 0);
|
||||
} else {
|
||||
maxDataVal = this.currentMaxVal;
|
||||
}
|
||||
|
||||
// Determine consistent unit based on max data value
|
||||
const k = 1024;
|
||||
const sizes = ['B/s', 'KB/s', 'MB/s', 'GB/s', 'TB/s'];
|
||||
let unitIdx = Math.floor(Math.log(Math.max(1, maxDataVal)) / Math.log(k));
|
||||
@@ -139,9 +177,13 @@ class AreaChart {
|
||||
const len = timestamps.length;
|
||||
const xStep = chartW / (len - 1);
|
||||
|
||||
// Helper to get point
|
||||
// Helper to get point with smooth value transition
|
||||
const getX = (i) => p.left + i * xStep;
|
||||
const getY = (val) => p.top + chartH - (val / (maxVal || 1)) * chartH * this.animProgress;
|
||||
const getY = (val, prevVal = 0) => {
|
||||
// Interpolate value from previous state to new state
|
||||
const actualVal = prevVal + (val - prevVal) * this.animProgress;
|
||||
return p.top + chartH - (actualVal / (maxVal || 1)) * chartH;
|
||||
};
|
||||
|
||||
// Draw grid lines
|
||||
ctx.strokeStyle = 'rgba(99, 102, 241, 0.08)';
|
||||
@@ -177,15 +219,21 @@ class AreaChart {
|
||||
// Always show last label
|
||||
ctx.fillText(formatTime(timestamps[len - 1]), getX(len - 1), h - 8);
|
||||
|
||||
const getPVal = (arr, i) => (arr && i < arr.length) ? arr[i] : 0;
|
||||
|
||||
// Draw TX area
|
||||
this.drawArea(ctx, tx, getX, getY, chartH, p,
|
||||
if (this.showTx) {
|
||||
this.drawArea(ctx, tx, this.prevData ? this.prevData.tx : null, getX, getY, chartH, p,
|
||||
'rgba(99, 102, 241, 0.25)', 'rgba(99, 102, 241, 0.02)',
|
||||
'#6366f1', len);
|
||||
}
|
||||
|
||||
// Draw RX area (on top)
|
||||
this.drawArea(ctx, rx, getX, getY, chartH, p,
|
||||
if (this.showRx) {
|
||||
this.drawArea(ctx, rx, this.prevData ? this.prevData.rx : null, getX, getY, chartH, p,
|
||||
'rgba(6, 182, 212, 0.25)', 'rgba(6, 182, 212, 0.02)',
|
||||
'#06b6d4', len);
|
||||
}
|
||||
|
||||
// Draw P95 line
|
||||
if (this.showP95 && this.p95 && this.animProgress === 1) {
|
||||
@@ -217,22 +265,23 @@ class AreaChart {
|
||||
}
|
||||
}
|
||||
|
||||
drawArea(ctx, values, getX, getY, chartH, p, fillColorTop, fillColorBottom, strokeColor, len) {
|
||||
drawArea(ctx, values, prevValues, getX, getY, chartH, p, fillColorTop, fillColorBottom, strokeColor, len) {
|
||||
if (!values || values.length === 0) return;
|
||||
|
||||
const useSimple = len > 500;
|
||||
const useSimple = len > 250;
|
||||
const getPVal = (i) => (prevValues && i < prevValues.length) ? prevValues[i] : 0;
|
||||
|
||||
// Fill
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(getX(0), getY(values[0] || 0));
|
||||
ctx.moveTo(getX(0), getY(values[0] || 0, getPVal(0)));
|
||||
for (let i = 1; i < len; i++) {
|
||||
const currY = getY(values[i] || 0, getPVal(i));
|
||||
if (useSimple) {
|
||||
ctx.lineTo(getX(i), getY(values[i] || 0));
|
||||
ctx.lineTo(getX(i), currY);
|
||||
} else {
|
||||
const prevX = getX(i - 1);
|
||||
const currX = getX(i);
|
||||
const prevY = getY(values[i - 1] || 0);
|
||||
const currY = getY(values[i] || 0);
|
||||
const prevY = getY(values[i - 1] || 0, getPVal(i - 1));
|
||||
const midX = (prevX + currX) / 2;
|
||||
ctx.bezierCurveTo(midX, prevY, midX, currY, currX, currY);
|
||||
}
|
||||
@@ -249,15 +298,15 @@ class AreaChart {
|
||||
|
||||
// Stroke
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(getX(0), getY(values[0] || 0));
|
||||
ctx.moveTo(getX(0), getY(values[0] || 0, getPVal(0)));
|
||||
for (let i = 1; i < len; i++) {
|
||||
const currY = getY(values[i] || 0, getPVal(i));
|
||||
if (useSimple) {
|
||||
ctx.lineTo(getX(i), getY(values[i] || 0));
|
||||
ctx.lineTo(getX(i), currY);
|
||||
} else {
|
||||
const prevX = getX(i - 1);
|
||||
const currX = getX(i);
|
||||
const prevY = getY(values[i - 1] || 0);
|
||||
const currY = getY(values[i] || 0);
|
||||
const prevY = getY(values[i - 1] || 0, getPVal(i - 1));
|
||||
const midX = (prevX + currX) / 2;
|
||||
ctx.bezierCurveTo(midX, prevY, midX, currY, currX, currY);
|
||||
}
|
||||
@@ -273,3 +322,272 @@ class AreaChart {
|
||||
if (this.animFrame) cancelAnimationFrame(this.animFrame);
|
||||
}
|
||||
}
|
||||
|
||||
class MetricChart {
|
||||
constructor(canvas, unit = '') {
|
||||
this.canvas = canvas;
|
||||
this.ctx = canvas.getContext('2d');
|
||||
this.data = { timestamps: [], values: [], series: null };
|
||||
this.unit = unit; // '%', 'B/s', etc.
|
||||
this.dpr = window.devicePixelRatio || 1;
|
||||
this.padding = { top: 10, right: 10, bottom: 35, left: 60 };
|
||||
this.animProgress = 0;
|
||||
|
||||
this.prevMaxVal = 0;
|
||||
this.currentMaxVal = 0;
|
||||
|
||||
// Use debounced resize for performance and safety
|
||||
this._resize = typeof debounce === 'function' ? debounce(this.resize.bind(this), 100) : this.resize.bind(this);
|
||||
window.addEventListener('resize', this._resize);
|
||||
this.resize();
|
||||
}
|
||||
|
||||
resize() {
|
||||
const parent = this.canvas.parentElement;
|
||||
if (!parent) return;
|
||||
const rect = parent.getBoundingClientRect();
|
||||
if (rect.width === 0) return;
|
||||
this.width = rect.width;
|
||||
this.height = rect.height;
|
||||
this.canvas.width = this.width * this.dpr;
|
||||
this.canvas.height = this.height * this.dpr;
|
||||
this.canvas.style.width = this.width + 'px';
|
||||
this.canvas.style.height = this.height + 'px';
|
||||
this.ctx.setTransform(this.dpr, 0, 0, this.dpr, 0, 0);
|
||||
this.draw();
|
||||
}
|
||||
|
||||
setData(data) {
|
||||
if (this.data && this.data.values && this.data.values.length > 0) {
|
||||
this.prevData = JSON.parse(JSON.stringify(this.data));
|
||||
} else {
|
||||
this.prevData = { timestamps: [], values: [], series: null };
|
||||
}
|
||||
|
||||
this.prevMaxVal = this.currentMaxVal || 0;
|
||||
this.data = data || { timestamps: [], values: [], series: null };
|
||||
|
||||
// Target max
|
||||
if (this.data.series) {
|
||||
this.currentMaxVal = 100;
|
||||
} else {
|
||||
const raw = Math.max(...(this.data.values || []), 0.1);
|
||||
if (this.unit === '%' && raw <= 100) {
|
||||
if (raw > 80) this.currentMaxVal = 100;
|
||||
else if (raw > 40) this.currentMaxVal = 80;
|
||||
else if (raw > 20) this.currentMaxVal = 50;
|
||||
else this.currentMaxVal = 25;
|
||||
} else {
|
||||
this.currentMaxVal = raw * 1.25;
|
||||
}
|
||||
}
|
||||
|
||||
this.animate();
|
||||
}
|
||||
|
||||
animate() {
|
||||
if (this.animFrame) cancelAnimationFrame(this.animFrame);
|
||||
const start = performance.now();
|
||||
const duration = 500;
|
||||
const step = (now) => {
|
||||
const elapsed = now - start;
|
||||
this.animProgress = Math.min(elapsed / duration, 1);
|
||||
this.animProgress = 1 - Math.pow(1 - this.animProgress, 3);
|
||||
this.draw();
|
||||
if (elapsed < duration) this.animFrame = requestAnimationFrame(step);
|
||||
};
|
||||
this.animFrame = requestAnimationFrame(step);
|
||||
}
|
||||
|
||||
draw() {
|
||||
const ctx = this.ctx;
|
||||
const w = this.width;
|
||||
const h = this.height;
|
||||
const p = this.padding;
|
||||
const chartW = w - p.left - p.right;
|
||||
const chartH = h - p.top - p.bottom;
|
||||
|
||||
ctx.clearRect(0, 0, w, h);
|
||||
|
||||
const { timestamps, values, series } = this.data;
|
||||
if (!timestamps || timestamps.length < 2) {
|
||||
ctx.fillStyle = '#5a6380';
|
||||
ctx.font = '11px sans-serif';
|
||||
ctx.textAlign = 'center';
|
||||
ctx.fillText('正在加载或暂无数据...', w / 2, h / 2);
|
||||
return;
|
||||
}
|
||||
|
||||
// Determine Y max (interpolated)
|
||||
const targetMax = this.currentMaxVal || 0.1;
|
||||
const startMax = this.prevMaxVal || targetMax;
|
||||
const maxVal = startMax + (targetMax - startMax) * this.animProgress;
|
||||
|
||||
const len = timestamps.length;
|
||||
const xStep = chartW / (len - 1);
|
||||
const getX = (i) => p.left + i * xStep;
|
||||
const getY = (val, prevVal = 0) => {
|
||||
const actualVal = prevVal + (val - prevVal) * this.animProgress;
|
||||
return p.top + chartH - (actualVal / (maxVal || 1)) * chartH;
|
||||
};
|
||||
|
||||
// Grid
|
||||
ctx.strokeStyle = 'rgba(99, 102, 241, 0.05)';
|
||||
ctx.lineWidth = 1;
|
||||
for (let i = 0; i <= 3; i++) {
|
||||
const y = p.top + (chartH / 3) * i;
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(p.left, y);
|
||||
ctx.lineTo(p.left + chartW, y);
|
||||
ctx.stroke();
|
||||
|
||||
const v = (maxVal * (1 - i / 3));
|
||||
ctx.fillStyle = '#5a6380';
|
||||
ctx.font = '9px "JetBrains Mono", monospace';
|
||||
ctx.textAlign = 'right';
|
||||
|
||||
let label = '';
|
||||
if (this.unit === 'B/s' || this.unit === 'B') {
|
||||
const isRate = this.unit === 'B/s';
|
||||
if (window.formatBandwidth && isRate) {
|
||||
label = window.formatBandwidth(v);
|
||||
} else if (window.formatBytes) {
|
||||
label = window.formatBytes(v) + (isRate ? '/s' : '');
|
||||
} else {
|
||||
label = v.toFixed(0) + this.unit;
|
||||
}
|
||||
} else {
|
||||
label = (v >= 1000 ? (v / 1000).toFixed(1) + 'k' : v.toFixed(v < 10 && v > 0 ? 1 : 0)) + this.unit;
|
||||
}
|
||||
ctx.fillText(label, p.left - 8, y + 3);
|
||||
}
|
||||
|
||||
// X-axis Timeline
|
||||
ctx.fillStyle = '#5a6380';
|
||||
ctx.font = '9px "JetBrains Mono", monospace';
|
||||
ctx.textAlign = 'center';
|
||||
const labelInterval = Math.max(1, Math.floor(len / 5));
|
||||
for (let i = 0; i < len; i += labelInterval) {
|
||||
const x = getX(i);
|
||||
ctx.fillText(formatTime(timestamps[i]), x, h - 8);
|
||||
}
|
||||
// Always show last label if not already shown
|
||||
if ((len - 1) % labelInterval !== 0) {
|
||||
ctx.fillText(formatTime(timestamps[len - 1]), getX(len - 1), h - 8);
|
||||
}
|
||||
|
||||
if (series) {
|
||||
// Draw Stacked Area
|
||||
const modes = [
|
||||
{ name: 'idle', color: 'rgba(34, 197, 94, 0.4)', stroke: '#22c55e' }, // Green
|
||||
{ name: 'other', color: 'rgba(168, 85, 247, 0.4)', stroke: '#a855f7' }, // Purple
|
||||
{ name: 'irq', color: 'rgba(249, 115, 22, 0.4)', stroke: '#f97316' }, // Orange
|
||||
{ name: 'iowait', color: 'rgba(239, 68, 68, 0.4)', stroke: '#ef4444' }, // Red
|
||||
{ name: 'system', color: 'rgba(234, 179, 8, 0.4)', stroke: '#eab308' }, // Yellow
|
||||
{ name: 'user', color: 'rgba(99, 102, 241, 0.4)', stroke: '#6366f1' } // Indigo
|
||||
];
|
||||
|
||||
let currentBase = new Array(len).fill(0);
|
||||
let prevBase = new Array(len).fill(0);
|
||||
|
||||
modes.forEach(mode => {
|
||||
const vals = series[mode.name];
|
||||
if (!vals) return;
|
||||
|
||||
const prevVals = (this.prevData && this.prevData.series) ? this.prevData.series[mode.name] : null;
|
||||
const getPVal = (arr, idx) => (arr && idx < arr.length) ? arr[idx] : 0;
|
||||
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(getX(0), getY(currentBase[0] + vals[0], getPVal(prevBase, 0) + getPVal(prevVals, 0)));
|
||||
for (let i = 1; i < len; i++) {
|
||||
ctx.lineTo(getX(i), getY(currentBase[i] + vals[i], getPVal(prevBase, i) + getPVal(prevVals, i)));
|
||||
}
|
||||
ctx.lineTo(getX(len - 1), getY(currentBase[len - 1], getPVal(prevBase, len - 1)));
|
||||
for (let i = len - 1; i >= 0; i--) {
|
||||
ctx.lineTo(getX(i), getY(currentBase[i], getPVal(prevBase, i)));
|
||||
}
|
||||
ctx.closePath();
|
||||
ctx.fillStyle = mode.color;
|
||||
ctx.fill();
|
||||
|
||||
// Stroke
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(getX(0), getY(currentBase[0] + vals[0], getPVal(prevBase, 0) + getPVal(prevVals, 0)));
|
||||
for (let i = 1; i < len; i++) {
|
||||
ctx.lineTo(getX(i), getY(currentBase[i] + vals[i], getPVal(prevBase, i) + getPVal(prevVals, i)));
|
||||
}
|
||||
ctx.strokeStyle = mode.stroke;
|
||||
ctx.lineWidth = 1;
|
||||
ctx.stroke();
|
||||
|
||||
// Update boxes for next series
|
||||
for (let i = 0; i < len; i++) {
|
||||
currentBase[i] += vals[i];
|
||||
if (prevBase) prevBase[i] = (prevBase[i] || 0) + getPVal(prevVals, i);
|
||||
}
|
||||
});
|
||||
|
||||
// Add Legend at bottom right (moved up slightly)
|
||||
ctx.font = '9px sans-serif';
|
||||
ctx.textAlign = 'right';
|
||||
let lx = w - 10;
|
||||
let ly = h - 20; // Increased padding from bottom
|
||||
[...modes].reverse().forEach(m => {
|
||||
ctx.fillStyle = m.stroke;
|
||||
ctx.fillRect(lx - 10, ly - 8, 8, 8);
|
||||
ctx.fillStyle = '#5a6380';
|
||||
ctx.fillText(m.name.charAt(0).toUpperCase() + m.name.slice(1), lx - 15, ly - 1);
|
||||
lx -= 70; // Increased gap for safety
|
||||
});
|
||||
|
||||
} else {
|
||||
const useSimple = len > 250;
|
||||
const prevVals = this.prevData ? this.prevData.values : null;
|
||||
const getPVal = (i) => (prevVals && i < prevVals.length) ? prevVals[i] : 0;
|
||||
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(getX(0), getY(values[0], getPVal(0)));
|
||||
for (let i = 1; i < len; i++) {
|
||||
const currY = getY(values[i], getPVal(i));
|
||||
if (useSimple) {
|
||||
ctx.lineTo(getX(i), currY);
|
||||
} else {
|
||||
const prevX = getX(i - 1);
|
||||
const currX = getX(i);
|
||||
const prevY = getY(values[i - 1], getPVal(i - 1));
|
||||
const midX = (prevX + currX) / 2;
|
||||
ctx.bezierCurveTo(midX, prevY, midX, currY, currX, currY);
|
||||
}
|
||||
}
|
||||
|
||||
// Stroke
|
||||
ctx.strokeStyle = '#6366f1';
|
||||
ctx.lineWidth = 2;
|
||||
ctx.lineJoin = 'round';
|
||||
ctx.stroke();
|
||||
|
||||
// Fill
|
||||
ctx.lineTo(getX(len - 1), p.top + chartH);
|
||||
ctx.lineTo(getX(0), p.top + chartH);
|
||||
ctx.closePath();
|
||||
const grad = ctx.createLinearGradient(0, p.top, 0, p.top + chartH);
|
||||
grad.addColorStop(0, 'rgba(99, 102, 241, 0.15)');
|
||||
grad.addColorStop(1, 'rgba(99, 102, 241, 0)');
|
||||
ctx.fillStyle = grad;
|
||||
ctx.fill();
|
||||
|
||||
// Last point pulse
|
||||
const lastX = getX(len - 1);
|
||||
const lastY = getY(values[len - 1]);
|
||||
ctx.beginPath();
|
||||
ctx.arc(lastX, lastY, 3, 0, Math.PI * 2);
|
||||
ctx.fillStyle = '#6366f1';
|
||||
ctx.fill();
|
||||
}
|
||||
}
|
||||
|
||||
destroy() {
|
||||
window.removeEventListener('resize', this._resize);
|
||||
if (this.animFrame) cancelAnimationFrame(this.animFrame);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,8 +4,12 @@ document.addEventListener('DOMContentLoaded', () => {
|
||||
const userInput = document.getElementById('user');
|
||||
const passwordInput = document.getElementById('password');
|
||||
const databaseInput = document.getElementById('database');
|
||||
const vHostInput = document.getElementById('vHost');
|
||||
const vPortInput = document.getElementById('vPort');
|
||||
const vPasswordInput = document.getElementById('vPassword');
|
||||
|
||||
const btnTest = document.getElementById('btnTest');
|
||||
const btnTestValkey = document.getElementById('btnTestValkey');
|
||||
const btnInit = document.getElementById('btnInit');
|
||||
const messageBox = document.getElementById('messageBox');
|
||||
|
||||
@@ -14,7 +18,7 @@ document.addEventListener('DOMContentLoaded', () => {
|
||||
|
||||
const promForm = document.getElementById('promForm');
|
||||
const initForm = document.getElementById('initForm');
|
||||
const promName = document.getElementById('promName');
|
||||
const promName = document.getElementById('promSourceName');
|
||||
const promUrl = document.getElementById('promUrl');
|
||||
const promDesc = document.getElementById('promDesc');
|
||||
const btnPromTest = document.getElementById('btnPromTest');
|
||||
@@ -65,6 +69,7 @@ document.addEventListener('DOMContentLoaded', () => {
|
||||
promForm.style.display = 'block';
|
||||
initHeaderTitle.textContent = '配置 Prometheus';
|
||||
initHeaderDesc.textContent = '配置您的第一个 Prometheus 数据源监控连接';
|
||||
if (promName) promName.value = ''; // Ensure it's clear on load
|
||||
}
|
||||
} catch (err) {
|
||||
initForm.style.display = 'block';
|
||||
@@ -102,6 +107,34 @@ document.addEventListener('DOMContentLoaded', () => {
|
||||
}
|
||||
});
|
||||
|
||||
btnTestValkey.addEventListener('click', async () => {
|
||||
btnTestValkey.disabled = true;
|
||||
const oldText = btnTestValkey.textContent;
|
||||
btnTestValkey.textContent = '测试中...';
|
||||
try {
|
||||
const res = await fetch('/api/setup/test-valkey', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
host: vHostInput.value,
|
||||
port: vPortInput.value,
|
||||
password: vPasswordInput.value
|
||||
})
|
||||
});
|
||||
const data = await res.json();
|
||||
if (data.success) {
|
||||
showMessage('Valkey 连接成功!');
|
||||
} else {
|
||||
showMessage('Valkey 连接失败: ' + (data.error || '未知错误'), true);
|
||||
}
|
||||
} catch (err) {
|
||||
showMessage('Valkey 请求失败: ' + err.message, true);
|
||||
} finally {
|
||||
btnTestValkey.disabled = false;
|
||||
btnTestValkey.textContent = oldText;
|
||||
}
|
||||
});
|
||||
|
||||
btnInit.addEventListener('click', async () => {
|
||||
btnInit.disabled = true;
|
||||
const oldText = btnInit.textContent;
|
||||
@@ -115,7 +148,10 @@ document.addEventListener('DOMContentLoaded', () => {
|
||||
port: portInput.value,
|
||||
user: userInput.value,
|
||||
password: passwordInput.value,
|
||||
database: databaseInput.value
|
||||
database: databaseInput.value,
|
||||
vHost: vHostInput.value,
|
||||
vPort: vPortInput.value,
|
||||
vPassword: vPasswordInput.value
|
||||
})
|
||||
});
|
||||
const data = await res.json();
|
||||
|
||||
@@ -28,6 +28,15 @@ function formatBandwidth(bytesPerSec, decimals = 2) {
|
||||
return value.toFixed(decimals) + ' ' + sizes[i];
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert bytes per second to MB/s (numeric string)
|
||||
*/
|
||||
function toMBps(bytesPerSec, decimals = 2) {
|
||||
if (!bytesPerSec || bytesPerSec === 0) return '0.00';
|
||||
const mbps = bytesPerSec / (1024 * 1024);
|
||||
return mbps.toFixed(decimals);
|
||||
}
|
||||
|
||||
/**
|
||||
* Format percentage
|
||||
*/
|
||||
@@ -102,3 +111,17 @@ function animateValue(element, start, end, duration = 600) {
|
||||
|
||||
requestAnimationFrame(update);
|
||||
}
|
||||
|
||||
/**
|
||||
* Debounce function to limit execution frequency
|
||||
*/
|
||||
function debounce(fn, delay) {
|
||||
let timer = null;
|
||||
return function (...args) {
|
||||
if (timer) clearTimeout(timer);
|
||||
timer = setTimeout(() => {
|
||||
fn.apply(this, args);
|
||||
timer = null;
|
||||
}, delay);
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,14 +1,20 @@
|
||||
const Redis = require('ioredis');
|
||||
|
||||
const host = process.env.VALKEY_HOST || 'localhost';
|
||||
const port = parseInt(process.env.VALKEY_PORT) || 6379;
|
||||
const password = process.env.VALKEY_PASSWORD || undefined;
|
||||
const db = parseInt(process.env.VALKEY_DB) || 0;
|
||||
const ttl = parseInt(process.env.VALKEY_TTL) || 30;
|
||||
|
||||
let redis = null;
|
||||
let ttl = 30;
|
||||
|
||||
try {
|
||||
function init() {
|
||||
if (redis) {
|
||||
redis.disconnect();
|
||||
}
|
||||
|
||||
const host = process.env.VALKEY_HOST || 'localhost';
|
||||
const port = parseInt(process.env.VALKEY_PORT) || 6379;
|
||||
const password = process.env.VALKEY_PASSWORD || undefined;
|
||||
const db = parseInt(process.env.VALKEY_DB) || 0;
|
||||
ttl = parseInt(process.env.VALKEY_TTL) || 30;
|
||||
|
||||
try {
|
||||
redis = new Redis({
|
||||
host,
|
||||
port,
|
||||
@@ -22,11 +28,15 @@ try {
|
||||
// Fail silently after one retry, we just won't cache
|
||||
console.warn('[Cache] Valkey connection failed, caching disabled:', err.message);
|
||||
});
|
||||
} catch (err) {
|
||||
} catch (err) {
|
||||
console.warn('[Cache] Valkey init failed:', err.message);
|
||||
}
|
||||
}
|
||||
|
||||
init();
|
||||
|
||||
const cache = {
|
||||
init,
|
||||
async get(key) {
|
||||
if (!redis) return null;
|
||||
try {
|
||||
@@ -53,6 +63,17 @@ const cache = {
|
||||
} catch (e) {
|
||||
// ignore
|
||||
}
|
||||
},
|
||||
|
||||
async checkHealth() {
|
||||
if (!redis) return { status: 'down', error: 'Valkey client not initialized' };
|
||||
try {
|
||||
const result = await redis.ping();
|
||||
if (result === 'PONG') return { status: 'up' };
|
||||
return { status: 'down', error: 'Invalid ping response' };
|
||||
} catch (e) {
|
||||
return { status: 'down', error: e.message };
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -13,20 +13,15 @@ const REQUIRED_TABLES = [
|
||||
'users',
|
||||
'prometheus_sources',
|
||||
'site_settings',
|
||||
'traffic_stats'
|
||||
'traffic_stats',
|
||||
'server_locations',
|
||||
'latency_routes'
|
||||
];
|
||||
|
||||
async function checkAndFixDatabase() {
|
||||
// Only run if .env is already configured
|
||||
const envPath = path.join(__dirname, '..', '.env');
|
||||
if (!fs.existsSync(envPath)) return;
|
||||
|
||||
const dbHost = process.env.MYSQL_HOST || 'localhost';
|
||||
const dbUser = process.env.MYSQL_USER || 'root';
|
||||
const dbPass = process.env.MYSQL_PASSWORD || '';
|
||||
const dbPort = parseInt(process.env.MYSQL_PORT) || 3306;
|
||||
const dbName = process.env.MYSQL_DATABASE || 'display_wall';
|
||||
|
||||
try {
|
||||
// Check tables
|
||||
const [rows] = await db.query("SHOW TABLES");
|
||||
@@ -35,36 +30,73 @@ async function checkAndFixDatabase() {
|
||||
const missingTables = REQUIRED_TABLES.filter(t => !existingTables.includes(t));
|
||||
|
||||
if (missingTables.length > 0) {
|
||||
console.log(`[Database Integrity] ⚠️ Missing tables: ${missingTables.join(', ')}`);
|
||||
await recreateDatabase(dbHost, dbPort, dbUser, dbPass, dbName);
|
||||
} else {
|
||||
// console.log(`[Database Integrity] ✅ All tables accounted for.`);
|
||||
console.log(`[Database Integrity] ⚠️ Missing tables: ${missingTables.join(', ')}. Creating them...`);
|
||||
|
||||
for (const table of missingTables) {
|
||||
await createTable(table);
|
||||
}
|
||||
console.log(`[Database Integrity] ✅ Missing tables created.`);
|
||||
}
|
||||
|
||||
// Check for is_server_source and type in prometheus_sources
|
||||
const [promColumns] = await db.query("SHOW COLUMNS FROM prometheus_sources");
|
||||
const promColumnNames = promColumns.map(c => c.Field);
|
||||
|
||||
if (!promColumnNames.includes('is_server_source')) {
|
||||
console.log(`[Database Integrity] ⚠️ Missing column 'is_server_source' in 'prometheus_sources'. Adding it...`);
|
||||
await db.query("ALTER TABLE prometheus_sources ADD COLUMN is_server_source TINYINT(1) DEFAULT 1 AFTER description");
|
||||
console.log(`[Database Integrity] ✅ Column 'is_server_source' added.`);
|
||||
}
|
||||
|
||||
if (!promColumnNames.includes('type')) {
|
||||
console.log(`[Database Integrity] ⚠️ Missing column 'type' in 'prometheus_sources'. Adding it...`);
|
||||
await db.query("ALTER TABLE prometheus_sources ADD COLUMN type VARCHAR(50) DEFAULT 'prometheus' AFTER is_server_source");
|
||||
console.log(`[Database Integrity] ✅ Column 'type' added.`);
|
||||
}
|
||||
|
||||
// Check for new columns in site_settings
|
||||
const [columns] = await db.query("SHOW COLUMNS FROM site_settings");
|
||||
const columnNames = columns.map(c => c.Field);
|
||||
if (!columnNames.includes('show_95_bandwidth')) {
|
||||
console.log(`[Database Integrity] ⚠️ Missing column 'show_95_bandwidth' in 'site_settings'. Adding it...`);
|
||||
await db.query("ALTER TABLE site_settings ADD COLUMN show_95_bandwidth TINYINT(1) DEFAULT 0 AFTER default_theme");
|
||||
console.log(`[Database Integrity] ✅ Column 'show_95_bandwidth' added.`);
|
||||
}
|
||||
if (!columnNames.includes('p95_type')) {
|
||||
console.log(`[Database Integrity] ⚠️ Missing column 'p95_type' in 'site_settings'. Adding it...`);
|
||||
await db.query("ALTER TABLE site_settings ADD COLUMN p95_type VARCHAR(20) DEFAULT 'tx' AFTER show_95_bandwidth");
|
||||
console.log(`[Database Integrity] ✅ Column 'p95_type' added.`);
|
||||
}
|
||||
if (!columnNames.includes('blackbox_source_id')) {
|
||||
console.log(`[Database Integrity] ⚠️ Missing column 'blackbox_source_id' in 'site_settings'. Adding it...`);
|
||||
await db.query("ALTER TABLE site_settings ADD COLUMN blackbox_source_id INT AFTER p95_type");
|
||||
console.log(`[Database Integrity] ✅ Column 'blackbox_source_id' added.`);
|
||||
}
|
||||
if (!columnNames.includes('latency_source')) {
|
||||
console.log(`[Database Integrity] ⚠️ Missing column 'latency_source' in 'site_settings'. Adding it...`);
|
||||
await db.query("ALTER TABLE site_settings ADD COLUMN latency_source VARCHAR(100) AFTER blackbox_source_id");
|
||||
console.log(`[Database Integrity] ✅ Column 'latency_source' added.`);
|
||||
}
|
||||
if (!columnNames.includes('latency_dest')) {
|
||||
console.log(`[Database Integrity] ⚠️ Missing column 'latency_dest' in 'site_settings'. Adding it...`);
|
||||
await db.query("ALTER TABLE site_settings ADD COLUMN latency_dest VARCHAR(100) AFTER latency_source");
|
||||
console.log(`[Database Integrity] ✅ Column 'latency_dest' added.`);
|
||||
}
|
||||
if (!columnNames.includes('latency_target')) {
|
||||
console.log(`[Database Integrity] ⚠️ Missing column 'latency_target' in 'site_settings'. Adding it...`);
|
||||
await db.query("ALTER TABLE site_settings ADD COLUMN latency_target VARCHAR(255) AFTER latency_dest");
|
||||
console.log(`[Database Integrity] ✅ Column 'latency_target' added.`);
|
||||
}
|
||||
} catch (err) {
|
||||
if (err.code === 'ER_BAD_DB_ERROR') {
|
||||
console.log(`[Database Integrity] ⚠️ Database "${dbName}" does not exist.`);
|
||||
await recreateDatabase(dbHost, dbPort, dbUser, dbPass, dbName);
|
||||
} else {
|
||||
console.error('[Database Integrity] ❌ Error checking integrity:', err.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function recreateDatabase(host, port, user, password, dbName) {
|
||||
console.log(`[Database Integrity] 🔄 Re-initializing database "${dbName}"...`);
|
||||
|
||||
let connection;
|
||||
try {
|
||||
connection = await mysql.createConnection({ host, port, user, password });
|
||||
|
||||
// Drop and create database
|
||||
await connection.query(`DROP DATABASE IF EXISTS \`${dbName}\``);
|
||||
await connection.query(`CREATE DATABASE \`${dbName}\` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci`);
|
||||
await connection.query(`USE \`${dbName}\``);
|
||||
|
||||
// Recreate all tables
|
||||
console.log(' - Creating table "users"...');
|
||||
await connection.query(`
|
||||
async function createTable(tableName) {
|
||||
console.log(` - Creating table "${tableName}"...`);
|
||||
switch (tableName) {
|
||||
case 'users':
|
||||
await db.query(`
|
||||
CREATE TABLE IF NOT EXISTS users (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
username VARCHAR(255) NOT NULL UNIQUE,
|
||||
@@ -73,9 +105,9 @@ async function recreateDatabase(host, port, user, password, dbName) {
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`);
|
||||
|
||||
console.log(' - Creating table "prometheus_sources"...');
|
||||
await connection.query(`
|
||||
break;
|
||||
case 'prometheus_sources':
|
||||
await db.query(`
|
||||
CREATE TABLE IF NOT EXISTS prometheus_sources (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
@@ -85,25 +117,31 @@ async function recreateDatabase(host, port, user, password, dbName) {
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`);
|
||||
|
||||
console.log(' - Creating table "site_settings"...');
|
||||
await connection.query(`
|
||||
break;
|
||||
case 'site_settings':
|
||||
await db.query(`
|
||||
CREATE TABLE IF NOT EXISTS site_settings (
|
||||
id INT PRIMARY KEY DEFAULT 1,
|
||||
page_name VARCHAR(255) DEFAULT '数据可视化展示大屏',
|
||||
title VARCHAR(255) DEFAULT '数据可视化展示大屏',
|
||||
logo_url TEXT,
|
||||
default_theme VARCHAR(20) DEFAULT 'dark',
|
||||
show_95_bandwidth TINYINT(1) DEFAULT 0,
|
||||
p95_type VARCHAR(20) DEFAULT 'tx',
|
||||
blackbox_source_id INT,
|
||||
latency_source VARCHAR(100),
|
||||
latency_dest VARCHAR(100),
|
||||
latency_target VARCHAR(255),
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`);
|
||||
await connection.query(`
|
||||
INSERT INTO site_settings (id, page_name, title, default_theme)
|
||||
VALUES (1, '数据可视化展示大屏', '数据可视化展示大屏', 'dark')
|
||||
await db.query(`
|
||||
INSERT IGNORE INTO site_settings (id, page_name, title, default_theme, show_95_bandwidth)
|
||||
VALUES (1, '数据可视化展示大屏', '数据可视化展示大屏', 'dark', 0)
|
||||
`);
|
||||
|
||||
console.log(' - Creating table "traffic_stats"...');
|
||||
await connection.query(`
|
||||
break;
|
||||
case 'traffic_stats':
|
||||
await db.query(`
|
||||
CREATE TABLE IF NOT EXISTS traffic_stats (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
rx_bytes BIGINT UNSIGNED DEFAULT 0,
|
||||
@@ -114,16 +152,34 @@ async function recreateDatabase(host, port, user, password, dbName) {
|
||||
UNIQUE INDEX (timestamp)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`);
|
||||
|
||||
console.log(`[Database Integrity] ✅ Re-initialization complete.`);
|
||||
|
||||
// Refresh db pool in the main app context
|
||||
db.initPool();
|
||||
|
||||
} catch (err) {
|
||||
console.error('[Database Integrity] ❌ Critical failure during re-initialization:', err.message);
|
||||
} finally {
|
||||
if (connection) await connection.end();
|
||||
break;
|
||||
case 'latency_routes':
|
||||
await db.query(`
|
||||
CREATE TABLE IF NOT EXISTS latency_routes (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
source_id INT NOT NULL,
|
||||
latency_source VARCHAR(100) NOT NULL,
|
||||
latency_dest VARCHAR(100) NOT NULL,
|
||||
latency_target VARCHAR(255) NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`);
|
||||
break;
|
||||
case 'server_locations':
|
||||
await db.query(`
|
||||
CREATE TABLE IF NOT EXISTS server_locations (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
ip VARCHAR(255) NOT NULL UNIQUE,
|
||||
country CHAR(2),
|
||||
country_name VARCHAR(100),
|
||||
region VARCHAR(100),
|
||||
city VARCHAR(100),
|
||||
latitude DOUBLE,
|
||||
longitude DOUBLE,
|
||||
last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
13
server/db.js
13
server/db.js
@@ -18,9 +18,20 @@ function initPool() {
|
||||
});
|
||||
}
|
||||
|
||||
async function checkHealth() {
|
||||
try {
|
||||
if (!pool) return { status: 'down', error: 'Database pool not initialized' };
|
||||
await pool.query('SELECT 1');
|
||||
return { status: 'up' };
|
||||
} catch (err) {
|
||||
return { status: 'down', error: err.message };
|
||||
}
|
||||
}
|
||||
|
||||
initPool();
|
||||
|
||||
module.exports = {
|
||||
query: (...args) => pool.query(...args),
|
||||
initPool
|
||||
initPool,
|
||||
checkHealth
|
||||
};
|
||||
|
||||
138
server/geo-service.js
Normal file
138
server/geo-service.js
Normal file
@@ -0,0 +1,138 @@
|
||||
const axios = require('axios');
|
||||
const net = require('net');
|
||||
const dns = require('dns').promises;
|
||||
const db = require('./db');
|
||||
|
||||
/**
|
||||
* Geo Location Service
|
||||
* Resolves IP addresses to geographical coordinates and country info.
|
||||
* Caches results in the database to minimize API calls.
|
||||
*/
|
||||
|
||||
const ipInfoToken = process.env.IPINFO_TOKEN;
|
||||
|
||||
/**
|
||||
* Normalizes geo data for consistent display
|
||||
*/
|
||||
function normalizeGeo(geo) {
|
||||
if (!geo) return geo;
|
||||
|
||||
// Custom normalization for TW, HK, MO to "China, {CODE}"
|
||||
const specialRegions = ['TW'];
|
||||
if (specialRegions.includes(geo.country?.toUpperCase())) {
|
||||
return {
|
||||
...geo,
|
||||
city: `China, ${geo.country.toUpperCase()}`,
|
||||
country_name: 'China'
|
||||
};
|
||||
}
|
||||
return geo;
|
||||
}
|
||||
|
||||
async function getLocation(target) {
|
||||
// Normalize target (strip port if present)
|
||||
const cleanTarget = target.split(':')[0];
|
||||
|
||||
// 1. Check if we already have this IP/Domain in DB (FASTEST)
|
||||
try {
|
||||
const [rows] = await db.query('SELECT * FROM server_locations WHERE ip = ?', [cleanTarget]);
|
||||
if (rows.length > 0) {
|
||||
const data = rows[0];
|
||||
const age = Date.now() - new Date(data.last_updated).getTime();
|
||||
if (age < 30 * 24 * 60 * 60 * 1000) {
|
||||
return normalizeGeo(data);
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
// console.error(`[Geo Service] DB check failed for ${cleanTarget}`);
|
||||
}
|
||||
|
||||
// 2. Resolve domain to IP if needed
|
||||
let cleanIp = cleanTarget;
|
||||
if (net.isIP(cleanTarget) === 0) {
|
||||
try {
|
||||
const lookup = await dns.lookup(cleanTarget);
|
||||
cleanIp = lookup.address;
|
||||
|
||||
// Secondary DB check with resolved IP
|
||||
const [rows] = await db.query('SELECT * FROM server_locations WHERE ip = ?', [cleanIp]);
|
||||
if (rows.length > 0) {
|
||||
return normalizeGeo(rows[0]);
|
||||
}
|
||||
} catch (err) {
|
||||
// Quiet DNS failure for tokens (legacy bug mitigation)
|
||||
if (!/^[0-9a-f]{16}$/i.test(cleanTarget)) {
|
||||
console.error(`[Geo Service] DNS resolution failed for ${cleanTarget}:`, err.message);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Skip local/reserved IPs
|
||||
if (isLocalIp(cleanIp)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// 4. Resolve via ipinfo.io (LAST RESORT)
|
||||
try {
|
||||
console.log(`[Geo Service] API lookup (ipinfo.io) for: ${cleanIp}`);
|
||||
const url = `https://ipinfo.io/${cleanIp}/json${ipInfoToken ? `?token=${ipInfoToken}` : ''}`;
|
||||
const response = await axios.get(url, { timeout: 5000 });
|
||||
const geo = normalizeGeo(response.data);
|
||||
|
||||
if (geo && geo.loc) {
|
||||
const [lat, lon] = geo.loc.split(',').map(Number);
|
||||
const locationData = {
|
||||
ip: cleanIp,
|
||||
country: geo.country,
|
||||
country_name: geo.country_name || geo.country, // ipinfo might not have country_name in basic response
|
||||
region: geo.region,
|
||||
city: geo.city,
|
||||
latitude: lat,
|
||||
longitude: lon
|
||||
};
|
||||
|
||||
// Save to DB
|
||||
await db.query(`
|
||||
INSERT INTO server_locations (ip, country, country_name, region, city, latitude, longitude)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
country = VALUES(country),
|
||||
country_name = VALUES(country_name),
|
||||
region = VALUES(region),
|
||||
city = VALUES(city),
|
||||
latitude = VALUES(latitude),
|
||||
longitude = VALUES(longitude)
|
||||
`, [
|
||||
locationData.ip,
|
||||
locationData.country,
|
||||
locationData.country_name,
|
||||
locationData.region,
|
||||
locationData.city,
|
||||
locationData.latitude,
|
||||
locationData.longitude
|
||||
]);
|
||||
|
||||
return locationData;
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(`[Geo Service] Error resolving IP ${cleanIp}:`, err.message);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function isLocalIp(ip) {
|
||||
if (ip === 'localhost' || ip === '127.0.0.1' || ip === '::1') return true;
|
||||
|
||||
// RFC1918 private addresses
|
||||
const p1 = /^10\./;
|
||||
const p2 = /^172\.(1[6-9]|2[0-9]|3[0-1])\./;
|
||||
const p3 = /^192\.168\./;
|
||||
|
||||
return p1.test(ip) || p2.test(ip) || p3.test(ip);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getLocation
|
||||
};
|
||||
516
server/index.js
516
server/index.js
@@ -5,7 +5,11 @@ const path = require('path');
|
||||
const db = require('./db');
|
||||
const prometheusService = require('./prometheus-service');
|
||||
const cache = require('./cache');
|
||||
const geoService = require('./geo-service');
|
||||
const latencyService = require('./latency-service');
|
||||
const checkAndFixDatabase = require('./db-integrity-check');
|
||||
const http = require('http');
|
||||
const WebSocket = require('ws');
|
||||
|
||||
const app = express();
|
||||
const PORT = process.env.PORT || 3000;
|
||||
@@ -50,6 +54,50 @@ async function checkDb() {
|
||||
}
|
||||
|
||||
checkDb();
|
||||
|
||||
// --- Health API ---
|
||||
app.get('/health', async (req, res) => {
|
||||
try {
|
||||
const dbStatus = await db.checkHealth();
|
||||
const cacheStatus = await cache.checkHealth();
|
||||
const isAllOk = dbStatus.status === 'up' && cacheStatus.status === 'up';
|
||||
|
||||
const healthInfo = {
|
||||
status: isAllOk ? 'ok' : 'error',
|
||||
timestamp: new Date().toISOString(),
|
||||
service: {
|
||||
status: 'running',
|
||||
uptime: Math.floor(process.uptime()),
|
||||
memory_usage: {
|
||||
rss: Math.floor(process.memoryUsage().rss / 1024 / 1024) + ' MB',
|
||||
heapTotal: Math.floor(process.memoryUsage().heapTotal / 1024 / 1024) + ' MB'
|
||||
},
|
||||
node_version: process.version
|
||||
},
|
||||
checks: {
|
||||
database: {
|
||||
name: 'MySQL',
|
||||
status: dbStatus.status,
|
||||
message: dbStatus.error || 'Connected'
|
||||
},
|
||||
valkey: {
|
||||
name: 'Valkey (Redis)',
|
||||
status: cacheStatus.status,
|
||||
message: cacheStatus.error || 'Connected'
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if (isAllOk) {
|
||||
res.json(healthInfo);
|
||||
} else {
|
||||
res.status(500).json(healthInfo);
|
||||
}
|
||||
} catch (err) {
|
||||
res.status(500).json({ status: 'error', message: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
// --- Auth API ---
|
||||
app.post('/api/auth/login', async (req, res) => {
|
||||
const { username, password } = req.body;
|
||||
@@ -80,6 +128,34 @@ app.post('/api/auth/logout', (req, res) => {
|
||||
res.json({ success: true });
|
||||
});
|
||||
|
||||
app.post('/api/auth/change-password', requireAuth, async (req, res) => {
|
||||
const { oldPassword, newPassword } = req.body;
|
||||
if (!oldPassword || !newPassword) {
|
||||
return res.status(400).json({ error: '需要输入旧密码和新密码' });
|
||||
}
|
||||
|
||||
try {
|
||||
const [rows] = await db.query('SELECT * FROM users WHERE id = ?', [req.user.id]);
|
||||
if (rows.length === 0) return res.status(404).json({ error: '用户不存在' });
|
||||
|
||||
const user = rows[0];
|
||||
const oldHash = crypto.pbkdf2Sync(oldPassword, user.salt, 1000, 64, 'sha512').toString('hex');
|
||||
|
||||
if (oldHash !== user.password) {
|
||||
return res.status(401).json({ error: '旧密码输入错误' });
|
||||
}
|
||||
|
||||
const newSalt = crypto.randomBytes(16).toString('hex');
|
||||
const newHash = crypto.pbkdf2Sync(newPassword, newSalt, 1000, 64, 'sha512').toString('hex');
|
||||
|
||||
await db.query('UPDATE users SET password = ?, salt = ? WHERE id = ?', [newHash, newSalt, user.id]);
|
||||
res.json({ success: true, message: '密码修改成功' });
|
||||
} catch (err) {
|
||||
console.error('Password update error:', err);
|
||||
res.status(500).json({ error: '服务器错误,修改失败' });
|
||||
}
|
||||
});
|
||||
|
||||
app.get('/api/auth/status', (req, res) => {
|
||||
const sessionId = getCookie(req, 'session_id');
|
||||
if (sessionId && sessions.has(sessionId)) {
|
||||
@@ -108,8 +184,29 @@ app.post('/api/setup/test', async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
app.post('/api/setup/test-valkey', async (req, res) => {
|
||||
const { host, port, password } = req.body;
|
||||
try {
|
||||
const Redis = require('ioredis');
|
||||
const redis = new Redis({
|
||||
host: host || 'localhost',
|
||||
port: parseInt(port) || 6379,
|
||||
password: password || undefined,
|
||||
lazyConnect: true,
|
||||
maxRetriesPerRequest: 1,
|
||||
connectTimeout: 5000
|
||||
});
|
||||
await redis.connect();
|
||||
await redis.ping();
|
||||
await redis.disconnect();
|
||||
res.json({ success: true, message: 'Valkey connection successful' });
|
||||
} catch (err) {
|
||||
res.status(400).json({ success: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
app.post('/api/setup/init', async (req, res) => {
|
||||
const { host, port, user, password, database } = req.body;
|
||||
const { host, port, user, password, database, vHost, vPort, vPassword } = req.body;
|
||||
try {
|
||||
const mysql = require('mysql2/promise');
|
||||
const connection = await mysql.createConnection({
|
||||
@@ -166,12 +263,18 @@ app.post('/api/setup/init', async (req, res) => {
|
||||
title VARCHAR(255) DEFAULT '数据可视化展示大屏',
|
||||
logo_url TEXT,
|
||||
default_theme VARCHAR(20) DEFAULT 'dark',
|
||||
show_95_bandwidth TINYINT(1) DEFAULT 0,
|
||||
p95_type VARCHAR(20) DEFAULT 'tx',
|
||||
blackbox_source_id INT,
|
||||
latency_source VARCHAR(100),
|
||||
latency_dest VARCHAR(100),
|
||||
latency_target VARCHAR(255),
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`);
|
||||
await connection.query(`
|
||||
INSERT IGNORE INTO site_settings (id, page_name, title, default_theme)
|
||||
VALUES (1, '数据可视化展示大屏', '数据可视化展示大屏', 'dark')
|
||||
INSERT IGNORE INTO site_settings (id, page_name, title, default_theme, show_95_bandwidth, p95_type)
|
||||
VALUES (1, '数据可视化展示大屏', '数据可视化展示大屏', 'dark', 0, 'tx')
|
||||
`);
|
||||
|
||||
await connection.end();
|
||||
@@ -182,6 +285,9 @@ MYSQL_PORT=${port || '3306'}
|
||||
MYSQL_USER=${user || 'root'}
|
||||
MYSQL_PASSWORD=${password || ''}
|
||||
MYSQL_DATABASE=${dbName}
|
||||
VALKEY_HOST=${vHost || 'localhost'}
|
||||
VALKEY_PORT=${vPort || '6379'}
|
||||
VALKEY_PASSWORD=${vPassword || ''}
|
||||
PORT=${process.env.PORT || 3000}
|
||||
HOST=${process.env.HOST || '0.0.0.0'}
|
||||
REFRESH_INTERVAL=${process.env.REFRESH_INTERVAL || 5000}
|
||||
@@ -194,9 +300,13 @@ REFRESH_INTERVAL=${process.env.REFRESH_INTERVAL || 5000}
|
||||
process.env.MYSQL_USER = user;
|
||||
process.env.MYSQL_PASSWORD = password;
|
||||
process.env.MYSQL_DATABASE = dbName;
|
||||
process.env.VALKEY_HOST = vHost;
|
||||
process.env.VALKEY_PORT = vPort;
|
||||
process.env.VALKEY_PASSWORD = vPassword;
|
||||
|
||||
// Re-initialize pool
|
||||
// Re-initialize pools
|
||||
db.initPool();
|
||||
cache.init();
|
||||
|
||||
isDbInitialized = true;
|
||||
res.json({ success: true, message: 'Initialization complete' });
|
||||
@@ -236,7 +346,15 @@ app.post('/api/setup/admin', async (req, res) => {
|
||||
const hash = crypto.pbkdf2Sync(password, salt, 1000, 64, 'sha512').toString('hex');
|
||||
|
||||
await db.query('INSERT INTO users (username, password, salt) VALUES (?, ?, ?)', [username, hash, salt]);
|
||||
res.json({ success: true, message: 'Admin account created' });
|
||||
const [userRows] = await db.query('SELECT id, username FROM users WHERE username = ?', [username]);
|
||||
const user = userRows[0];
|
||||
|
||||
// Auto-login after creation so the next setup steps (like adding Prometheus) work without 401
|
||||
const sessionId = crypto.randomBytes(32).toString('hex');
|
||||
sessions.set(sessionId, { id: user.id, username: user.username });
|
||||
res.setHeader('Set-Cookie', `session_id=${sessionId}; Path=/; HttpOnly; SameSite=Strict; Max-Age=86400`);
|
||||
|
||||
res.json({ success: true, message: 'Admin account created and logged in' });
|
||||
} catch (err) {
|
||||
console.error('Admin creation error:', err);
|
||||
res.status(500).json({ error: err.message });
|
||||
@@ -246,7 +364,7 @@ app.post('/api/setup/admin', async (req, res) => {
|
||||
// Middleware to protect routes & enforce setup
|
||||
app.use(async (req, res, next) => {
|
||||
// Allow system files and setup APIs
|
||||
if (req.path.startsWith('/api/setup') || req.path === '/init.html' || req.path.startsWith('/css/') || req.path.startsWith('/js/') || req.path.startsWith('/fonts/')) {
|
||||
if (req.path === '/health' || req.path.startsWith('/api/setup') || req.path === '/init.html' || req.path.startsWith('/css/') || req.path.startsWith('/js/') || req.path.startsWith('/fonts/')) {
|
||||
return next();
|
||||
}
|
||||
|
||||
@@ -290,7 +408,11 @@ const serveIndex = async (req, res) => {
|
||||
page_name: '数据可视化展示大屏',
|
||||
title: '数据可视化展示大屏',
|
||||
logo_url: null,
|
||||
default_theme: 'dark'
|
||||
default_theme: 'dark',
|
||||
blackbox_source_id: null,
|
||||
latency_source: null,
|
||||
latency_dest: null,
|
||||
latency_target: null
|
||||
};
|
||||
|
||||
if (isDbInitialized) {
|
||||
@@ -326,11 +448,18 @@ app.use(express.static(path.join(__dirname, '..', 'public'), { index: false }));
|
||||
// Get all Prometheus sources
|
||||
app.get('/api/sources', async (req, res) => {
|
||||
try {
|
||||
const [rows] = await db.query('SELECT * FROM prometheus_sources ORDER BY created_at DESC');
|
||||
const [rows] = await db.query('SELECT * FROM prometheus_sources ORDER BY is_server_source DESC, created_at DESC');
|
||||
// Test connectivity for each source
|
||||
const sourcesWithStatus = await Promise.all(rows.map(async (source) => {
|
||||
try {
|
||||
const response = await prometheusService.testConnection(source.url);
|
||||
let response;
|
||||
if (source.type === 'blackbox') {
|
||||
// Simple check for blackbox exporter
|
||||
const res = await fetch(`${source.url.replace(/\/+$/, '')}/metrics`, { timeout: 3000 }).catch(() => null);
|
||||
response = (res && res.ok) ? 'Blackbox Exporter Ready' : 'Connection Error';
|
||||
} else {
|
||||
response = await prometheusService.testConnection(source.url);
|
||||
}
|
||||
return { ...source, status: 'online', version: response };
|
||||
} catch (e) {
|
||||
return { ...source, status: 'offline', version: null };
|
||||
@@ -345,18 +474,21 @@ app.get('/api/sources', async (req, res) => {
|
||||
|
||||
// Add a new Prometheus source
|
||||
app.post('/api/sources', requireAuth, async (req, res) => {
|
||||
let { name, url, description } = req.body;
|
||||
let { name, url, description, is_server_source, type } = req.body;
|
||||
if (!name || !url) {
|
||||
return res.status(400).json({ error: 'Name and URL are required' });
|
||||
}
|
||||
if (!/^https?:\/\//i.test(url)) url = 'http://' + url;
|
||||
try {
|
||||
const [result] = await db.query(
|
||||
'INSERT INTO prometheus_sources (name, url, description) VALUES (?, ?, ?)',
|
||||
[name, url, description || '']
|
||||
'INSERT INTO prometheus_sources (name, url, description, is_server_source, type) VALUES (?, ?, ?, ?, ?)',
|
||||
[name, url, description || '', is_server_source === undefined ? 1 : (is_server_source ? 1 : 0), type || 'prometheus']
|
||||
);
|
||||
const [rows] = await db.query('SELECT * FROM prometheus_sources WHERE id = ?', [result.insertId]);
|
||||
|
||||
// Clear network history cache to force refresh
|
||||
await cache.del('network_history_all');
|
||||
|
||||
res.status(201).json(rows[0]);
|
||||
} catch (err) {
|
||||
console.error('Error adding source:', err);
|
||||
@@ -366,13 +498,16 @@ app.post('/api/sources', requireAuth, async (req, res) => {
|
||||
|
||||
// Update a Prometheus source
|
||||
app.put('/api/sources/:id', requireAuth, async (req, res) => {
|
||||
let { name, url, description } = req.body;
|
||||
let { name, url, description, is_server_source } = req.body;
|
||||
if (url && !/^https?:\/\//i.test(url)) url = 'http://' + url;
|
||||
try {
|
||||
await db.query(
|
||||
'UPDATE prometheus_sources SET name = ?, url = ?, description = ? WHERE id = ?',
|
||||
[name, url, description || '', req.params.id]
|
||||
'UPDATE prometheus_sources SET name = ?, url = ?, description = ?, is_server_source = ?, type = ? WHERE id = ?',
|
||||
[name, url, description || '', is_server_source ? 1 : 0, type || 'prometheus', req.params.id]
|
||||
);
|
||||
// Clear network history cache
|
||||
await cache.del('network_history_all');
|
||||
|
||||
const [rows] = await db.query('SELECT * FROM prometheus_sources WHERE id = ?', [req.params.id]);
|
||||
res.json(rows[0]);
|
||||
} catch (err) {
|
||||
@@ -385,6 +520,8 @@ app.put('/api/sources/:id', requireAuth, async (req, res) => {
|
||||
app.delete('/api/sources/:id', requireAuth, async (req, res) => {
|
||||
try {
|
||||
await db.query('DELETE FROM prometheus_sources WHERE id = ?', [req.params.id]);
|
||||
// Clear network history cache
|
||||
await cache.del('network_history_all');
|
||||
res.json({ message: 'Source deleted' });
|
||||
} catch (err) {
|
||||
console.error('Error deleting source:', err);
|
||||
@@ -394,11 +531,18 @@ app.delete('/api/sources/:id', requireAuth, async (req, res) => {
|
||||
|
||||
// Test connection to a Prometheus source
|
||||
app.post('/api/sources/test', async (req, res) => {
|
||||
let { url } = req.body;
|
||||
let { url, type } = req.body;
|
||||
if (url && !/^https?:\/\//i.test(url)) url = 'http://' + url;
|
||||
try {
|
||||
const version = await prometheusService.testConnection(url);
|
||||
res.json({ status: 'ok', version });
|
||||
let result;
|
||||
if (type === 'blackbox') {
|
||||
const resVal = await fetch(`${url.replace(/\/+$/, '')}/metrics`, { timeout: 5000 }).catch(() => null);
|
||||
result = (resVal && resVal.ok) ? 'Blackbox Exporter Ready' : 'Connection Failed';
|
||||
if (!resVal || !resVal.ok) throw new Error(result);
|
||||
} else {
|
||||
result = await prometheusService.testConnection(url);
|
||||
}
|
||||
res.json({ status: 'ok', version: result });
|
||||
} catch (err) {
|
||||
res.status(400).json({ status: 'error', message: err.message });
|
||||
}
|
||||
@@ -414,7 +558,13 @@ app.get('/api/settings', async (req, res) => {
|
||||
return res.json({
|
||||
page_name: '数据可视化展示大屏',
|
||||
title: '数据可视化展示大屏',
|
||||
logo_url: null
|
||||
logo_url: null,
|
||||
show_95_bandwidth: 0,
|
||||
p95_type: 'tx',
|
||||
blackbox_source_id: null,
|
||||
latency_source: null,
|
||||
latency_dest: null,
|
||||
latency_target: null
|
||||
});
|
||||
}
|
||||
res.json(rows[0]);
|
||||
@@ -426,17 +576,28 @@ app.get('/api/settings', async (req, res) => {
|
||||
|
||||
// Update site settings
|
||||
app.post('/api/settings', requireAuth, async (req, res) => {
|
||||
const { page_name, title, logo_url, default_theme } = req.body;
|
||||
const { page_name, title, logo_url, default_theme, show_95_bandwidth, p95_type, blackbox_source_id, latency_source, latency_dest, latency_target } = req.body;
|
||||
try {
|
||||
await db.query(
|
||||
`INSERT INTO site_settings (id, page_name, title, logo_url, default_theme)
|
||||
VALUES (1, ?, ?, ?, ?)
|
||||
`INSERT INTO site_settings (id, page_name, title, logo_url, default_theme, show_95_bandwidth, p95_type, blackbox_source_id, latency_source, latency_dest, latency_target)
|
||||
VALUES (1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
page_name = VALUES(page_name),
|
||||
title = VALUES(title),
|
||||
logo_url = VALUES(logo_url),
|
||||
default_theme = VALUES(default_theme)`,
|
||||
[page_name, title, logo_url, default_theme]
|
||||
default_theme = VALUES(default_theme),
|
||||
show_95_bandwidth = VALUES(show_95_bandwidth),
|
||||
p95_type = VALUES(p95_type),
|
||||
blackbox_source_id = VALUES(blackbox_source_id),
|
||||
latency_source = VALUES(latency_source),
|
||||
latency_dest = VALUES(latency_dest),
|
||||
latency_target = VALUES(latency_target)`,
|
||||
[
|
||||
page_name, title, logo_url, default_theme,
|
||||
show_95_bandwidth ? 1 : 0, p95_type || 'tx',
|
||||
blackbox_source_id || null, latency_source || null,
|
||||
latency_dest || null, latency_target || null
|
||||
]
|
||||
);
|
||||
res.json({ success: true });
|
||||
} catch (err) {
|
||||
@@ -447,20 +608,20 @@ app.post('/api/settings', requireAuth, async (req, res) => {
|
||||
|
||||
// ==================== Metrics Aggregation ====================
|
||||
|
||||
// Get all aggregated metrics from all Prometheus sources
|
||||
app.get('/api/metrics/overview', async (req, res) => {
|
||||
try {
|
||||
const [sources] = await db.query('SELECT * FROM prometheus_sources');
|
||||
// Reusable function to get overview metrics
|
||||
async function getOverview() {
|
||||
const [sources] = await db.query('SELECT * FROM prometheus_sources WHERE is_server_source = 1 AND type != "blackbox"');
|
||||
if (sources.length === 0) {
|
||||
return res.json({
|
||||
return {
|
||||
totalServers: 0,
|
||||
activeServers: 0,
|
||||
cpu: { used: 0, total: 0, percent: 0 },
|
||||
memory: { used: 0, total: 0, percent: 0 },
|
||||
disk: { used: 0, total: 0, percent: 0 },
|
||||
network: { total: 0, rx: 0, tx: 0 },
|
||||
traffic24h: { rx: 0, tx: 0, total: 0 },
|
||||
servers: []
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
const allMetrics = await Promise.all(sources.map(async (source) => {
|
||||
@@ -470,6 +631,8 @@ app.get('/api/metrics/overview', async (req, res) => {
|
||||
|
||||
try {
|
||||
const metrics = await prometheusService.getOverviewMetrics(source.url, source.name);
|
||||
// Don't set cache here if we want real-time WS push to be fresh,
|
||||
// but keeping it for REST API performance is fine.
|
||||
await cache.set(cacheKey, metrics, 15); // Cache for 15s
|
||||
return metrics;
|
||||
} catch (err) {
|
||||
@@ -482,6 +645,7 @@ app.get('/api/metrics/overview', async (req, res) => {
|
||||
|
||||
// Aggregate across all sources
|
||||
let totalServers = 0;
|
||||
let activeServers = 0;
|
||||
let cpuUsed = 0, cpuTotal = 0;
|
||||
let memUsed = 0, memTotal = 0;
|
||||
let diskUsed = 0, diskTotal = 0;
|
||||
@@ -491,6 +655,7 @@ app.get('/api/metrics/overview', async (req, res) => {
|
||||
|
||||
for (const m of validMetrics) {
|
||||
totalServers += m.totalServers;
|
||||
activeServers += (m.activeServers !== undefined ? m.activeServers : m.totalServers);
|
||||
cpuUsed += m.cpu.used;
|
||||
cpuTotal += m.cpu.total;
|
||||
memUsed += m.memory.used;
|
||||
@@ -504,22 +669,9 @@ app.get('/api/metrics/overview', async (req, res) => {
|
||||
allServers = allServers.concat(m.servers);
|
||||
}
|
||||
|
||||
// --- 24h Traffic from DB (Integrating Bandwidth) ---
|
||||
try {
|
||||
// Each record represents a 5-second interval
|
||||
const [sumRows] = await db.query('SELECT SUM(rx_bandwidth) as sumRx, SUM(tx_bandwidth) as sumTx FROM traffic_stats WHERE timestamp >= NOW() - INTERVAL 1 DAY');
|
||||
|
||||
if (sumRows.length > 0 && sumRows[0].sumRx !== null) {
|
||||
// Total bytes = Sum of (bytes/sec) * 5 seconds
|
||||
traffic24hRx = sumRows[0].sumRx * 5;
|
||||
traffic24hTx = sumRows[0].sumTx * 5;
|
||||
}
|
||||
} catch (err) {
|
||||
console.error('Error calculating 24h traffic from DB integration:', err);
|
||||
}
|
||||
|
||||
res.json({
|
||||
const overview = {
|
||||
totalServers,
|
||||
activeServers,
|
||||
cpu: {
|
||||
used: cpuUsed,
|
||||
total: cpuTotal,
|
||||
@@ -546,37 +698,89 @@ app.get('/api/metrics/overview', async (req, res) => {
|
||||
total: traffic24hRx + traffic24hTx
|
||||
},
|
||||
servers: allServers
|
||||
});
|
||||
};
|
||||
|
||||
// --- Add Geo Information to Servers ---
|
||||
const geoServers = await Promise.all(overview.servers.map(async (server) => {
|
||||
const realInstance = server.originalInstance || prometheusService.resolveToken(server.instance);
|
||||
const cleanIp = realInstance.split(':')[0];
|
||||
|
||||
let geoData = null;
|
||||
try {
|
||||
const [rows] = await db.query('SELECT * FROM server_locations WHERE ip = ?', [cleanIp]);
|
||||
if (rows.length > 0) {
|
||||
geoData = rows[0];
|
||||
} else {
|
||||
geoService.getLocation(cleanIp).catch(() => {});
|
||||
}
|
||||
} catch (e) {}
|
||||
|
||||
const { originalInstance, ...safeServer } = server;
|
||||
if (geoData) {
|
||||
return {
|
||||
...safeServer,
|
||||
country: geoData.country,
|
||||
countryName: geoData.country_name,
|
||||
city: geoData.city,
|
||||
lat: geoData.latitude,
|
||||
lng: geoData.longitude
|
||||
};
|
||||
}
|
||||
return safeServer;
|
||||
}));
|
||||
|
||||
overview.servers = geoServers;
|
||||
return overview;
|
||||
}
|
||||
|
||||
// Get all aggregated metrics from all Prometheus sources
|
||||
app.get('/api/metrics/overview', async (req, res) => {
|
||||
try {
|
||||
const overview = await getOverview();
|
||||
res.json(overview);
|
||||
} catch (err) {
|
||||
console.error('Error fetching overview metrics:', err);
|
||||
res.status(500).json({ error: 'Failed to fetch metrics' });
|
||||
}
|
||||
});
|
||||
|
||||
// Get network traffic history from DB (past 24h)
|
||||
// Get network traffic history (past 24h) from Prometheus
|
||||
app.get('/api/metrics/network-history', async (req, res) => {
|
||||
try {
|
||||
const [rows] = await db.query('SELECT rx_bandwidth, tx_bandwidth, UNIX_TIMESTAMP(timestamp) as ts FROM traffic_stats WHERE timestamp >= NOW() - INTERVAL 1 DAY ORDER BY ts ASC');
|
||||
const cacheKey = 'network_history_all';
|
||||
const cached = await cache.get(cacheKey);
|
||||
if (cached) return res.json(cached);
|
||||
|
||||
if (rows.length === 0) {
|
||||
const [sources] = await db.query('SELECT * FROM prometheus_sources WHERE is_server_source = 1 AND type != "blackbox"');
|
||||
if (sources.length === 0) {
|
||||
return res.json({ timestamps: [], rx: [], tx: [] });
|
||||
}
|
||||
|
||||
res.json({
|
||||
timestamps: rows.map(r => r.ts * 1000),
|
||||
rx: rows.map(r => r.rx_bandwidth),
|
||||
tx: rows.map(r => r.tx_bandwidth)
|
||||
});
|
||||
const histories = await Promise.all(sources.map(source =>
|
||||
prometheusService.getNetworkHistory(source.url).catch(err => {
|
||||
console.error(`Error fetching network history from ${source.name}:`, err.message);
|
||||
return null;
|
||||
})
|
||||
));
|
||||
|
||||
const validHistories = histories.filter(h => h !== null);
|
||||
if (validHistories.length === 0) {
|
||||
return res.json({ timestamps: [], rx: [], tx: [] });
|
||||
}
|
||||
|
||||
const merged = prometheusService.mergeNetworkHistories(validHistories);
|
||||
await cache.set(cacheKey, merged, 300); // Cache for 5 minutes
|
||||
res.json(merged);
|
||||
} catch (err) {
|
||||
console.error('Error fetching network history from DB:', err);
|
||||
res.status(500).json({ error: 'Failed to fetch network history' });
|
||||
console.error('Error fetching network history history:', err);
|
||||
res.status(500).json({ error: 'Failed to fetch network history history' });
|
||||
}
|
||||
});
|
||||
|
||||
// Get CPU usage history for sparklines
|
||||
app.get('/api/metrics/cpu-history', async (req, res) => {
|
||||
try {
|
||||
const [sources] = await db.query('SELECT * FROM prometheus_sources');
|
||||
const [sources] = await db.query('SELECT * FROM prometheus_sources WHERE is_server_source = 1 AND type != "blackbox"');
|
||||
if (sources.length === 0) {
|
||||
return res.json({ timestamps: [], values: [] });
|
||||
}
|
||||
@@ -601,6 +805,51 @@ app.get('/api/metrics/cpu-history', async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
// Get detailed metrics for a specific server
|
||||
app.get('/api/metrics/server-details', async (req, res) => {
|
||||
const { instance, job, source } = req.query;
|
||||
|
||||
if (!instance || !job || !source) {
|
||||
return res.status(400).json({ error: 'instance, job, and source name are required' });
|
||||
}
|
||||
|
||||
try {
|
||||
// Find the source URL by name
|
||||
const [rows] = await db.query('SELECT url FROM prometheus_sources WHERE name = ?', [source]);
|
||||
if (rows.length === 0) {
|
||||
return res.status(404).json({ error: 'Prometheus source not found' });
|
||||
}
|
||||
const sourceUrl = rows[0].url;
|
||||
|
||||
// Fetch detailed metrics
|
||||
const details = await prometheusService.getServerDetails(sourceUrl, instance, job);
|
||||
res.json(details);
|
||||
} catch (err) {
|
||||
console.error(`Error fetching server details for ${instance}:`, err.message);
|
||||
res.status(500).json({ error: 'Failed to fetch server details' });
|
||||
}
|
||||
});
|
||||
|
||||
// Get historical metrics for a specific server
|
||||
app.get('/api/metrics/server-history', async (req, res) => {
|
||||
const { instance, job, source, metric, range, start, end } = req.query;
|
||||
|
||||
if (!instance || !job || !source || !metric) {
|
||||
return res.status(400).json({ error: 'instance, job, source, and metric are required' });
|
||||
}
|
||||
|
||||
try {
|
||||
const [rows] = await db.query('SELECT url FROM prometheus_sources WHERE name = ?', [source]);
|
||||
if (rows.length === 0) return res.status(404).json({ error: 'Source not found' });
|
||||
const sourceUrl = rows[0].url;
|
||||
|
||||
const data = await prometheusService.getServerHistory(sourceUrl, instance, job, metric, range, start, end);
|
||||
res.json(data);
|
||||
} catch (err) {
|
||||
res.status(500).json({ error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
// SPA fallback
|
||||
app.get('*', (req, res, next) => {
|
||||
if (req.path.startsWith('/api/') || req.path.includes('.')) return next();
|
||||
@@ -608,68 +857,125 @@ app.get('*', (req, res, next) => {
|
||||
});
|
||||
|
||||
|
||||
async function recordTrafficStats() {
|
||||
if (!isDbInitialized) return;
|
||||
try {
|
||||
const [sources] = await db.query('SELECT * FROM prometheus_sources');
|
||||
if (sources.length === 0) return;
|
||||
// ==================== Latency Routes CRUD ====================
|
||||
|
||||
let totalRxBytes = 0;
|
||||
let totalTxBytes = 0;
|
||||
let totalRxBandwidth = 0;
|
||||
let totalTxBandwidth = 0;
|
||||
|
||||
const results = await Promise.all(sources.map(async source => {
|
||||
app.get('/api/latency-routes', async (req, res) => {
|
||||
try {
|
||||
const [rxBytesRes, txBytesRes, rxBWRes, txBWRes] = await Promise.all([
|
||||
prometheusService.query(source.url, 'sum(node_network_receive_bytes_total{device!~"lo|veth.*|docker.*|br-.*"})'),
|
||||
prometheusService.query(source.url, 'sum(node_network_transmit_bytes_total{device!~"lo|veth.*|docker.*|br-.*"})'),
|
||||
prometheusService.query(source.url, 'sum(rate(node_network_receive_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[1m]))'),
|
||||
prometheusService.query(source.url, 'sum(rate(node_network_transmit_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[1m]))')
|
||||
]);
|
||||
const [rows] = await db.query(`
|
||||
SELECT r.*, s.name as source_name
|
||||
FROM latency_routes r
|
||||
LEFT JOIN prometheus_sources s ON r.source_id = s.id
|
||||
ORDER BY r.created_at DESC
|
||||
`);
|
||||
res.json(rows);
|
||||
} catch (err) {
|
||||
res.status(500).json({ error: 'Failed to fetch latency routes' });
|
||||
}
|
||||
});
|
||||
|
||||
app.post('/api/latency-routes', requireAuth, async (req, res) => {
|
||||
const { source_id, latency_source, latency_dest, latency_target } = req.body;
|
||||
try {
|
||||
await db.query('INSERT INTO latency_routes (source_id, latency_source, latency_dest, latency_target) VALUES (?, ?, ?, ?)', [source_id, latency_source, latency_dest, latency_target]);
|
||||
res.json({ success: true });
|
||||
} catch (err) {
|
||||
res.status(500).json({ error: 'Failed to add latency route' });
|
||||
}
|
||||
});
|
||||
|
||||
app.delete('/api/latency-routes/:id', requireAuth, async (req, res) => {
|
||||
try {
|
||||
await db.query('DELETE FROM latency_routes WHERE id = ?', [req.params.id]);
|
||||
res.json({ success: true });
|
||||
} catch (err) {
|
||||
res.status(500).json({ error: 'Failed to delete latency route' });
|
||||
}
|
||||
});
|
||||
|
||||
app.put('/api/latency-routes/:id', requireAuth, async (req, res) => {
|
||||
const { source_id, latency_source, latency_dest, latency_target } = req.body;
|
||||
try {
|
||||
await db.query(
|
||||
'UPDATE latency_routes SET source_id = ?, latency_source = ?, latency_dest = ?, latency_target = ? WHERE id = ?',
|
||||
[source_id, latency_source, latency_dest, latency_target, req.params.id]
|
||||
);
|
||||
res.json({ success: true });
|
||||
} catch (err) {
|
||||
res.status(500).json({ error: 'Failed to update latency route' });
|
||||
}
|
||||
});
|
||||
|
||||
// ==================== Metrics Latency ====================
|
||||
|
||||
app.get('/api/metrics/latency', async (req, res) => {
|
||||
try {
|
||||
const [routes] = await db.query(`
|
||||
SELECT r.*, s.url, s.type as source_type
|
||||
FROM latency_routes r
|
||||
JOIN prometheus_sources s ON r.source_id = s.id
|
||||
`);
|
||||
|
||||
if (routes.length === 0) {
|
||||
return res.json({ routes: [] });
|
||||
}
|
||||
|
||||
const results = await Promise.all(routes.map(async (route) => {
|
||||
// Try to get from Valkey first (filled by background latencyService)
|
||||
let latency = await cache.get(`latency:route:${route.id}`);
|
||||
|
||||
// Fallback if not in cache (only for prometheus sources, blackbox sources rely on the background service)
|
||||
if (latency === null && route.source_type === 'prometheus') {
|
||||
latency = await prometheusService.getLatency(route.url, route.latency_target);
|
||||
}
|
||||
|
||||
return {
|
||||
rxBytes: (rxBytesRes.length > 0) ? parseFloat(rxBytesRes[0].value[1]) : 0,
|
||||
txBytes: (txBytesRes.length > 0) ? parseFloat(txBytesRes[0].value[1]) : 0,
|
||||
rxBW: (rxBWRes.length > 0) ? parseFloat(rxBWRes[0].value[1]) : 0,
|
||||
txBW: (txBWRes.length > 0) ? parseFloat(txBWRes[0].value[1]) : 0
|
||||
id: route.id,
|
||||
source: route.latency_source,
|
||||
dest: route.latency_dest,
|
||||
latency: latency
|
||||
};
|
||||
} catch (e) {
|
||||
return { rxBytes: 0, txBytes: 0, rxBW: 0, txBW: 0 };
|
||||
}
|
||||
}));
|
||||
|
||||
for (const r of results) {
|
||||
totalRxBytes += r.rxBytes;
|
||||
totalTxBytes += r.txBytes;
|
||||
totalRxBandwidth += r.rxBW;
|
||||
totalTxBandwidth += r.txBW;
|
||||
}
|
||||
|
||||
// Always insert a record if we have sources, so the timeline advances
|
||||
// Even if traffic is 0, we want to see 0 on the chart
|
||||
await db.query('INSERT INTO traffic_stats (rx_bytes, tx_bytes, rx_bandwidth, tx_bandwidth) VALUES (?, ?, ?, ?)', [
|
||||
Math.round(totalRxBytes),
|
||||
Math.round(totalTxBytes),
|
||||
totalRxBandwidth,
|
||||
totalTxBandwidth
|
||||
]);
|
||||
console.log(`[Traffic Recorder] Saved stats: BW_RX=${totalRxBandwidth.toFixed(2)}, BW_TX=${totalTxBandwidth.toFixed(2)}`);
|
||||
res.json({ routes: results });
|
||||
} catch (err) {
|
||||
console.error('[Traffic Recorder] Error recording stats:', err);
|
||||
console.error('Error fetching latencies:', err);
|
||||
res.status(500).json({ error: 'Failed to fetch latency' });
|
||||
}
|
||||
});
|
||||
|
||||
// ==================== WebSocket Server ====================
|
||||
|
||||
const server = http.createServer(app);
|
||||
const wss = new WebSocket.Server({ server });
|
||||
|
||||
function broadcast(data) {
|
||||
const message = JSON.stringify(data);
|
||||
wss.clients.forEach(client => {
|
||||
if (client.readyState === WebSocket.OPEN) {
|
||||
client.send(message);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Broadcast loop
|
||||
async function broadcastMetrics() {
|
||||
try {
|
||||
const overview = await getOverview();
|
||||
broadcast({ type: 'overview', data: overview });
|
||||
} catch (err) {
|
||||
// console.error('WS Broadcast error:', err.message);
|
||||
}
|
||||
}
|
||||
|
||||
// Check and fix database integrity on startup
|
||||
// Start services
|
||||
checkAndFixDatabase();
|
||||
latencyService.start();
|
||||
|
||||
// Record traffic every 5 seconds (17,280 points/day)
|
||||
setInterval(recordTrafficStats, 5 * 1000);
|
||||
// Initial record after a short delay
|
||||
setTimeout(recordTrafficStats, 10000);
|
||||
const REFRESH_INT = parseInt(process.env.REFRESH_INTERVAL) || 5000;
|
||||
setInterval(broadcastMetrics, REFRESH_INT);
|
||||
|
||||
app.listen(PORT, HOST, () => {
|
||||
console.log(`\n 🚀 Data Visualization Display Wall`);
|
||||
server.listen(PORT, HOST, () => {
|
||||
console.log(`\n 🚀 Data Visualization Display Wall (WebSocket Enabled)`);
|
||||
console.log(` 📊 Server running at http://${HOST === '0.0.0.0' ? 'localhost' : HOST}:${PORT}`);
|
||||
console.log(` ⚙️ Configure Prometheus sources at http://${HOST === '0.0.0.0' ? 'localhost' : HOST}:${PORT}/settings\n`);
|
||||
});
|
||||
|
||||
@@ -67,6 +67,22 @@ async function initDatabase() {
|
||||
`);
|
||||
console.log(' ✅ Table "site_settings" ready');
|
||||
|
||||
// Create server_locations table
|
||||
await connection.query(`
|
||||
CREATE TABLE IF NOT EXISTS server_locations (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
ip VARCHAR(255) NOT NULL UNIQUE,
|
||||
country CHAR(2),
|
||||
country_name VARCHAR(100),
|
||||
region VARCHAR(100),
|
||||
city VARCHAR(100),
|
||||
latitude DOUBLE,
|
||||
longitude DOUBLE,
|
||||
last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`);
|
||||
console.log(' ✅ Table "server_locations" ready');
|
||||
|
||||
console.log('\n🎉 Database initialization complete!\n');
|
||||
await connection.end();
|
||||
}
|
||||
|
||||
134
server/latency-service.js
Normal file
134
server/latency-service.js
Normal file
@@ -0,0 +1,134 @@
|
||||
const axios = require('axios');
|
||||
const cache = require('./cache');
|
||||
const db = require('./db');
|
||||
|
||||
const POLL_INTERVAL = 10000; // 10 seconds
|
||||
|
||||
async function pollLatency() {
|
||||
try {
|
||||
const [routes] = await db.query(`
|
||||
SELECT r.*, s.url
|
||||
FROM latency_routes r
|
||||
JOIN prometheus_sources s ON r.source_id = s.id
|
||||
WHERE s.type = 'blackbox'
|
||||
`);
|
||||
|
||||
if (routes.length === 0) return;
|
||||
|
||||
// Poll each route
|
||||
await Promise.allSettled(routes.map(async (route) => {
|
||||
try {
|
||||
// Blackbox exporter probe URL
|
||||
// We assume ICMP module for now. If target is a URL, maybe use http_2xx
|
||||
let module = 'icmp';
|
||||
let target = route.latency_target;
|
||||
|
||||
if (target.startsWith('http://') || target.startsWith('https://')) {
|
||||
module = 'http_2xx';
|
||||
}
|
||||
|
||||
const probeUrl = `${route.url.replace(/\/+$/, '')}/probe?module=${module}&target=${encodeURIComponent(target)}`;
|
||||
|
||||
const startTime = Date.now();
|
||||
const response = await axios.get(probeUrl, {
|
||||
timeout: 5000,
|
||||
responseType: 'text',
|
||||
validateStatus: false
|
||||
});
|
||||
|
||||
if (typeof response.data !== 'string') {
|
||||
throw new Error('Response data is not a string');
|
||||
}
|
||||
|
||||
const lines = response.data.split('\n').map(l => l.trim()).filter(l => l && !l.startsWith('#'));
|
||||
|
||||
// 1. Check if the probe was successful
|
||||
let isProbeSuccess = false;
|
||||
for (const line of lines) {
|
||||
if (/^probe_success(\{.*\})?\s+1/.test(line)) {
|
||||
isProbeSuccess = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Extract latency from priority metrics
|
||||
const targetMetrics = [
|
||||
'probe_icmp_duration_seconds',
|
||||
'probe_http_duration_seconds',
|
||||
'probe_duration_seconds'
|
||||
];
|
||||
|
||||
let foundLatency = null;
|
||||
for (const metricName of targetMetrics) {
|
||||
let bestLine = null;
|
||||
|
||||
// First pass: look for phase="rtt" which is the most accurate "ping"
|
||||
for (const line of lines) {
|
||||
if (line.startsWith(metricName) && line.includes('phase="rtt"')) {
|
||||
bestLine = line;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Second pass: if no rtt phase, look for a line without phases (legacy format) or just the first line
|
||||
if (!bestLine) {
|
||||
for (const line of lines) {
|
||||
if (line.startsWith(metricName)) {
|
||||
// Prefer lines without {} if possible, otherwise take the first one
|
||||
if (!line.includes('{')) {
|
||||
bestLine = line;
|
||||
break;
|
||||
}
|
||||
if (!bestLine) bestLine = line;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (bestLine) {
|
||||
// Regex to capture the number, including scientific notation
|
||||
const regex = new RegExp(`^${metricName}(?:\\{[^}]*\\})?\\s+([\\d.eE+-]+)`);
|
||||
const match = bestLine.match(regex);
|
||||
|
||||
if (match) {
|
||||
const val = parseFloat(match[1]);
|
||||
if (!isNaN(val)) {
|
||||
foundLatency = val * 1000; // convert to ms
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Final decision
|
||||
// If it's a success, use found latency. If success=0 or missing, handle carefully.
|
||||
let latency;
|
||||
if (isProbeSuccess && foundLatency !== null) {
|
||||
latency = foundLatency;
|
||||
} else {
|
||||
// If probe failed or metrics missing, do not show 0, show null (Measurement in progress/Error)
|
||||
latency = null;
|
||||
}
|
||||
|
||||
// Save to Valkey
|
||||
await cache.set(`latency:route:${route.id}`, latency, 60);
|
||||
} catch (err) {
|
||||
await cache.set(`latency:route:${route.id}`, null, 60);
|
||||
}
|
||||
}));
|
||||
} catch (err) {
|
||||
console.error('[Latency] Service error:', err.message);
|
||||
}
|
||||
}
|
||||
|
||||
let intervalId = null;
|
||||
|
||||
function start() {
|
||||
if (intervalId) clearInterval(intervalId);
|
||||
pollLatency(); // initial run
|
||||
intervalId = setInterval(pollLatency, POLL_INTERVAL);
|
||||
console.log('[Latency] Background service started (polling Blackbox Exporter directly)');
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
start
|
||||
};
|
||||
@@ -5,9 +5,21 @@ const https = require('https');
|
||||
const QUERY_TIMEOUT = 10000;
|
||||
|
||||
// Reusable agents to handle potential redirect issues and protocol mismatches
|
||||
const crypto = require('crypto');
|
||||
const httpAgent = new http.Agent({ keepAlive: true });
|
||||
const httpsAgent = new https.Agent({ keepAlive: true, rejectUnauthorized: false });
|
||||
|
||||
const serverIdMap = new Map(); // token -> { instance, job, source }
|
||||
const SECRET = process.env.APP_SECRET || 'prom-data-panel-stable-secret-key-123';
|
||||
|
||||
function getServerToken(instance, job, source) {
|
||||
const hash = crypto.createHmac('sha256', SECRET)
|
||||
.update(`${instance}:${job}:${source}`)
|
||||
.digest('hex')
|
||||
.substring(0, 16);
|
||||
return hash;
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize URL and ensure protocol
|
||||
*/
|
||||
@@ -98,6 +110,38 @@ async function query(baseUrl, expr) {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all targets from Prometheus
|
||||
*/
|
||||
async function getTargets(baseUrl) {
|
||||
const url = normalizeUrl(baseUrl);
|
||||
try {
|
||||
const controller = new AbortController();
|
||||
const timer = setTimeout(() => controller.abort(), QUERY_TIMEOUT);
|
||||
|
||||
const res = await fetch(`${url}/api/v1/targets`, {
|
||||
signal: controller.signal
|
||||
});
|
||||
|
||||
clearTimeout(timer);
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error(`Prometheus returned HTTP ${res.status}`);
|
||||
}
|
||||
|
||||
const data = await res.json();
|
||||
if (data.status !== 'success') {
|
||||
throw new Error(`Prometheus targets fetch failed: ${data.error || 'unknown error'}`);
|
||||
}
|
||||
return data.data.activeTargets || [];
|
||||
} catch (err) {
|
||||
if (err.name === 'AbortError') {
|
||||
throw new Error('Prometheus targets fetch timed out');
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a Prometheus range query
|
||||
*/
|
||||
@@ -133,9 +177,6 @@ async function queryRange(baseUrl, expr, start, end, step) {
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Get overview metrics from a single Prometheus source
|
||||
*/
|
||||
async function getOverviewMetrics(url, sourceName) {
|
||||
// Run all queries in parallel
|
||||
const [
|
||||
@@ -147,9 +188,7 @@ async function getOverviewMetrics(url, sourceName) {
|
||||
diskFreeResult,
|
||||
netRxResult,
|
||||
netTxResult,
|
||||
traffic24hRxResult,
|
||||
traffic24hTxResult,
|
||||
upResult
|
||||
targetsResult
|
||||
] = await Promise.all([
|
||||
// CPU usage per instance: 1 - avg idle
|
||||
query(url, '100 - (avg by (instance, job) (rate(node_cpu_seconds_total{mode="idle"}[1m])) * 100)').catch(() => []),
|
||||
@@ -159,31 +198,36 @@ async function getOverviewMetrics(url, sourceName) {
|
||||
query(url, 'node_memory_MemTotal_bytes').catch(() => []),
|
||||
// Memory available per instance
|
||||
query(url, 'node_memory_MemAvailable_bytes').catch(() => []),
|
||||
// Disk total per instance (root filesystem + /data)
|
||||
query(url, 'sum by (instance, job) (node_filesystem_size_bytes{mountpoint=~"/|/data",fstype!="tmpfs"})').catch(() => []),
|
||||
// Disk free per instance (root filesystem + /data)
|
||||
query(url, 'sum by (instance, job) (node_filesystem_free_bytes{mountpoint=~"/|/data",fstype!="tmpfs"})').catch(() => []),
|
||||
// Disk total per instance (excluding virtual fs and FUSE/rclone mounts)
|
||||
query(url, 'sum by (instance, job) (node_filesystem_size_bytes{fstype!~"tmpfs|autofs|proc|sysfs|fuse.*", mountpoint!~"/tmp.*|/var/lib/docker/.*|/run/.*"})').catch(() => []),
|
||||
// Disk free per instance
|
||||
query(url, 'sum by (instance, job) (node_filesystem_free_bytes{fstype!~"tmpfs|autofs|proc|sysfs|fuse.*", mountpoint!~"/tmp.*|/var/lib/docker/.*|/run/.*"})').catch(() => []),
|
||||
// Network receive rate (bytes/sec)
|
||||
query(url, 'sum by (instance, job) (rate(node_network_receive_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[1m]))').catch(() => []),
|
||||
// Network transmit rate (bytes/sec)
|
||||
query(url, 'sum by (instance, job) (rate(node_network_transmit_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[1m]))').catch(() => []),
|
||||
// Total traffic received in last 24h
|
||||
query(url, 'sum by (instance, job) (increase(node_network_receive_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[24h]))').catch(() => []),
|
||||
// Total traffic transmitted in last 24h
|
||||
query(url, 'sum by (instance, job) (increase(node_network_transmit_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[24h]))').catch(() => []),
|
||||
// Up instances (at least one successful scrape in last 5m)
|
||||
// We broaden the job filter to catch more variations of node-exporter jobs
|
||||
query(url, 'max_over_time(up{job=~".*node.*|.*exporter.*|.*host.*"}[5m])').catch(() => [])
|
||||
// Targets status from /api/v1/targets
|
||||
getTargets(url).catch(() => [])
|
||||
]);
|
||||
|
||||
// Fetch 24h detailed traffic using the A*duration logic
|
||||
const traffic24hSum = await get24hTrafficSum(url).catch(() => ({ rx: 0, tx: 0 }));
|
||||
|
||||
// Build per-instance data map
|
||||
const instances = new Map();
|
||||
|
||||
const getOrCreate = (metric) => {
|
||||
const key = metric.instance;
|
||||
if (!instances.has(key)) {
|
||||
instances.set(key, {
|
||||
instance: key,
|
||||
const originalInstance = metric.instance || 'Unknown';
|
||||
const job = metric.job || 'Unknown';
|
||||
const token = getServerToken(originalInstance, job, sourceName);
|
||||
|
||||
// Store mapping for detail queries
|
||||
serverIdMap.set(token, { instance: originalInstance, source: sourceName, job });
|
||||
|
||||
if (!instances.has(token)) {
|
||||
instances.set(token, {
|
||||
instance: token, // This is the masked IP SENT TO FRONTEND
|
||||
originalInstance, // Keep internal for aggregation/parsing
|
||||
job: metric.job || 'Unknown',
|
||||
source: sourceName,
|
||||
cpuPercent: 0,
|
||||
@@ -194,10 +238,12 @@ async function getOverviewMetrics(url, sourceName) {
|
||||
diskUsed: 0,
|
||||
netRx: 0,
|
||||
netTx: 0,
|
||||
up: false
|
||||
up: false,
|
||||
memPercent: 0,
|
||||
diskPercent: 0
|
||||
});
|
||||
}
|
||||
const inst = instances.get(key);
|
||||
const inst = instances.get(token);
|
||||
// If job was Unknown but we now have a job name, update it
|
||||
if (inst.job === 'Unknown' && metric.job) {
|
||||
inst.job = metric.job;
|
||||
@@ -205,10 +251,17 @@ async function getOverviewMetrics(url, sourceName) {
|
||||
return inst;
|
||||
};
|
||||
|
||||
// Parse UP status
|
||||
for (const r of upResult) {
|
||||
const inst = getOrCreate(r.metric);
|
||||
inst.up = parseFloat(r.value[1]) === 1;
|
||||
// Initialize instances from targets first (to ensure we have all servers even if they have no metrics)
|
||||
for (const target of targetsResult) {
|
||||
const labels = target.labels || {};
|
||||
const instance = labels.instance;
|
||||
const job = labels.job || '';
|
||||
|
||||
// Include every target from the activeTargets list
|
||||
if (instance) {
|
||||
const inst = getOrCreate(labels);
|
||||
inst.up = target.health === 'up';
|
||||
}
|
||||
}
|
||||
|
||||
// Parse CPU usage
|
||||
@@ -253,14 +306,18 @@ async function getOverviewMetrics(url, sourceName) {
|
||||
inst.netTx = parseFloat(r.value[1]) || 0;
|
||||
}
|
||||
|
||||
// Final check: If an instance has non-zero CPU or Memory total data but is marked offline,
|
||||
// it means we missed its 'up' metric due to job labels, but it's clearly sending data.
|
||||
for (const inst of instances.values()) {
|
||||
if (!inst.up && (inst.cpuPercent > 0 || inst.memTotal > 0)) {
|
||||
inst.up = true;
|
||||
}
|
||||
// Calculate percentages on backend
|
||||
inst.memPercent = inst.memTotal > 0 ? (inst.memUsed / inst.memTotal * 100) : 0;
|
||||
inst.diskPercent = inst.diskTotal > 0 ? (inst.diskUsed / inst.diskTotal * 100) : 0;
|
||||
}
|
||||
|
||||
const allInstancesList = Array.from(instances.values());
|
||||
const activeInstances = allInstancesList.filter(inst => inst.up);
|
||||
|
||||
// Aggregate
|
||||
let totalCpuUsed = 0, totalCpuCores = 0;
|
||||
let totalMemUsed = 0, totalMemTotal = 0;
|
||||
@@ -268,7 +325,7 @@ async function getOverviewMetrics(url, sourceName) {
|
||||
let totalNetRx = 0, totalNetTx = 0;
|
||||
let totalTraffic24hRx = 0, totalTraffic24hTx = 0;
|
||||
|
||||
for (const inst of instances.values()) {
|
||||
for (const inst of activeInstances) {
|
||||
totalCpuUsed += (inst.cpuPercent / 100) * inst.cpuCores;
|
||||
totalCpuCores += inst.cpuCores;
|
||||
totalMemUsed += inst.memUsed;
|
||||
@@ -279,16 +336,13 @@ async function getOverviewMetrics(url, sourceName) {
|
||||
totalNetTx += inst.netTx;
|
||||
}
|
||||
|
||||
// Parse 24h traffic
|
||||
for (const r of traffic24hRxResult) {
|
||||
totalTraffic24hRx += parseFloat(r.value[1]) || 0;
|
||||
}
|
||||
for (const r of traffic24hTxResult) {
|
||||
totalTraffic24hTx += parseFloat(r.value[1]) || 0;
|
||||
}
|
||||
// Use the pre-calculated 24h traffic
|
||||
totalTraffic24hRx = traffic24hSum.rx;
|
||||
totalTraffic24hTx = traffic24hSum.tx;
|
||||
|
||||
return {
|
||||
totalServers: instances.size,
|
||||
totalServers: allInstancesList.length,
|
||||
activeServers: activeInstances.length,
|
||||
cpu: {
|
||||
used: totalCpuUsed,
|
||||
total: totalCpuCores,
|
||||
@@ -311,19 +365,86 @@ async function getOverviewMetrics(url, sourceName) {
|
||||
},
|
||||
traffic24h: {
|
||||
rx: totalTraffic24hRx,
|
||||
tx: totalTraffic24hTx
|
||||
tx: totalTraffic24hTx,
|
||||
total: totalTraffic24hRx + totalTraffic24hTx
|
||||
},
|
||||
servers: Array.from(instances.values())
|
||||
servers: allInstancesList
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
 * Calculate total traffic from bandwidth data points using the A*duration logic.
 *
 * Integrates a series of [timestamp, rate] samples as a left Riemann sum: each
 * sample's rate is assumed to hold until the next sample's timestamp.
 *
 * @param {Array<[number, string|number]>} values - Pairs of [unix seconds, bytes/sec].
 * @returns {number} Total bytes over the covered interval; 0 when fewer than 2 points.
 */
function calculateTrafficFromHistory(values) {
  // At least two samples are needed to form one interval.
  if (!values || values.length < 2) return 0;

  let totalBytes = 0;
  for (let i = 0; i < values.length - 1; i++) {
    const [tsA, valA] = values[i];
    const [tsB] = values[i + 1];
    const duration = tsB - tsA;
    // Guard against non-numeric samples so one bad point cannot poison the
    // whole sum with NaN (matches the `|| 0` convention used elsewhere here).
    totalBytes += (parseFloat(valA) || 0) * duration;
  }
  return totalBytes;
}
|
||||
|
||||
/**
 * Total Prometheus-wide traffic over the past 24h.
 *
 * Fetches 1-minute-resolution rate samples for receive and transmit and
 * integrates them over time via calculateTrafficFromHistory().
 *
 * @param {string} url - Prometheus base URL.
 * @returns {Promise<{rx: number, tx: number}>} Total bytes in each direction.
 */
async function get24hTrafficSum(url) {
  const endTs = Math.floor(Date.now() / 1000);
  const startTs = endTs - 86400;
  const resolution = 60; // 1-minute points for calculation

  const rxExpr = 'sum(rate(node_network_receive_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[1m]))';
  const txExpr = 'sum(rate(node_network_transmit_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[1m]))';

  // Query failures degrade to an empty series rather than rejecting.
  const [rxSeries, txSeries] = await Promise.all([
    queryRange(url, rxExpr, startTs, endTs, resolution).catch(() => []),
    queryRange(url, txExpr, startTs, endTs, resolution).catch(() => [])
  ]);

  const firstSeriesValues = (series) => (series.length > 0 ? series[0].values : []);

  return {
    rx: calculateTrafficFromHistory(firstSeriesValues(rxSeries)),
    tx: calculateTrafficFromHistory(firstSeriesValues(txSeries))
  };
}
|
||||
|
||||
/**
 * Get total traffic for a specific server in the past 24h.
 *
 * Integrates 1-minute rate samples for the given instance/job pair, excluding
 * virtual/bridge network devices.
 *
 * @param {string} url - Prometheus base URL.
 * @param {string} instance - Server token or instance label (resolved via resolveToken).
 * @param {string} job - Prometheus job label.
 * @returns {Promise<{rx: number, tx: number}>} Total bytes in each direction.
 */
async function get24hServerTrafficSum(url, instance, job) {
  const node = resolveToken(instance);
  const endTs = Math.floor(Date.now() / 1000);
  const startTs = endTs - 86400;
  const resolution = 60;

  // Same label selector for both directions; only the metric name differs.
  const selector = `instance="${node}",job="${job}",device!~'tap.*|veth.*|br.*|docker.*|virbr*|podman.*|lo.*|vmbr.*|fwbr.|ip.*|gre.*|virbr.*|vnet.*'`;
  const rxExpr = `sum(rate(node_network_receive_bytes_total{${selector}}[1m]))`;
  const txExpr = `sum(rate(node_network_transmit_bytes_total{${selector}}[1m]))`;

  // Query failures degrade to an empty series rather than rejecting.
  const [rxSeries, txSeries] = await Promise.all([
    queryRange(url, rxExpr, startTs, endTs, resolution).catch(() => []),
    queryRange(url, txExpr, startTs, endTs, resolution).catch(() => [])
  ]);

  const firstSeriesValues = (series) => (series.length > 0 ? series[0].values : []);

  return {
    rx: calculateTrafficFromHistory(firstSeriesValues(rxSeries)),
    tx: calculateTrafficFromHistory(firstSeriesValues(txSeries))
  };
}
|
||||
|
||||
/**
|
||||
* Get network traffic history (past 24h, 5-min intervals for chart)
|
||||
*/
|
||||
async function getNetworkHistory(url) {
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const start = now - 86400; // 24h ago
|
||||
const step = 900; // 15 minutes
|
||||
const step = 300; // 5 minutes for better resolution on chart
|
||||
|
||||
const [rxResult, txResult] = await Promise.all([
|
||||
queryRange(url,
|
||||
@@ -412,13 +533,275 @@ function mergeCpuHistories(histories) {
|
||||
}
|
||||
|
||||
|
||||
/**
 * Translate an opaque server token into its Prometheus instance label.
 * Tokens not present in serverIdMap pass through unchanged.
 *
 * @param {string} token - Server ID token or raw instance label.
 * @returns {string} The mapped instance label, or the token itself.
 */
function resolveToken(token) {
  if (!serverIdMap.has(token)) {
    return token;
  }
  return serverIdMap.get(token).instance;
}
|
||||
|
||||
/**
 * Get detailed metrics for a specific server (node).
 *
 * Runs all dashboard queries in parallel and assembles a flat results object,
 * including grouped disk partitions and the server's 24h traffic totals.
 *
 * @param {string} baseUrl - Prometheus base URL (normalized internally).
 * @param {string} instance - Server token or instance label (resolved via resolveToken).
 * @param {string} job - Prometheus job label for the node exporter.
 * @returns {Promise<object>} Scalar metric values keyed by query name, plus
 *   `partitions` (array), `totalDiskSize` and `traffic24h: { rx, tx }`.
 */
async function getServerDetails(baseUrl, instance, job) {
  const url = normalizeUrl(baseUrl);
  const node = resolveToken(instance);

  // Queries based on the requested dashboard structure
  const queries = {
    cpuIowait: `avg(rate(node_cpu_seconds_total{mode="iowait", instance="${node}"}[1m])) * 100`,
    cpuOther: `avg(rate(node_cpu_seconds_total{mode=~"nice|steal|guest|guest_nice", instance="${node}"}[1m])) * 100`,
    cpuBusy: `100 * (1 - avg(rate(node_cpu_seconds_total{mode="idle", instance="${node}"}[1m])))`,
    sysLoad: `node_load1{instance="${node}",job="${job}"} * 100 / count(count(node_cpu_seconds_total{instance="${node}",job="${job}"}) by (cpu))`,
    memUsedPct: `(1 - (node_memory_MemAvailable_bytes{instance="${node}", job="${job}"} / node_memory_MemTotal_bytes{instance="${node}", job="${job}"})) * 100`,
    swapUsedPct: `((node_memory_SwapTotal_bytes{instance="${node}",job="${job}"} - node_memory_SwapFree_bytes{instance="${node}",job="${job}"}) / (node_memory_SwapTotal_bytes{instance="${node}",job="${job}"})) * 100`,
    rootFsUsedPct: `100 - ((node_filesystem_avail_bytes{instance="${node}",job="${job}",mountpoint="/",fstype!~"rootfs|tmpfs"} * 100) / node_filesystem_size_bytes{instance="${node}",job="${job}",mountpoint="/",fstype!~"rootfs|tmpfs"})`,
    cpuCores: `count(count(node_cpu_seconds_total{instance="${node}",job="${job}"}) by (cpu))`,
    memTotal: `node_memory_MemTotal_bytes{instance="${node}",job="${job}"}`,
    uptime: `node_time_seconds{instance="${node}",job="${job}"} - node_boot_time_seconds{instance="${node}",job="${job}"}`,
    netRx: `sum(rate(node_network_receive_bytes_total{instance="${node}",job="${job}",device!~'tap.*|veth.*|br.*|docker.*|virbr*|podman.*|lo.*|vmbr.*|fwbr.|ip.*|gre.*|virbr.*|vnet.*'}[1m]))`,
    netTx: `sum(rate(node_network_transmit_bytes_total{instance="${node}",job="${job}",device!~'tap.*|veth.*|br.*|docker.*|virbr*|podman.*|lo.*|vmbr.*|fwbr.|ip.*|gre.*|virbr.*|vnet.*'}[1m]))`,
    sockstatTcp: `node_sockstat_TCP_inuse{instance="${node}",job="${job}"}`,
    sockstatTcpMem: `node_sockstat_TCP_mem{instance="${node}",job="${job}"} * 4096`,
    // Get individual partitions (excluding virtual and FUSE mounts)
    partitions_size: `node_filesystem_size_bytes{instance="${node}", job="${job}", fstype!~"tmpfs|autofs|proc|sysfs|fuse.*", mountpoint!~"/tmp.*|/var/lib/docker/.*|/run/.*"}`,
    partitions_free: `node_filesystem_free_bytes{instance="${node}", job="${job}", fstype!~"tmpfs|autofs|proc|sysfs|fuse.*", mountpoint!~"/tmp.*|/var/lib/docker/.*|/run/.*"}`
  };

  // Kick off the per-server 24h traffic integration in parallel with the
  // instant queries; failures are logged and mapped to zeros here so the
  // await below cannot reject.
  const trafficPromise = get24hServerTrafficSum(baseUrl, instance, job).catch((e) => {
    console.error(`[Prometheus] Error fetching 24h traffic for ${node}:`, e.message);
    return { rx: 0, tx: 0 };
  });

  const results = {};
  const queryPromises = Object.entries(queries).map(async ([key, expr]) => {
    try {
      const res = await query(url, expr);
      if (key.startsWith('partitions_')) {
        // Keep one entry per mountpoint so size/free can be joined below.
        results[key] = res.map(r => ({
          mountpoint: r.metric.mountpoint,
          value: parseFloat(r.value[1]) || 0
        }));
      } else {
        // `|| 0` guards against a non-numeric sample leaking NaN into the payload.
        results[key] = res.length > 0 ? (parseFloat(res[0].value[1]) || 0) : 0;
      }
    } catch (e) {
      console.error(`[Prometheus] Error querying ${key} for ${node}:`, e.message);
      results[key] = key.startsWith('partitions_') ? [] : 0;
    }
  });

  await Promise.all(queryPromises);

  // Group partitions: join the size and free series on mountpoint.
  const partitionsMap = {};
  (results.partitions_size || []).forEach(p => {
    partitionsMap[p.mountpoint] = { mountpoint: p.mountpoint, size: p.value, free: 0 };
  });
  (results.partitions_free || []).forEach(p => {
    if (partitionsMap[p.mountpoint]) {
      partitionsMap[p.mountpoint].free = p.value;
    }
  });

  results.partitions = Object.values(partitionsMap).map(p => ({
    ...p,
    used: p.size - p.free,
    percent: p.size > 0 ? ((p.size - p.free) / p.size * 100) : 0
  })).sort((a, b) => a.mountpoint.localeCompare(b.mountpoint));

  // Calculate total disk size
  results.totalDiskSize = results.partitions.reduce((sum, p) => sum + (p.size || 0), 0);

  // Drop the raw per-series arrays now that they are merged into `partitions`.
  delete results.partitions_size;
  delete results.partitions_free;

  // Add 24h traffic sum for this specific server (errors already mapped to zeros above).
  results.traffic24h = await trafficPromise;

  return results;
}
|
||||
|
||||
/**
 * Get historical metrics for a specific server (node)
 *
 * @param {string} baseUrl - Prometheus base URL (normalized internally).
 * @param {string} instance - Server token or instance label; resolved via resolveToken().
 * @param {string} job - Prometheus job label for the node exporter.
 * @param {string} metric - One of: cpuBusy, sysLoad, memUsedPct, swapUsedPct,
 *   rootFsUsedPct, netRx, netTx, sockstatTcp, sockstatTcpMem, networkTrend.
 * @param {string} [range='1h'] - Named range or '<n><unit>' string (see parseRange).
 * @param {string|null} [start=null] - Optional explicit range start (overrides `range`).
 * @param {string|null} [end=null] - Optional explicit range end (overrides `range`).
 * @returns {Promise<object>} { timestamps, values } for single-series metrics;
 *   { timestamps, rx, tx, stats } for 'networkTrend'.
 * @throws {Error} 'Invalid metric for history' for unknown metric keys.
 */
async function getServerHistory(baseUrl, instance, job, metric, range = '1h', start = null, end = null) {
  const url = normalizeUrl(baseUrl);
  const node = resolveToken(instance);

  // CPU Busy history: 100 - idle
  if (metric === 'cpuBusy') {
    const expr = `100 * (1 - avg(rate(node_cpu_seconds_total{mode="idle", instance="${node}"}[1m])))`;
    const rangeObj = parseRange(range, start, end);
    const result = await queryRange(url, expr, rangeObj.queryStart, rangeObj.queryEnd, rangeObj.step);

    if (!result || result.length === 0) return { timestamps: [], values: [] };

    return {
      // Prometheus timestamps are unix seconds; convert to ms for charting.
      timestamps: result[0].values.map(v => v[0] * 1000),
      values: result[0].values.map(v => parseFloat(v[1]))
    };
  }

  // Map metric keys to Prometheus expressions
  const metricMap = {
    sysLoad: `node_load1{instance="${node}",job="${job}"} * 100 / count(count(node_cpu_seconds_total{instance="${node}",job="${job}"}) by (cpu))`,
    memUsedPct: `(1 - (node_memory_MemAvailable_bytes{instance="${node}", job="${job}"} / node_memory_MemTotal_bytes{instance="${node}", job="${job}"})) * 100`,
    swapUsedPct: `((node_memory_SwapTotal_bytes{instance="${node}",job="${job}"} - node_memory_SwapFree_bytes{instance="${node}",job="${job}"}) / (node_memory_SwapTotal_bytes{instance="${node}",job="${job}"})) * 100`,
    rootFsUsedPct: `100 - ((node_filesystem_avail_bytes{instance="${node}",job="${job}",mountpoint="/",fstype!="rootfs"} * 100) / node_filesystem_size_bytes{instance="${node}",job="${job}",mountpoint="/",fstype!="rootfs"})`,
    netRx: `sum(rate(node_network_receive_bytes_total{instance="${node}",job="${job}",device!~'tap.*|veth.*|br.*|docker.*|virbr*|podman.*|lo.*|vmbr.*|fwbr.|ip.*|gre.*|virbr.*|vnet.*'}[1m]))`,
    netTx: `sum(rate(node_network_transmit_bytes_total{instance="${node}",job="${job}",device!~'tap.*|veth.*|br.*|docker.*|virbr*|podman.*|lo.*|vmbr.*|fwbr.|ip.*|gre.*|virbr.*|vnet.*'}[1m]))`,
    sockstatTcp: `node_sockstat_TCP_inuse{instance="${node}",job="${job}"}`,
    sockstatTcpMem: `node_sockstat_TCP_mem{instance="${node}",job="${job}"} * 4096`
  };

  const rangeObj = parseRange(range, start, end);

  // networkTrend is a composite: both directions plus aggregate statistics.
  if (metric === 'networkTrend') {
    const txExpr = metricMap.netTx;
    const rxExpr = metricMap.netRx;
    const [txResult, rxResult] = await Promise.all([
      queryRange(url, txExpr, rangeObj.queryStart, rangeObj.queryEnd, rangeObj.step),
      queryRange(url, rxExpr, rangeObj.queryStart, rangeObj.queryEnd, rangeObj.step)
    ]);

    if (txResult.length === 0 && rxResult.length === 0) return { timestamps: [], rx: [], tx: [] };

    // Use whichever series exists for the shared time axis; pad the missing
    // direction with zeros so rx/tx stay aligned with `timestamps`.
    const timestamps = (txResult.length > 0 ? txResult[0] : rxResult[0]).values.map(v => v[0] * 1000);
    const tx = txResult.length > 0 ? txResult[0].values.map(v => parseFloat(v[1])) : new Array(timestamps.length).fill(0);
    const rx = rxResult.length > 0 ? rxResult[0].values.map(v => parseFloat(v[1])) : new Array(timestamps.length).fill(0);

    // Calculate statistics on backend
    // Integrate the rate samples over each interval to get total bytes.
    let rxTotal = 0;
    let txTotal = 0;
    for (let i = 0; i < timestamps.length - 1; i++) {
      const duration = (timestamps[i+1] - timestamps[i]) / 1000;
      rxTotal += (rx[i] || 0) * duration;
      txTotal += (tx[i] || 0) * duration;
    }

    // 95th percentile of the transmit samples.
    const sortedTx = [...tx].sort((a, b) => a - b);
    const p95Idx = Math.floor(sortedTx.length * 0.95);
    const p95 = sortedTx.length > 0 ? sortedTx[p95Idx] : 0;

    return {
      timestamps,
      tx,
      rx,
      stats: {
        rxTotal,
        txTotal,
        p95,
        total: rxTotal + txTotal
      }
    };
  }

  const expr = metricMap[metric];
  if (!expr) throw new Error('Invalid metric for history');

  try {
    const result = await queryRange(url, expr, rangeObj.queryStart, rangeObj.queryEnd, rangeObj.step);
    if (!result || result.length === 0) return { timestamps: [], values: [] };

    return {
      timestamps: result[0].values.map(v => v[0] * 1000),
      values: result[0].values.map(v => parseFloat(v[1]))
    };
  } catch (err) {
    console.error(`[Prometheus] Error fetching history for ${metric} on ${node}:`, err.message);
    // Rethrow so the caller can surface the failure (unlike the overview
    // helpers, which degrade to empty results).
    throw err;
  }
}
|
||||
|
||||
/**
 * Resolve a time-range request into Prometheus query_range parameters.
 *
 * Explicit `start`/`end` values take precedence over the named `range`, but
 * only when both parse to a valid, positive-length interval; otherwise the
 * named range (or the 1h default) is used.
 *
 * @param {string} range - Named range ('15m'..'7d') or generic '<n><s|m|h|d>' (e.g. '90m').
 * @param {string|number|null} start - Optional Date-parsable range start.
 * @param {string|number|null} end - Optional Date-parsable range end.
 * @returns {{queryStart: number, queryEnd: number, step: number}} Unix seconds and step seconds.
 */
function parseRange(range, start, end) {
  if (start && end) {
    const queryStart = Math.floor(new Date(start).getTime() / 1000);
    const queryEnd = Math.floor(new Date(end).getTime() / 1000);
    // Reject unparsable dates (NaN) and empty/negative intervals instead of
    // propagating NaN into the Prometheus query parameters.
    if (Number.isFinite(queryStart) && Number.isFinite(queryEnd) && queryEnd > queryStart) {
      const duration = queryEnd - queryStart;
      // Aim for ~100 data points, never finer than a 15s step.
      const step = Math.max(15, Math.floor(duration / 100));
      return { queryStart, queryEnd, step };
    }
  }

  let duration;
  let step;
  const rangeMap = {
    '15m': { duration: 900, step: 15 },
    '30m': { duration: 1800, step: 30 },
    '1h': { duration: 3600, step: 60 },
    '6h': { duration: 21600, step: 300 },
    '12h': { duration: 43200, step: 600 },
    '24h': { duration: 86400, step: 900 },
    '2d': { duration: 172800, step: 1800 },
    '7d': { duration: 604800, step: 3600 }
  };

  if (rangeMap[range]) {
    ({ duration, step } = rangeMap[range]);
  } else {
    // Generic '<value><unit>' form, e.g. '45m', '3d'. Non-string input is
    // treated as unknown rather than throwing on .match().
    const match = typeof range === 'string' ? range.match(/^(\d+)([smhd])$/) : null;
    if (match) {
      const val = parseInt(match[1], 10);
      const unit = match[2];
      const multipliers = { s: 1, m: 60, h: 3600, d: 86400 };
      duration = val * (multipliers[unit] || 3600);
      step = Math.max(15, Math.floor(duration / 100));
    } else {
      // Unknown range: default to the last hour.
      duration = 3600;
      step = 60;
    }
  }

  const queryEnd = Math.floor(Date.now() / 1000);
  return { queryStart: queryEnd - duration, queryEnd, step };
}
|
||||
|
||||
module.exports = {
|
||||
testConnection,
|
||||
query,
|
||||
queryRange,
|
||||
getTargets,
|
||||
getOverviewMetrics,
|
||||
get24hTrafficSum,
|
||||
getNetworkHistory,
|
||||
mergeNetworkHistories,
|
||||
getCpuHistory,
|
||||
mergeCpuHistories
|
||||
mergeCpuHistories,
|
||||
getServerDetails,
|
||||
getServerHistory,
|
||||
resolveToken,
|
||||
getLatency: async (blackboxUrl, target) => {
|
||||
if (!blackboxUrl || !target) return null;
|
||||
try {
|
||||
const normalized = blackboxUrl.trim().replace(/\/+$/, '');
|
||||
|
||||
// Construct a single optimized query searching for priority metrics and common labels
|
||||
// Prioritize probe_icmp_duration_seconds OVER probe_duration_seconds
|
||||
const queryExpr = `(
|
||||
probe_icmp_duration_seconds{phase="rtt", instance="${target}"} or
|
||||
probe_icmp_duration_seconds{phase="rtt", target="${target}"} or
|
||||
probe_http_duration_seconds{phase="rtt", instance="${target}"} or
|
||||
probe_http_duration_seconds{phase="rtt", target="${target}"} or
|
||||
probe_icmp_duration_seconds{instance="${target}"} or
|
||||
probe_icmp_duration_seconds{target="${target}"} or
|
||||
probe_duration_seconds{instance="${target}"} or
|
||||
probe_duration_seconds{target="${target}"}
|
||||
)`;
|
||||
|
||||
const params = new URLSearchParams({ query: queryExpr });
|
||||
const res = await fetch(`${normalized}/api/v1/query?${params.toString()}`);
|
||||
|
||||
if (res.ok) {
|
||||
const data = await res.json();
|
||||
if (data.status === 'success' && data.data.result.length > 0) {
|
||||
return parseFloat(data.data.result[0].value[1]) * 1000;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
} catch (err) {
|
||||
console.error(`[Prometheus] Error fetching latency for ${target}:`, err.message);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Reference in New Issue
Block a user