Compare commits
121 Commits
47d25af469
...
PromdataPa
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
fd0a52368a | ||
|
|
650cc6f1b5 | ||
|
|
ff1c53ea40 | ||
|
|
b79655ccdc | ||
|
|
1f12197a91 | ||
|
|
d0455fb032 | ||
|
|
b2f6f7d2d0 | ||
|
|
ff439bb831 | ||
|
|
3980d66b49 | ||
| b16d910051 | |||
|
|
7e3a8e12d0 | ||
|
|
1d728d991e | ||
|
|
96de50285f | ||
|
|
225ec71ac3 | ||
|
|
90e72af72c | ||
|
|
d0bc646c22 | ||
|
|
90b31f9926 | ||
|
|
61748b8959 | ||
|
|
217510c07d | ||
|
|
e4b97be54e | ||
|
|
43d2b80fb2 | ||
|
|
9845f2fe5c | ||
|
|
607d71d1ca | ||
|
|
d7b6d3aebb | ||
|
|
98fdeca33b | ||
|
|
646696b8a0 | ||
|
|
58c5d36022 | ||
|
|
5fedaa299b | ||
|
|
c42d512dbd | ||
|
|
5baffb7e05 | ||
|
|
1ce174bd93 | ||
|
|
7fdac71062 | ||
|
|
6b61104641 | ||
|
|
40eeb0b9dd | ||
|
|
62747f0fcf | ||
|
|
b2f14528a9 | ||
|
|
cc3c67eae9 | ||
|
|
542258a271 | ||
|
|
a35dac78f8 | ||
|
|
e8b60ce28b | ||
|
|
d4d2927963 | ||
|
|
97e87409b5 | ||
|
|
dddf9dba65 | ||
|
|
4e953c01fc | ||
|
|
90c7bd80b1 | ||
|
|
322621a97b | ||
|
|
058a6c73a1 | ||
|
|
84972cdaeb | ||
|
|
28432c9c23 | ||
|
|
afe7361e06 | ||
|
|
2a8cb32d47 | ||
|
|
d7d650c5f9 | ||
|
|
469ef9e448 | ||
|
|
e8e23c5af8 | ||
|
|
a103c7dbf5 | ||
|
|
0c217963bb | ||
|
|
73807eaaaf | ||
|
|
d5b70edd11 | ||
|
|
d67815c7b6 | ||
|
|
8c25f1735d | ||
|
|
5e40c19ef1 | ||
|
|
6b9de37bf9 | ||
|
|
46ef8131c7 | ||
|
|
e91dcc8c02 | ||
|
|
7c1f0d4e63 | ||
|
|
464c3193d1 | ||
|
|
aed9147074 | ||
|
|
9e827c9831 | ||
|
|
34a10e3cd2 | ||
|
|
144b9b817d | ||
|
|
b2c37b8fe3 | ||
|
|
f997b6236c | ||
|
|
d557588b47 | ||
|
|
5238167212 | ||
|
|
dc1a8a1a44 | ||
|
|
d595397f08 | ||
|
|
dc865c6d9d | ||
|
|
a9fe0f219a | ||
|
|
035ebd8d40 | ||
|
|
d7f8db89a3 | ||
|
|
2149aa0208 | ||
|
|
e7b8000808 | ||
|
|
484a7a766f | ||
|
|
e55e6e8af6 | ||
|
|
236a548f58 | ||
|
|
0cf10a7e8a | ||
|
|
6b82cfb561 | ||
|
|
e66905e57f | ||
|
|
672ea11598 | ||
|
|
bea8ed607e | ||
|
|
37444eb6f4 | ||
|
|
0f4d3a2986 | ||
|
|
af83f42d26 | ||
|
|
2fc84f999c | ||
|
|
316e0e1b7e | ||
|
|
e50f95c325 | ||
|
|
b3580c15cc | ||
|
|
ded8d1b18d | ||
|
|
3d4b926b16 | ||
|
|
755bd45a0b | ||
|
|
f6fa253a11 | ||
|
|
4e8cce52ea | ||
|
|
c6e6c91e77 | ||
|
|
e9ca358eb1 | ||
|
|
a1703e72be | ||
|
|
f3f49f2c8e | ||
|
|
79779d6fcf | ||
|
|
e2dbf06601 | ||
|
|
0914881d26 | ||
|
|
e77bdbcc9e | ||
|
|
286eb1687d | ||
|
|
f4d7f129dd | ||
|
|
75736c0c4c | ||
| e9ecf164ee | |||
|
|
4f04227976 | ||
|
|
415334ad73 | ||
|
|
ba712f1907 | ||
|
|
d0bd05409d | ||
|
|
50d74916a6 | ||
|
|
4b98a910c7 | ||
|
|
1f4bdb7970 |
13
.env.example
13
.env.example
@@ -1,10 +1,9 @@
|
|||||||
|
# PromdataPanel Environment Configuration
|
||||||
|
# Note: Database and Cache settings will be automatically configured upon visiting /init.html
|
||||||
|
|
||||||
|
# Server Binding
|
||||||
HOST=0.0.0.0
|
HOST=0.0.0.0
|
||||||
PORT=3000
|
PORT=3000
|
||||||
REFRESH_INTERVAL=5000
|
|
||||||
|
|
||||||
# Valkey/Redis Cache Configuration
|
# Aggregation interval in milliseconds (default 5s)
|
||||||
VALKEY_HOST=localhost
|
REFRESH_INTERVAL=5000
|
||||||
VALKEY_PORT=6379
|
|
||||||
VALKEY_PASSWORD=
|
|
||||||
VALKEY_DB=dashboard
|
|
||||||
VALKEY_TTL=30
|
|
||||||
|
|||||||
94
Install.sh
94
Install.sh
@@ -1,94 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
|
|
||||||
# Data Visualization Display Wall - Systemd Installer
|
|
||||||
# Requirements: Node.js, NPM, Systemd (Linux)
|
|
||||||
|
|
||||||
# Colors for output
|
|
||||||
RED='\033[0;31m'
|
|
||||||
GREEN='\033[0;32m'
|
|
||||||
BLUE='\033[0;34m'
|
|
||||||
NC='\033[0m' # No Color
|
|
||||||
|
|
||||||
echo -e "${BLUE}=== Data Visualization Display Wall Installer ===${NC}"
|
|
||||||
|
|
||||||
# 1. Check permissions
|
|
||||||
if [ "$EUID" -ne 0 ]; then
|
|
||||||
echo -e "${RED}Please run as root (sudo ./Install.sh)${NC}"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# 2. Get current directory and user
|
|
||||||
PROJECT_DIR=$(pwd)
|
|
||||||
REAL_USER=${SUDO_USER:-$USER}
|
|
||||||
USER_HOME=$(getent passwd "$REAL_USER" | cut -d: -f6)
|
|
||||||
|
|
||||||
echo -e "Project Directory: ${GREEN}$PROJECT_DIR${NC}"
|
|
||||||
echo -e "Running User: ${GREEN}$REAL_USER${NC}"
|
|
||||||
|
|
||||||
# 3. Check for dependencies
|
|
||||||
if ! command -v node &> /dev/null; then
|
|
||||||
echo -e "${RED}Node.js is not installed. Please install Node.js first.${NC}"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
if ! command -v npm &> /dev/null; then
|
|
||||||
echo -e "${RED}NPM is not installed. Please install NPM first.${NC}"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# 4. Install NPM dependencies
|
|
||||||
echo -e "${BLUE}Installing dependencies...${NC}"
|
|
||||||
# Run npm install as the real user to avoid permission issues in node_modules
|
|
||||||
sudo -u "$REAL_USER" npm install
|
|
||||||
|
|
||||||
if [ $? -ne 0 ]; then
|
|
||||||
echo -e "${RED}NPM install failed.${NC}"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# 5. Create Systemd Service File
|
|
||||||
SERVICE_FILE="/etc/systemd/system/data-wall.service"
|
|
||||||
NODE_PATH=$(command -v node)
|
|
||||||
|
|
||||||
echo -e "${BLUE}Creating systemd service at $SERVICE_FILE...${NC}"
|
|
||||||
|
|
||||||
cat <<EOF > "$SERVICE_FILE"
|
|
||||||
[Unit]
|
|
||||||
Description=Data Visualization Display Wall
|
|
||||||
After=network.target mysql.service
|
|
||||||
|
|
||||||
[Service]
|
|
||||||
Type=simple
|
|
||||||
User=$REAL_USER
|
|
||||||
WorkingDirectory=$PROJECT_DIR
|
|
||||||
ExecStart=$NODE_PATH server/index.js
|
|
||||||
Restart=always
|
|
||||||
RestartSec=10
|
|
||||||
StandardOutput=syslog
|
|
||||||
StandardError=syslog
|
|
||||||
SyslogIdentifier=data-wall
|
|
||||||
# Pass environment via .env file injection for flexibility
|
|
||||||
EnvironmentFile=-$PROJECT_DIR/.env
|
|
||||||
Environment=NODE_ENV=production
|
|
||||||
|
|
||||||
[Install]
|
|
||||||
WantedBy=multi-user.target
|
|
||||||
EOF
|
|
||||||
|
|
||||||
# 6. Reload Systemd and Start
|
|
||||||
echo -e "${BLUE}Reloading systemd and starting service...${NC}"
|
|
||||||
systemctl daemon-reload
|
|
||||||
systemctl enable data-wall
|
|
||||||
systemctl stop data-wall # Stop if already running
|
|
||||||
systemctl start data-wall
|
|
||||||
|
|
||||||
# 7. Check Status
|
|
||||||
if systemctl is-active --quiet data-wall; then
|
|
||||||
echo -e "${GREEN}SUCCESS: Service is now running.${NC}"
|
|
||||||
echo -e "You can access the dashboard at http://localhost:3000"
|
|
||||||
echo -e "View logs with: ${BLUE}journalctl -u data-wall -f${NC}"
|
|
||||||
else
|
|
||||||
echo -e "${RED}FAILED: Service failed to start. Check logs with 'journalctl -u data-wall -xe'${NC}"
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo -e "${BLUE}================================================${NC}"
|
|
||||||
112
README.md
112
README.md
@@ -1,78 +1,70 @@
|
|||||||
# 数据可视化展示大屏
|
# PromdataPanel
|
||||||
|
|
||||||
多源 Prometheus 服务器监控展示大屏,支持对接多个 Prometheus 实例,实时展示所有服务器的 CPU、内存、磁盘、网络等关键指标。
|
多源 Prometheus 服务器监控展示大屏。支持对接多个 Prometheus 实例,实时聚合展示所有服务器的 CPU、内存、磁盘、带宽等关键指标,并提供可视化节点分布图。
|
||||||
|
|
||||||
## 功能特性
|
## 功能特性
|
||||||
|
|
||||||
- 🔌 **多数据源管理** - MySQL 存储配置,支持对接多个 Prometheus 实例
|
- 🔌 **多数据源管理** - 支持对接多个 Prometheus 实例(Node_Exporter / BlackboxExporter)
|
||||||
- 📊 **NodeExporter 数据查询** - 自动聚合所有 Prometheus 中的 NodeExporter 数据
|
- 📊 **指标自动聚合** - 自动汇总所有数据源的 NodeExporter 指标,实时计算全网负载
|
||||||
- 🌐 **网络流量统计** - 24 小时网络流量趋势图,总流量统计
|
- 🌐 **网络流量统计** - 24 小时流量趋势图,实时带宽(Rx/Tx)求和显示
|
||||||
- ⚡ **实时带宽监控** - 所有服务器网络带宽求和,实时显示
|
- 🗺️ **节点分布可视化** - 自动识别服务器地理位置,并在全球地图上展示实时连接状态与延迟
|
||||||
- 💻 **资源使用概览** - CPU、内存、磁盘的总使用率和详细统计
|
- ⚡ **毫秒级实时性** - 深度优化查询逻辑,支持 5s 采集频率的实时动态展示
|
||||||
- 🖥️ **服务器列表** - 所有服务器的详细指标一览表
|
- 📱 **响应式与美学设计** - 现代 UI/UX 体验,支持暗色模式,极致性能优化
|
||||||
|
|
||||||
## 快速开始
|
## 快速安装
|
||||||
|
|
||||||
### 1. 环境要求
|
### 方式一:一键脚本安装 (推荐)
|
||||||
|
|
||||||
- Node.js >= 16
|
在 Linux 服务器上,您可以使用以下脚本一键完成下载、环境检测、依赖安装并将其注册为 Systemd 系统服务:
|
||||||
- MySQL >= 5.7
|
|
||||||
|
|
||||||
### 2. 配置
|
|
||||||
|
|
||||||
复制环境变量文件并修改:
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
cp .env.example .env
|
# 下载安装最新版本 (默认 v0.1.0)
|
||||||
|
VERSION=v0.1.0 curl -sSL https://git.littlediary.cn/CN-JS-HuiBai/PromdataPanel/raw/branch/main/install.sh | bash
|
||||||
```
|
```
|
||||||
|
|
||||||
编辑 `.env` 文件,配置 MySQL 连接信息:
|
### 方式二:手动安装
|
||||||
|
|
||||||
```env
|
#### 1. 环境要求
|
||||||
MYSQL_HOST=localhost
|
- **Node.js** >= 18
|
||||||
MYSQL_PORT=3306
|
- **MySQL** >= 8.0
|
||||||
MYSQL_USER=root
|
- **Valkey** >= 7.0 (或 Redis >= 6.0)
|
||||||
MYSQL_PASSWORD=your_password
|
|
||||||
MYSQL_DATABASE=display_wall
|
#### 2. 配置与启动
|
||||||
PORT=3000
|
1. 克隆代码库:`git clone https://git.littlediary.cn/CN-JS-HuiBai/PromdataPanel.git`
|
||||||
|
2. 复制配置文件:`cp .env.example .env`
|
||||||
|
3. 安装依赖:`npm install --production`
|
||||||
|
4. 启动服务:`npm start`
|
||||||
|
|
||||||
|
#### 3. 系统初始化
|
||||||
|
首次运行后,访问 `http://your-ip:3000/init.html`,按照引导完成 MySQL 数据库和 Valkey 缓存的连接。
|
||||||
|
|
||||||
|
## 使用指引
|
||||||
|
|
||||||
|
### 1. 添加 Prometheus 数据源
|
||||||
|
点击页面右上角的 ⚙️ 按钮进入设置,添加并测试您的 Prometheus HTTP 地址。
|
||||||
|
|
||||||
|
### 2. Prometheus 采集配置
|
||||||
|
建议在 `prometheus.yml` 中设置采集周期为 `5s` 以实现平滑的实时动态效果:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
global:
|
||||||
|
scrape_interval: 5s
|
||||||
|
|
||||||
|
scrape_configs:
|
||||||
|
- job_name: 'nodes'
|
||||||
|
static_configs:
|
||||||
|
- targets: ['your-server-ip:9100']
|
||||||
```
|
```
|
||||||
|
|
||||||
### 3. 初始化数据库
|
|
||||||
|
|
||||||
```bash
|
|
||||||
npm run init-db
|
|
||||||
```
|
|
||||||
|
|
||||||
### 4. 安装依赖并启动
|
|
||||||
|
|
||||||
```bash
|
|
||||||
npm install
|
|
||||||
npm run dev
|
|
||||||
```
|
|
||||||
|
|
||||||
访问 `http://localhost:3000` 即可看到展示大屏。
|
|
||||||
|
|
||||||
### 5. 配置 Prometheus 数据源
|
|
||||||
|
|
||||||
点击右上角的 ⚙️ 按钮,添加你的 Prometheus 地址(如 `http://prometheus.example.com:9090`)。
|
|
||||||
|
|
||||||
## 技术栈
|
## 技术栈
|
||||||
|
|
||||||
- **后端**: Node.js + Express
|
- **Runtime**: Node.js
|
||||||
- **数据库**: MySQL (mysql2)
|
- **Framework**: Express.js
|
||||||
- **数据源**: Prometheus HTTP API
|
- **Database**: MySQL 8.0+
|
||||||
- **前端**: 原生 HTML/CSS/JavaScript
|
- **Caching**: Valkey / Redis
|
||||||
- **图表**: 自定义 Canvas 渲染
|
- **Visualization**: ECharts / Canvas
|
||||||
|
- **Frontend**: Vanilla JS / CSS3
|
||||||
|
|
||||||
## API 接口
|
## LICENSE
|
||||||
|
|
||||||
| 方法 | 路径 | 说明 |
|
MIT License
|
||||||
|------|------|------|
|
|
||||||
| GET | `/api/sources` | 获取所有数据源 |
|
|
||||||
| POST | `/api/sources` | 添加数据源 |
|
|
||||||
| PUT | `/api/sources/:id` | 更新数据源 |
|
|
||||||
| DELETE | `/api/sources/:id` | 删除数据源 |
|
|
||||||
| POST | `/api/sources/test` | 测试数据源连接 |
|
|
||||||
| GET | `/api/metrics/overview` | 获取聚合指标概览 |
|
|
||||||
| GET | `/api/metrics/network-history` | 获取24h网络流量历史 |
|
|
||||||
| GET | `/api/metrics/cpu-history` | 获取CPU使用率历史 |
|
|
||||||
|
|||||||
197
install.sh
Normal file
197
install.sh
Normal file
@@ -0,0 +1,197 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# PromdataPanel - Multi-Prometheus Monitoring Dashboard Installer
|
||||||
|
# This script handles OS detection, Node.js installation, project setup, and Systemd configuration.
|
||||||
|
|
||||||
|
# Colors for output
|
||||||
|
RED='\033[0;31m'
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
YELLOW='\033[1;33m'
|
||||||
|
BLUE='\033[0;34m'
|
||||||
|
NC='\033[0m' # No Color
|
||||||
|
|
||||||
|
# 0. Configuration
|
||||||
|
VERSION=${VERSION:-"v0.1.0"}
|
||||||
|
DOWNLOAD_URL="https://git.littlediary.cn/CN-JS-HuiBai/PromdataPanel/archive/${VERSION}.zip"
|
||||||
|
MIN_NODE_VERSION=18
|
||||||
|
|
||||||
|
echo -e "${BLUE}================================================${NC}"
|
||||||
|
echo -e "${BLUE} PromdataPanel Auto-Installer ${NC}"
|
||||||
|
echo -e "${BLUE} Version: ${VERSION} ${NC}"
|
||||||
|
echo -e "${BLUE}================================================${NC}"
|
||||||
|
|
||||||
|
# 1. OS Detection
|
||||||
|
detect_os() {
|
||||||
|
if [ -f /etc/os-release ]; then
|
||||||
|
. /etc/os-release
|
||||||
|
OS_ID=$ID
|
||||||
|
OS_VER=$VERSION_ID
|
||||||
|
else
|
||||||
|
echo -e "${RED}Error: Cannot detect operating system type (/etc/os-release missing).${NC}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo -e "Detected OS: ${GREEN}${OS_ID} ${OS_VER}${NC}"
|
||||||
|
}
|
||||||
|
|
||||||
|
# 2. Node.js Installation/Verification
|
||||||
|
install_node() {
|
||||||
|
echo -e "${BLUE}Verifying Node.js environment...${NC}"
|
||||||
|
|
||||||
|
NODE_INSTALLED=false
|
||||||
|
if command -v node &> /dev/null; then
|
||||||
|
CURRENT_NODE_VER=$(node -v | cut -d'v' -f2 | cut -d'.' -f1)
|
||||||
|
if [ "$CURRENT_NODE_VER" -ge "$MIN_NODE_VERSION" ]; then
|
||||||
|
echo -e "${GREEN}Node.js v$(node -v) is already installed.${NC}"
|
||||||
|
NODE_INSTALLED=true
|
||||||
|
else
|
||||||
|
echo -e "${YELLOW}Existing Node.js version (v$(node -v)) is too old (Requires >= $MIN_NODE_VERSION).${NC}"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$NODE_INSTALLED" = false ]; then
|
||||||
|
echo -e "${BLUE}Installing Node.js 20.x...${NC}"
|
||||||
|
case "$OS_ID" in
|
||||||
|
ubuntu|debian|raspbian)
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y ca-certificates curl gnupg
|
||||||
|
curl -fsSL https://deb.nodesource.com/setup_20.x | sudo -E bash -
|
||||||
|
sudo apt-get install -y nodejs
|
||||||
|
;;
|
||||||
|
centos|rhel|almalinux|rocky)
|
||||||
|
curl -fsSL https://rpm.nodesource.com/setup_20.x | sudo bash -
|
||||||
|
sudo yum install -y nodejs
|
||||||
|
;;
|
||||||
|
fedora)
|
||||||
|
curl -fsSL https://rpm.nodesource.com/setup_20.x | sudo bash -
|
||||||
|
sudo dnf install -y nodejs
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo -e "${RED}Unsupported OS for automatic Node.js installation: $OS_ID${NC}"
|
||||||
|
echo -e "Please install Node.js >= 18 manually.${NC}"
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# 3. Download and Extract (If needed)
|
||||||
|
if [ ! -f "server/index.js" ]; then
|
||||||
|
echo -e "${YELLOW}Project files not found. Starting download...${NC}"
|
||||||
|
|
||||||
|
if ! command -v curl &> /dev/null; then
|
||||||
|
echo -e "${BLUE}Installing curl...${NC}"
|
||||||
|
[ "$OS_ID" = "ubuntu" ] || [ "$OS_ID" = "debian" ] && sudo apt-get install -y curl
|
||||||
|
[ "$OS_ID" = "centos" ] || [ "$OS_ID" = "rhel" ] && sudo yum install -y curl
|
||||||
|
fi
|
||||||
|
|
||||||
|
if ! command -v unzip &> /dev/null; then
|
||||||
|
echo -e "${BLUE}Installing unzip...${NC}"
|
||||||
|
[ "$OS_ID" = "ubuntu" ] || [ "$OS_ID" = "debian" ] && sudo apt-get install -y unzip
|
||||||
|
[ "$OS_ID" = "centos" ] || [ "$OS_ID" = "rhel" ] && sudo yum install -y unzip
|
||||||
|
fi
|
||||||
|
|
||||||
|
TEMP_ZIP="promdatapanel_${VERSION}.zip"
|
||||||
|
echo -e "${BLUE}Downloading ${DOWNLOAD_URL}...${NC}"
|
||||||
|
curl -L "$DOWNLOAD_URL" -o "$TEMP_ZIP"
|
||||||
|
|
||||||
|
if [ $? -ne 0 ]; then
|
||||||
|
echo -e "${RED}Download failed.${NC}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo -e "${BLUE}Extracting files...${NC}"
|
||||||
|
unzip -q "$TEMP_ZIP"
|
||||||
|
|
||||||
|
EXTRACTED_DIR=$(ls -d */ | grep -E "^PromdataPanel" | head -n 1)
|
||||||
|
if [ -d "$EXTRACTED_DIR" ]; then
|
||||||
|
cd "$EXTRACTED_DIR" || exit 1
|
||||||
|
else
|
||||||
|
EXTRACTED_DIR=$(ls -d */ | head -n 1)
|
||||||
|
[ -d "$EXTRACTED_DIR" ] && cd "$EXTRACTED_DIR" || exit 1
|
||||||
|
fi
|
||||||
|
rm "../$TEMP_ZIP" 2>/dev/null || rm "$TEMP_ZIP" 2>/dev/null
|
||||||
|
fi
|
||||||
|
|
||||||
|
# 4. Initialize Setup
|
||||||
|
# Permission check
|
||||||
|
if [ "$EUID" -eq 0 ]; then
|
||||||
|
REAL_USER=${SUDO_USER:-$USER}
|
||||||
|
else
|
||||||
|
REAL_USER=$USER
|
||||||
|
fi
|
||||||
|
|
||||||
|
detect_os
|
||||||
|
install_node
|
||||||
|
|
||||||
|
PROJECT_DIR=$(pwd)
|
||||||
|
echo -e "Project Directory: ${GREEN}$PROJECT_DIR${NC}"
|
||||||
|
echo -e "Running User: ${GREEN}$REAL_USER${NC}"
|
||||||
|
|
||||||
|
# Check for .env file
|
||||||
|
if [ ! -f ".env" ]; then
|
||||||
|
if [ -f ".env.example" ]; then
|
||||||
|
echo -e "${BLUE}Creating .env from .env.example...${NC}"
|
||||||
|
cp .env.example .env
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# 5. Install Dependencies
|
||||||
|
echo -e "${BLUE}Installing NPM dependencies...${NC}"
|
||||||
|
npm install --production
|
||||||
|
|
||||||
|
if [ $? -ne 0 ]; then
|
||||||
|
echo -e "${RED}NPM install failed.${NC}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# 6. Create Systemd Service File
|
||||||
|
SERVICE_FILE="/etc/systemd/system/promdatapanel.service"
|
||||||
|
NODE_PATH=$(command -v node)
|
||||||
|
|
||||||
|
echo -e "${BLUE}Creating systemd service at $SERVICE_FILE...${NC}"
|
||||||
|
sudo bash -c "cat <<EOF > '$SERVICE_FILE'
|
||||||
|
[Unit]
|
||||||
|
Description=PromdataPanel Monitoring Dashboard
|
||||||
|
After=network.target mysql.service redis-server.service valkey-server.service
|
||||||
|
Wants=mysql.service
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=simple
|
||||||
|
User=$REAL_USER
|
||||||
|
WorkingDirectory=$PROJECT_DIR
|
||||||
|
ExecStart=$NODE_PATH server/index.js
|
||||||
|
Restart=always
|
||||||
|
RestartSec=10
|
||||||
|
StandardOutput=syslog
|
||||||
|
StandardError=syslog
|
||||||
|
SyslogIdentifier=promdatapanel
|
||||||
|
EnvironmentFile=-$PROJECT_DIR/.env
|
||||||
|
Environment=NODE_ENV=production
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
|
EOF"
|
||||||
|
|
||||||
|
# 7. Reload and Start
|
||||||
|
echo -e "${BLUE}Reloading systemd and restarting service...${NC}"
|
||||||
|
sudo systemctl daemon-reload
|
||||||
|
sudo systemctl enable promdatapanel
|
||||||
|
sudo systemctl restart promdatapanel
|
||||||
|
|
||||||
|
# 8. Check Status
|
||||||
|
echo -e "${BLUE}Checking service status...${NC}"
|
||||||
|
sleep 2
|
||||||
|
if sudo systemctl is-active --quiet promdatapanel; then
|
||||||
|
echo -e "${GREEN}SUCCESS: PromdataPanel is now running.${NC}"
|
||||||
|
PORT=$(grep "^PORT=" .env | cut -d'=' -f2)
|
||||||
|
PORT=${PORT:-3000}
|
||||||
|
IP_ADDR=$(hostname -I | awk '{print $1}')
|
||||||
|
echo -e "Dashboard URL: ${YELLOW}http://${IP_ADDR}:${PORT}${NC}"
|
||||||
|
else
|
||||||
|
echo -e "${RED}FAILED: Service failed to start.${NC}"
|
||||||
|
echo -e "Check logs with: ${BLUE}journalctl -u promdatapanel -xe${NC}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo -e "${BLUE}================================================${NC}"
|
||||||
|
echo -e "${GREEN}Installation completed!${NC}"
|
||||||
|
echo -e "${BLUE}================================================${NC}"
|
||||||
24
package-lock.json
generated
24
package-lock.json
generated
@@ -13,7 +13,8 @@
|
|||||||
"dotenv": "^16.4.0",
|
"dotenv": "^16.4.0",
|
||||||
"express": "^4.21.0",
|
"express": "^4.21.0",
|
||||||
"ioredis": "^5.10.1",
|
"ioredis": "^5.10.1",
|
||||||
"mysql2": "^3.11.0"
|
"mysql2": "^3.11.0",
|
||||||
|
"ws": "^8.20.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/@ioredis/commands": {
|
"node_modules/@ioredis/commands": {
|
||||||
@@ -1215,6 +1216,27 @@
|
|||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 0.8"
|
"node": ">= 0.8"
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
"node_modules/ws": {
|
||||||
|
"version": "8.20.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/ws/-/ws-8.20.0.tgz",
|
||||||
|
"integrity": "sha512-sAt8BhgNbzCtgGbt2OxmpuryO63ZoDk/sqaB/znQm94T4fCEsy/yV+7CdC1kJhOU9lboAEU7R3kquuycDoibVA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=10.0.0"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"bufferutil": "^4.0.1",
|
||||||
|
"utf-8-validate": ">=5.0.2"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"bufferutil": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"utf-8-validate": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
{
|
{
|
||||||
"name": "data-visualization-display-wall",
|
"name": "promdatapanel",
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"description": "Data Visualization Display Wall - Multi-Prometheus Monitoring Dashboard",
|
"description": "Data Visualization Display Wall - Multi-Prometheus Monitoring Dashboard",
|
||||||
"main": "server/index.js",
|
"main": "server/index.js",
|
||||||
@@ -14,6 +14,7 @@
|
|||||||
"dotenv": "^16.4.0",
|
"dotenv": "^16.4.0",
|
||||||
"express": "^4.21.0",
|
"express": "^4.21.0",
|
||||||
"ioredis": "^5.10.1",
|
"ioredis": "^5.10.1",
|
||||||
"mysql2": "^3.11.0"
|
"mysql2": "^3.11.0",
|
||||||
|
"ws": "^8.20.0"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
1373
public/css/style.css
1373
public/css/style.css
File diff suppressed because it is too large
Load Diff
@@ -4,14 +4,15 @@
|
|||||||
<head>
|
<head>
|
||||||
<meta charset="UTF-8">
|
<meta charset="UTF-8">
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
<meta name="description" content="多源Prometheus服务器监控展示大屏 - 实时CPU、内存、磁盘、网络统计">
|
<meta name="description" content="LDNET-GA">
|
||||||
<title>数据可视化展示大屏</title>
|
<title>LDNET-GA</title>
|
||||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
||||||
<link
|
<link
|
||||||
href="https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700;800;900&family=JetBrains+Mono:wght@400;500;600&display=swap"
|
href="https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700;800;900&family=JetBrains+Mono:wght@400;500;600&display=swap"
|
||||||
rel="stylesheet">
|
rel="stylesheet">
|
||||||
<link rel="stylesheet" href="/css/style.css">
|
<link rel="stylesheet" href="/css/style.css">
|
||||||
|
<script src="https://cdn.jsdelivr.net/npm/echarts@5.4.3/dist/echarts.min.js"></script>
|
||||||
<script>
|
<script>
|
||||||
// Prevent theme flicker
|
// Prevent theme flicker
|
||||||
(function () {
|
(function () {
|
||||||
@@ -66,13 +67,6 @@
|
|||||||
</div>
|
</div>
|
||||||
<h1 class="logo-text" id="logoText">数据可视化展示大屏</h1>
|
<h1 class="logo-text" id="logoText">数据可视化展示大屏</h1>
|
||||||
</div>
|
</div>
|
||||||
<div class="header-meta">
|
|
||||||
<span class="server-count" id="serverCount">
|
|
||||||
<span class="dot dot-pulse"></span>
|
|
||||||
<span id="serverCountText">0 台服务器</span>
|
|
||||||
</span>
|
|
||||||
<span class="source-count" id="sourceCount">0 个数据源</span>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
<div class="header-right">
|
<div class="header-right">
|
||||||
|
|
||||||
@@ -128,7 +122,7 @@
|
|||||||
</svg>
|
</svg>
|
||||||
</div>
|
</div>
|
||||||
<div class="stat-card-content">
|
<div class="stat-card-content">
|
||||||
<span class="stat-card-label">服务器总数</span>
|
<span class="stat-card-label" id="totalServersLabel">服务器总数</span>
|
||||||
<span class="stat-card-value" id="totalServers">0</span>
|
<span class="stat-card-value" id="totalServers">0</span>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -187,9 +181,12 @@
|
|||||||
</svg>
|
</svg>
|
||||||
</div>
|
</div>
|
||||||
<div class="stat-card-content">
|
<div class="stat-card-content">
|
||||||
<span class="stat-card-label">实时总带宽</span>
|
<span class="stat-card-label">实时带宽 (MB/s ↑/↓)</span>
|
||||||
<span class="stat-card-value" id="totalBandwidth">0 B/s</span>
|
<div class="stat-card-value-group">
|
||||||
<span class="stat-card-sub" id="bandwidthDetail">↓ 0 ↑ 0</span>
|
<span class="stat-card-value" id="totalBandwidthTx">0.00</span>
|
||||||
|
<span class="stat-card-separator">/</span>
|
||||||
|
<span class="stat-card-value" id="totalBandwidthRx">0.00</span>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</section>
|
</section>
|
||||||
@@ -206,12 +203,15 @@
|
|||||||
</svg>
|
</svg>
|
||||||
网络流量趋势 (24h)
|
网络流量趋势 (24h)
|
||||||
</h2>
|
</h2>
|
||||||
<span class="title-time" id="gaugesTime"></span>
|
|
||||||
</div>
|
</div>
|
||||||
<div class="chart-legend">
|
<div class="chart-legend">
|
||||||
<span class="legend-item"><span class="legend-dot legend-rx"></span>接收 (RX)</span>
|
<span class="legend-item" id="legendRx" style="cursor: pointer;" title="点击切换 接收 (RX) 显示/隐藏"><span
|
||||||
<span class="legend-item"><span class="legend-dot legend-tx"></span>发送 (TX)</span>
|
class="legend-dot legend-rx"></span>接收 (RX)</span>
|
||||||
<span class="legend-item"><span class="legend-dot legend-p95"></span>95计费 (P95)</span>
|
<span class="legend-item" id="legendTx" style="cursor: pointer;" title="点击切换 发送 (TX) 显示/隐藏"><span
|
||||||
|
class="legend-dot legend-tx"></span>发送 (TX)</span>
|
||||||
|
<span class="legend-item disabled" id="legendP95" style="cursor: pointer;" title="点击切换 P95 线显示/隐藏">
|
||||||
|
<span class="legend-dot legend-p95"></span>95计费 (<span id="p95LabelText">上行</span>)
|
||||||
|
</span>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<div class="chart-body">
|
<div class="chart-body">
|
||||||
@@ -227,7 +227,7 @@
|
|||||||
<span class="traffic-value" id="traffic24hTx">0 B</span>
|
<span class="traffic-value" id="traffic24hTx">0 B</span>
|
||||||
</div>
|
</div>
|
||||||
<div class="traffic-stat traffic-stat-p95">
|
<div class="traffic-stat traffic-stat-p95">
|
||||||
<span class="traffic-label">95计费带宽</span>
|
<span class="traffic-label">95计费 (上行)</span>
|
||||||
<span class="traffic-value" id="trafficP95">0 B/s</span>
|
<span class="traffic-value" id="trafficP95">0 B/s</span>
|
||||||
</div>
|
</div>
|
||||||
<div class="traffic-stat traffic-stat-total">
|
<div class="traffic-stat traffic-stat-total">
|
||||||
@@ -240,6 +240,43 @@
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<!-- Global Traffic 3D Globe -->
|
||||||
|
<div class="chart-card globe-card" id="globeCard">
|
||||||
|
<div class="chart-card-header">
|
||||||
|
<h2 class="chart-title">
|
||||||
|
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.5" class="chart-title-icon">
|
||||||
|
<circle cx="12" cy="12" r="10" />
|
||||||
|
<path
|
||||||
|
d="M2 12h20M12 2a15.3 15.3 0 0 1 4 10 15.3 15.3 0 0 1-4 10 15.3 15.3 0 0 1-4-10 15.3 15.3 0 0 1 4-10z" />
|
||||||
|
</svg>
|
||||||
|
全球服务器分布
|
||||||
|
</h2>
|
||||||
|
<div class="chart-header-actions">
|
||||||
|
<button class="btn-icon" id="btnExpandGlobe" title="放大显示">
|
||||||
|
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"
|
||||||
|
style="width: 18px; height: 18px;">
|
||||||
|
<path d="M15 3h6v6M9 21H3v-6M21 3l-7 7M3 21l7-7" />
|
||||||
|
</svg>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="globe-body" id="globeContainer"></div>
|
||||||
|
<div class="chart-footer">
|
||||||
|
<div class="traffic-stat">
|
||||||
|
<span class="traffic-label">全球节点总数</span>
|
||||||
|
<span class="traffic-value" id="globeTotalNodes">0</span>
|
||||||
|
</div>
|
||||||
|
<div class="traffic-stat">
|
||||||
|
<span class="traffic-label">覆盖地区/国家</span>
|
||||||
|
<span class="traffic-value" id="globeTotalRegions">0</span>
|
||||||
|
</div>
|
||||||
|
<div class="traffic-stat">
|
||||||
|
<span class="traffic-label">实时活跃状态</span>
|
||||||
|
<span class="traffic-value" style="color: var(--accent-emerald);">Active</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
</section>
|
</section>
|
||||||
|
|
||||||
<!-- Server List -->
|
<!-- Server List -->
|
||||||
@@ -255,19 +292,40 @@
|
|||||||
</svg>
|
</svg>
|
||||||
服务器详情
|
服务器详情
|
||||||
</h2>
|
</h2>
|
||||||
|
<div class="chart-header-right">
|
||||||
|
<div class="search-box">
|
||||||
|
<input type="search" id="serverSearchFilter" name="q-filter-server" placeholder="检索服务器名称..."
|
||||||
|
autocomplete="one-time-code" spellcheck="false">
|
||||||
|
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round"
|
||||||
|
stroke-linejoin="round" class="search-icon">
|
||||||
|
<circle cx="11" cy="11" r="8"></circle>
|
||||||
|
<line x1="21" y1="21" x2="16.65" y2="16.65"></line>
|
||||||
|
</svg>
|
||||||
|
</div>
|
||||||
|
<button id="btnResetSort" class="btn-icon-sm" title="重置筛选与排序">
|
||||||
|
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round"
|
||||||
|
stroke-linejoin="round">
|
||||||
|
<path d="M3 12a9 9 0 1 0 9-9 9.75 9.75 0 0 0-6.74 2.74L3 8"></path>
|
||||||
|
<path d="M3 3v5h5"></path>
|
||||||
|
</svg>
|
||||||
|
</button>
|
||||||
|
<select id="sourceFilter" class="source-select">
|
||||||
|
<option value="all">所有数据源</option>
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<div class="server-table-wrap">
|
<div class="server-table-wrap">
|
||||||
<table class="server-table" id="serverTable">
|
<table class="server-table" id="serverTable">
|
||||||
<thead>
|
<thead>
|
||||||
<tr>
|
<tr>
|
||||||
<th>状态</th>
|
<th class="sortable active" data-sort="up">状态 <span class="sort-icon"></span></th>
|
||||||
<th>Job / 实例</th>
|
<th class="sortable" data-sort="job">Job / 实例 <span class="sort-icon"></span></th>
|
||||||
<th>数据源</th>
|
<th class="sortable" data-sort="source">数据源 <span class="sort-icon"></span></th>
|
||||||
<th>CPU</th>
|
<th class="sortable" data-sort="cpu">CPU <span class="sort-icon"></span></th>
|
||||||
<th>内存</th>
|
<th class="sortable" data-sort="mem">内存 <span class="sort-icon"></span></th>
|
||||||
<th>磁盘</th>
|
<th class="sortable" data-sort="disk">磁盘 <span class="sort-icon"></span></th>
|
||||||
<th>网络 ↓</th>
|
<th class="sortable" data-sort="netRx">网络 ↓ <span class="sort-icon"></span></th>
|
||||||
<th>网络 ↑</th>
|
<th class="sortable" data-sort="netTx">网络 ↑ <span class="sort-icon"></span></th>
|
||||||
</tr>
|
</tr>
|
||||||
</thead>
|
</thead>
|
||||||
<tbody id="serverTableBody">
|
<tbody id="serverTableBody">
|
||||||
@@ -277,6 +335,21 @@
|
|||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
</div>
|
</div>
|
||||||
|
<div class="pagination-footer">
|
||||||
|
<div class="page-size-selector">
|
||||||
|
<span>每页显示</span>
|
||||||
|
<select id="pageSizeSelect" class="source-select">
|
||||||
|
<option value="10">10</option>
|
||||||
|
<option value="20" selected>20</option>
|
||||||
|
<option value="50">50</option>
|
||||||
|
<option value="100">100</option>
|
||||||
|
</select>
|
||||||
|
<span>条</span>
|
||||||
|
</div>
|
||||||
|
<div class="pagination-controls" id="paginationControls">
|
||||||
|
<!-- Pagination buttons will be injected here -->
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</section>
|
</section>
|
||||||
</main>
|
</main>
|
||||||
@@ -288,6 +361,8 @@
|
|||||||
<div class="modal-tabs">
|
<div class="modal-tabs">
|
||||||
<button class="modal-tab active" data-tab="prom">数据源管理</button>
|
<button class="modal-tab active" data-tab="prom">数据源管理</button>
|
||||||
<button class="modal-tab" data-tab="site">大屏设置</button>
|
<button class="modal-tab" data-tab="site">大屏设置</button>
|
||||||
|
<button class="modal-tab" data-tab="latency">延迟线路管理</button>
|
||||||
|
<button class="modal-tab" data-tab="auth">账号安全</button>
|
||||||
</div>
|
</div>
|
||||||
<button class="modal-close" id="modalClose">×</button>
|
<button class="modal-close" id="modalClose">×</button>
|
||||||
</div>
|
</div>
|
||||||
@@ -298,13 +373,21 @@
|
|||||||
<div class="add-source-form" id="addSourceForm">
|
<div class="add-source-form" id="addSourceForm">
|
||||||
<h3>添加数据源</h3>
|
<h3>添加数据源</h3>
|
||||||
<div class="form-row">
|
<div class="form-row">
|
||||||
<div class="form-group">
|
<div class="form-group" style="flex: 0.8;">
|
||||||
|
<label for="sourceType">类型</label>
|
||||||
|
<select id="sourceType"
|
||||||
|
style="padding: 10px 14px; background: var(--bg-input); border: 1px solid var(--border-color); border-radius: var(--radius-sm); color: var(--text-primary); outline: none;">
|
||||||
|
<option value="prometheus">Prometheus</option>
|
||||||
|
<option value="blackbox">Blackbox Exporter</option>
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
<div class="form-group" style="flex: 1;">
|
||||||
<label for="sourceName">名称</label>
|
<label for="sourceName">名称</label>
|
||||||
<input type="text" id="sourceName" placeholder="例:生产环境" autocomplete="off">
|
<input type="text" id="sourceName" placeholder="例:生产环境" autocomplete="off">
|
||||||
</div>
|
</div>
|
||||||
<div class="form-group form-group-wide">
|
<div class="form-group form-group-wide">
|
||||||
<label for="sourceUrl">Prometheus URL</label>
|
<label for="sourceUrl">URL 地址</label>
|
||||||
<input type="url" id="sourceUrl" placeholder="http://prometheus.example.com:9090" autocomplete="off">
|
<input type="url" id="sourceUrl" placeholder="http://1.2.3.4:9090" autocomplete="off">
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<div class="form-row">
|
<div class="form-row">
|
||||||
@@ -312,6 +395,15 @@
|
|||||||
<label for="sourceDesc">描述 (可选)</label>
|
<label for="sourceDesc">描述 (可选)</label>
|
||||||
<input type="text" id="sourceDesc" placeholder="数据源描述" autocomplete="off">
|
<input type="text" id="sourceDesc" placeholder="数据源描述" autocomplete="off">
|
||||||
</div>
|
</div>
|
||||||
|
<div class="form-group" id="serverSourceOption"
|
||||||
|
style="display: flex; align-items: flex-end; padding-bottom: 8px;">
|
||||||
|
<label
|
||||||
|
style="display: flex; align-items: center; gap: 8px; cursor: pointer; font-size: 0.85rem; color: var(--text-secondary); white-space: nowrap;">
|
||||||
|
<input type="checkbox" id="isServerSource" checked
|
||||||
|
style="width: 16px; height: 16px; accent-color: var(--accent-indigo);">
|
||||||
|
<span>用于服务器展示</span>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
<div class="form-actions">
|
<div class="form-actions">
|
||||||
<button class="btn btn-test" id="btnTest">测试连接</button>
|
<button class="btn btn-test" id="btnTest">测试连接</button>
|
||||||
<button class="btn btn-add" id="btnAdd">添加</button>
|
<button class="btn btn-add" id="btnAdd">添加</button>
|
||||||
@@ -345,21 +437,177 @@
|
|||||||
<label for="logoUrlInput">Logo URL (图片链接,为空则显示默认图标)</label>
|
<label for="logoUrlInput">Logo URL (图片链接,为空则显示默认图标)</label>
|
||||||
<input type="url" id="logoUrlInput" placeholder="https://example.com/logo.png">
|
<input type="url" id="logoUrlInput" placeholder="https://example.com/logo.png">
|
||||||
</div>
|
</div>
|
||||||
<div class="form-group" style="margin-top: 15px;">
|
<div class="settings-section" style="margin-top: 25px; border-top: 1px solid var(--border-color); padding-top: 20px;">
|
||||||
<label for="defaultThemeInput">默认主题</label>
|
<h4 style="font-size: 0.85rem; color: var(--accent-indigo); margin-bottom: 15px; text-transform: uppercase; letter-spacing: 0.5px;">界面外观 (Appearance)</h4>
|
||||||
|
<div class="form-group">
|
||||||
|
<label for="defaultThemeInput">色彩主题模式</label>
|
||||||
<select id="defaultThemeInput"
|
<select id="defaultThemeInput"
|
||||||
|
style="padding: 10px 14px; background: var(--bg-input); border: 1px solid var(--border-color); border-radius: var(--radius-sm); color: var(--text-primary); width: 100%;">
|
||||||
|
<option value="auto">跟随系统主题 (Sync with OS)</option>
|
||||||
|
<option value="dark">强制深色模式 (Always Dark)</option>
|
||||||
|
<option value="light">强制浅色模式 (Always Light)</option>
|
||||||
|
</select>
|
||||||
|
<p style="font-size: 0.72rem; color: var(--text-muted); margin-top: 6px;">选择“跟随系统”后,应用将自动同步您操作系统或浏览器的黑暗/白天模式设置。</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="form-group" style="margin-top: 15px;">
|
||||||
|
<label for="show95BandwidthInput">24h趋势图默认显示 95计费线</label>
|
||||||
|
<select id="show95BandwidthInput"
|
||||||
style="padding: 10px 14px; background: var(--bg-input); border: 1px solid var(--border-color); border-radius: var(--radius-sm); color: var(--text-primary);">
|
style="padding: 10px 14px; background: var(--bg-input); border: 1px solid var(--border-color); border-radius: var(--radius-sm); color: var(--text-primary);">
|
||||||
<option value="dark">默认夜间模式</option>
|
<option value="1">显示</option>
|
||||||
<option value="light">默认白天模式</option>
|
<option value="0">不显示</option>
|
||||||
<option value="auto">跟随浏览器/系统</option>
|
</select>
|
||||||
|
</div>
|
||||||
|
<div class="form-group" style="margin-top: 15px;">
|
||||||
|
<label for="p95TypeSelect">95带宽计费统计类型</label>
|
||||||
|
<select id="p95TypeSelect"
|
||||||
|
style="padding: 10px 14px; background: var(--bg-input); border: 1px solid var(--border-color); border-radius: var(--radius-sm); color: var(--text-primary);">
|
||||||
|
<option value="tx">仅统计上行 (TX)</option>
|
||||||
|
<option value="rx">仅统计下行 (RX)</option>
|
||||||
|
<option value="both">统计上行+下行 (Sum)</option>
|
||||||
</select>
|
</select>
|
||||||
</div>
|
</div>
|
||||||
<div class="form-actions" style="margin-top: 25px; display: flex; justify-content: flex-end;">
|
<div class="form-actions" style="margin-top: 25px; display: flex; justify-content: flex-end;">
|
||||||
<button class="btn btn-add" id="btnSaveSiteSettings">保存设置</button>
|
<button class="btn btn-add" id="btnSaveSiteSettings">保存基础设置</button>
|
||||||
</div>
|
</div>
|
||||||
<div class="form-message" id="siteSettingsMessage"></div>
|
<div class="form-message" id="siteSettingsMessage"></div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<!-- Latency Routes Tab -->
|
||||||
|
<div class="tab-content" id="tab-latency">
|
||||||
|
<div class="latency-settings-form">
|
||||||
|
<h3>Blackbox 延迟连线管理</h3>
|
||||||
|
<div class="latency-routes-manager">
|
||||||
|
<!-- Add Route Form -->
|
||||||
|
<div class="add-route-mini-form"
|
||||||
|
style="background: rgba(255,255,255,0.02); padding: 15px; border-radius: 8px; margin-bottom: 20px; border: 1px solid var(--border-color);">
|
||||||
|
<div class="form-row">
|
||||||
|
<div class="form-group" style="flex: 1.5;">
|
||||||
|
<label>探测用服务器</label>
|
||||||
|
<select id="routeSourceSelect"
|
||||||
|
style="padding: 10px 14px; background: var(--bg-input); border: 1px solid var(--border-color); border-radius: var(--radius-sm); color: var(--text-primary);">
|
||||||
|
<option value="">-- 选择数据源 --</option>
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
<div class="form-group">
|
||||||
|
<label>起航点</label>
|
||||||
|
<input type="text" id="routeSourceInput" placeholder="例:China">
|
||||||
|
</div>
|
||||||
|
<div class="form-group">
|
||||||
|
<label>目的地</label>
|
||||||
|
<input type="text" id="routeDestInput" placeholder="例:United States">
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="form-row" style="margin-top: 10px; align-items: flex-end;">
|
||||||
|
<div class="form-group" style="flex: 2;">
|
||||||
|
<label>Blackbox 探测目标 (IP 或 域名)</label>
|
||||||
|
<input type="text" id="routeTargetInput" placeholder="例:1.1.1.1 或 google.com">
|
||||||
|
</div>
|
||||||
|
<div class="form-actions" style="padding-bottom: 0; display: flex; gap: 8px;">
|
||||||
|
<button class="btn btn-add" id="btnAddRoute" style="padding: 10px 24px;">添加线路</button>
|
||||||
|
<button class="btn btn-test" id="btnCancelEditRoute"
|
||||||
|
style="display: none; padding: 10px 15px; background: rgba(0,0,0,0.3);">取消</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Routes List -->
|
||||||
|
<div class="latency-routes-list-container">
|
||||||
|
<h4
|
||||||
|
style="font-size: 0.75rem; color: var(--text-muted); text-transform: uppercase;; margin-bottom: 10px;">
|
||||||
|
已配置线路</h4>
|
||||||
|
<div id="latencyRoutesList" class="latency-routes-list"
|
||||||
|
style="display: flex; flex-direction: column; gap: 10px;">
|
||||||
|
<!-- Routes will be injected here -->
|
||||||
|
<div class="route-empty"
|
||||||
|
style="text-align: center; padding: 20px; color: var(--text-muted); font-size: 0.85rem; background: rgba(0,0,0,0.1); border-radius: 8px;">
|
||||||
|
暂无线路</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Account Security Tab -->
|
||||||
|
<div class="tab-content" id="tab-auth">
|
||||||
|
<div class="security-settings-form">
|
||||||
|
<h3>修改登录密码</h3>
|
||||||
|
<div class="form-group">
|
||||||
|
<label for="oldPassword">当前密码</label>
|
||||||
|
<input type="password" id="oldPassword" placeholder="请输入当前旧密码">
|
||||||
|
</div>
|
||||||
|
<div class="form-group" style="margin-top: 15px;">
|
||||||
|
<label for="newPassword">新密码</label>
|
||||||
|
<input type="password" id="newPassword" placeholder="请输入要设置的新密码">
|
||||||
|
</div>
|
||||||
|
<div class="form-group" style="margin-top: 15px;">
|
||||||
|
<label for="confirmNewPassword">确认新密码</label>
|
||||||
|
<input type="password" id="confirmNewPassword" placeholder="请再次确认新密码">
|
||||||
|
</div>
|
||||||
|
<div class="form-actions" style="margin-top: 25px; display: flex; justify-content: flex-end;">
|
||||||
|
<button class="btn btn-add" id="btnChangePassword">提交修改</button>
|
||||||
|
</div>
|
||||||
|
<div class="form-message" id="changePasswordMessage"></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Server Detail Modal -->
|
||||||
|
<div class="modal-overlay" id="serverDetailModal">
|
||||||
|
<div class="modal" style="max-width: 800px; width: 95%;">
|
||||||
|
<div class="modal-header">
|
||||||
|
<div style="display: flex; flex-direction: column;">
|
||||||
|
<h2 id="serverDetailTitle" style="margin-bottom: 0;">服务器详情</h2>
|
||||||
|
</div>
|
||||||
|
<button class="modal-close" id="serverDetailClose">×</button>
|
||||||
|
</div>
|
||||||
|
<div class="modal-body" id="serverDetailBody" style="padding: 0;">
|
||||||
|
<div id="detailLoading" style="text-align: center; padding: 40px; display: none;">
|
||||||
|
<div class="dot dot-pulse"
|
||||||
|
style="display: inline-block; width: 12px; height: 12px; background: var(--accent-indigo);"></div>
|
||||||
|
<span style="margin-left: 10px; color: var(--text-secondary);">正在从数据源读取详情...</span>
|
||||||
|
</div>
|
||||||
|
<div class="detail-container" id="detailContainer">
|
||||||
|
<!-- Metric Items are injected here -->
|
||||||
|
<div class="detail-metrics-list" id="detailMetricsList"></div>
|
||||||
|
|
||||||
|
<div class="detail-partitions-container metric-item" id="detailPartitionsContainer" style="display: none;">
|
||||||
|
<div class="metric-item-header" id="partitionHeader">
|
||||||
|
<div class="metric-label-group">
|
||||||
|
<span class="metric-label">磁盘分区详情 (已挂载)</span>
|
||||||
|
<span class="metric-value" id="partitionSummary">读取中...</span>
|
||||||
|
</div>
|
||||||
|
<svg class="chevron-icon" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
|
||||||
|
<polyline points="6 9 12 15 18 9"></polyline>
|
||||||
|
</svg>
|
||||||
|
</div>
|
||||||
|
<div class="metric-item-content" id="partitionContent">
|
||||||
|
<div class="detail-partitions-list" id="detailPartitionsList"></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="detail-info-grid" id="detailInfoGrid">
|
||||||
|
<div class="info-item">
|
||||||
|
<span class="info-label">CPU 核心总数</span>
|
||||||
|
<span class="info-value" id="detailCpuCores">0 核心</span>
|
||||||
|
</div>
|
||||||
|
<div class="info-item">
|
||||||
|
<span class="info-label">物理内存总量</span>
|
||||||
|
<span class="info-value" id="detailMemTotal">0 GB</span>
|
||||||
|
</div>
|
||||||
|
<div class="info-item">
|
||||||
|
<span class="info-label">运行时间 (Uptime)</span>
|
||||||
|
<span class="info-value" id="detailUptime">0天 0小时</span>
|
||||||
|
</div>
|
||||||
|
<div class="info-item">
|
||||||
|
<span class="info-label">硬盘总量统计</span>
|
||||||
|
<span class="info-value" id="detailDiskTotal">0 GB</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -70,6 +70,33 @@
|
|||||||
justify-content: center;
|
justify-content: center;
|
||||||
padding: 10px 0;
|
padding: 10px 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@media (max-width: 480px) {
|
||||||
|
body {
|
||||||
|
align-items: flex-start;
|
||||||
|
padding: 16px 12px;
|
||||||
|
}
|
||||||
|
.init-container {
|
||||||
|
padding: 24px 18px;
|
||||||
|
border-radius: 10px;
|
||||||
|
max-width: 100%;
|
||||||
|
}
|
||||||
|
.init-header h2 {
|
||||||
|
font-size: 18px;
|
||||||
|
}
|
||||||
|
.init-header p {
|
||||||
|
font-size: 12px;
|
||||||
|
}
|
||||||
|
.form-row {
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
.actions {
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
.actions .btn {
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
}
|
||||||
</style>
|
</style>
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
@@ -117,11 +144,34 @@
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<div class="init-header" style="margin: 24px 0 16px 0; text-align: left;">
|
||||||
|
<h3 style="font-size: 16px; color: var(--text-main); margin: 0;">Valkey / Redis 缓存配置 (可选)</h3>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="form-row">
|
||||||
|
<div class="form-group" style="flex: 2;">
|
||||||
|
<label for="vHost">Valkey 地址</label>
|
||||||
|
<input type="text" id="vHost" value="localhost" placeholder="localhost" autocomplete="off">
|
||||||
|
</div>
|
||||||
|
<div class="form-group" style="flex: 1;">
|
||||||
|
<label for="vPort">端口</label>
|
||||||
|
<input type="number" id="vPort" value="6379" placeholder="6379" autocomplete="off">
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="form-row">
|
||||||
|
<div class="form-group form-group-wide">
|
||||||
|
<label for="vPassword">Valkey 密码</label>
|
||||||
|
<input type="password" id="vPassword" placeholder="留空则无密码">
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<div class="form-message" id="messageBox"></div>
|
<div class="form-message" id="messageBox"></div>
|
||||||
|
|
||||||
<div class="actions">
|
<div class="actions" style="flex-wrap: wrap;">
|
||||||
<button class="btn btn-test" id="btnTest">测试连接</button>
|
<button class="btn btn-test" id="btnTest" style="flex: 1 1 45%;">测试 MySQL</button>
|
||||||
<button class="btn btn-add" id="btnInit">初始化数据库</button>
|
<button class="btn btn-test" id="btnTestValkey" style="flex: 1 1 45%;">测试 Valkey</button>
|
||||||
|
<button class="btn btn-add" id="btnInit" style="flex: 1 1 100%;">确认并初始化系统</button>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
@@ -165,8 +215,8 @@
|
|||||||
</div>
|
</div>
|
||||||
<div class="form-row">
|
<div class="form-row">
|
||||||
<div class="form-group form-group-wide">
|
<div class="form-group form-group-wide">
|
||||||
<label for="promName">数据源名称</label>
|
<label for="promSourceName">数据源名称</label>
|
||||||
<input type="text" id="promName" placeholder="例如:生产环境" autocomplete="off">
|
<input type="text" id="promSourceName" name="p-source-name-init" placeholder="例如:生产环境" autocomplete="one-time-code">
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
|||||||
1575
public/js/app.js
1575
public/js/app.js
File diff suppressed because it is too large
Load Diff
@@ -9,10 +9,18 @@ class AreaChart {
|
|||||||
this.data = { timestamps: [], rx: [], tx: [] };
|
this.data = { timestamps: [], rx: [], tx: [] };
|
||||||
this.animProgress = 0;
|
this.animProgress = 0;
|
||||||
this.animFrame = null;
|
this.animFrame = null;
|
||||||
|
this.showP95 = false;
|
||||||
|
this.showRx = true;
|
||||||
|
this.showTx = true;
|
||||||
|
this.p95Type = 'tx'; // 'tx', 'rx', 'both'
|
||||||
this.dpr = window.devicePixelRatio || 1;
|
this.dpr = window.devicePixelRatio || 1;
|
||||||
this.padding = { top: 20, right: 16, bottom: 32, left: 56 };
|
this.padding = { top: 20, right: 16, bottom: 32, left: 56 };
|
||||||
|
|
||||||
this._resize = this.resize.bind(this);
|
this.prevMaxVal = 0;
|
||||||
|
this.currentMaxVal = 0;
|
||||||
|
|
||||||
|
// Use debounced resize for performance and safety
|
||||||
|
this._resize = typeof debounce === 'function' ? debounce(this.resize.bind(this), 100) : this.resize.bind(this);
|
||||||
window.addEventListener('resize', this._resize);
|
window.addEventListener('resize', this._resize);
|
||||||
this.resize();
|
this.resize();
|
||||||
}
|
}
|
||||||
@@ -32,6 +40,21 @@ class AreaChart {
|
|||||||
setData(data) {
|
setData(data) {
|
||||||
if (!data || !data.timestamps) return;
|
if (!data || !data.timestamps) return;
|
||||||
|
|
||||||
|
// Store old data for smooth transition before updating this.data
|
||||||
|
// Only clone if there is data to clone; otherwise use empty set
|
||||||
|
if (this.data && this.data.timestamps && this.data.timestamps.length > 0) {
|
||||||
|
this.prevData = {
|
||||||
|
timestamps: [...this.data.timestamps],
|
||||||
|
rx: [...this.data.rx],
|
||||||
|
tx: [...this.data.tx]
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
this.prevData = { timestamps: [], rx: [], tx: [] };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Smoothly transition max value context too
|
||||||
|
this.prevMaxVal = this.currentMaxVal || 0;
|
||||||
|
|
||||||
// Downsample if data is too dense (target ~1500 points for performance)
|
// Downsample if data is too dense (target ~1500 points for performance)
|
||||||
const MAX_POINTS = 1500;
|
const MAX_POINTS = 1500;
|
||||||
if (data.timestamps.length > MAX_POINTS) {
|
if (data.timestamps.length > MAX_POINTS) {
|
||||||
@@ -47,10 +70,24 @@ class AreaChart {
|
|||||||
this.data = data;
|
this.data = data;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Refresh currentMaxVal target for interpolation in draw()
|
||||||
|
let rawMax = 1024;
|
||||||
|
for (let i = 0; i < this.data.rx.length; i++) {
|
||||||
|
if (this.showRx) rawMax = Math.max(rawMax, this.data.rx[i] || 0);
|
||||||
|
if (this.showTx) rawMax = Math.max(rawMax, this.data.tx[i] || 0);
|
||||||
|
}
|
||||||
|
this.currentMaxVal = rawMax;
|
||||||
|
|
||||||
// Calculate P95 (95th percentile)
|
// Calculate P95 (95th percentile)
|
||||||
// Common standard: 95th percentile of the peak (max of rx/tx or sum)
|
let combined = [];
|
||||||
// We'll use max(rx, tx) at each point which is common for billing
|
if (this.p95Type === 'tx') {
|
||||||
const combined = data.rx.map((r, i) => Math.max(r || 0, data.tx[i] || 0));
|
combined = data.tx.map(t => t || 0);
|
||||||
|
} else if (this.p95Type === 'rx') {
|
||||||
|
combined = data.rx.map(r => r || 0);
|
||||||
|
} else {
|
||||||
|
combined = data.tx.map((t, i) => (t || 0) + (data.rx[i] || 0));
|
||||||
|
}
|
||||||
|
|
||||||
if (combined.length > 0) {
|
if (combined.length > 0) {
|
||||||
const sorted = [...combined].sort((a, b) => a - b);
|
const sorted = [...combined].sort((a, b) => a - b);
|
||||||
const p95Idx = Math.floor(sorted.length * 0.95);
|
const p95Idx = Math.floor(sorted.length * 0.95);
|
||||||
@@ -102,13 +139,15 @@ class AreaChart {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Find max raw value
|
// Determine consistent unit based on max data value
|
||||||
let maxDataVal = 0;
|
let maxDataVal = 1024;
|
||||||
for (let i = 0; i < rx.length; i++) {
|
if (this.prevMaxVal && this.animProgress < 1) {
|
||||||
maxDataVal = Math.max(maxDataVal, rx[i] || 0, tx[i] || 0);
|
// Interpolate the max value context to keep vertical scale smooth
|
||||||
|
maxDataVal = this.prevMaxVal + (this.currentMaxVal - this.prevMaxVal) * (this.animProgress || 0);
|
||||||
|
} else {
|
||||||
|
maxDataVal = this.currentMaxVal;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Determine consistent unit based on max data value
|
|
||||||
const k = 1024;
|
const k = 1024;
|
||||||
const sizes = ['B/s', 'KB/s', 'MB/s', 'GB/s', 'TB/s'];
|
const sizes = ['B/s', 'KB/s', 'MB/s', 'GB/s', 'TB/s'];
|
||||||
let unitIdx = Math.floor(Math.log(Math.max(1, maxDataVal)) / Math.log(k));
|
let unitIdx = Math.floor(Math.log(Math.max(1, maxDataVal)) / Math.log(k));
|
||||||
@@ -138,9 +177,13 @@ class AreaChart {
|
|||||||
const len = timestamps.length;
|
const len = timestamps.length;
|
||||||
const xStep = chartW / (len - 1);
|
const xStep = chartW / (len - 1);
|
||||||
|
|
||||||
// Helper to get point
|
// Helper to get point with smooth value transition
|
||||||
const getX = (i) => p.left + i * xStep;
|
const getX = (i) => p.left + i * xStep;
|
||||||
const getY = (val) => p.top + chartH - (val / (maxVal || 1)) * chartH * this.animProgress;
|
const getY = (val, prevVal = 0) => {
|
||||||
|
// Interpolate value from previous state to new state
|
||||||
|
const actualVal = prevVal + (val - prevVal) * this.animProgress;
|
||||||
|
return p.top + chartH - (actualVal / (maxVal || 1)) * chartH;
|
||||||
|
};
|
||||||
|
|
||||||
// Draw grid lines
|
// Draw grid lines
|
||||||
ctx.strokeStyle = 'rgba(99, 102, 241, 0.08)';
|
ctx.strokeStyle = 'rgba(99, 102, 241, 0.08)';
|
||||||
@@ -176,18 +219,24 @@ class AreaChart {
|
|||||||
// Always show last label
|
// Always show last label
|
||||||
ctx.fillText(formatTime(timestamps[len - 1]), getX(len - 1), h - 8);
|
ctx.fillText(formatTime(timestamps[len - 1]), getX(len - 1), h - 8);
|
||||||
|
|
||||||
|
const getPVal = (arr, i) => (arr && i < arr.length) ? arr[i] : 0;
|
||||||
|
|
||||||
// Draw TX area
|
// Draw TX area
|
||||||
this.drawArea(ctx, tx, getX, getY, chartH, p,
|
if (this.showTx) {
|
||||||
|
this.drawArea(ctx, tx, this.prevData ? this.prevData.tx : null, getX, getY, chartH, p,
|
||||||
'rgba(99, 102, 241, 0.25)', 'rgba(99, 102, 241, 0.02)',
|
'rgba(99, 102, 241, 0.25)', 'rgba(99, 102, 241, 0.02)',
|
||||||
'#6366f1', len);
|
'#6366f1', len);
|
||||||
|
}
|
||||||
|
|
||||||
// Draw RX area (on top)
|
// Draw RX area (on top)
|
||||||
this.drawArea(ctx, rx, getX, getY, chartH, p,
|
if (this.showRx) {
|
||||||
|
this.drawArea(ctx, rx, this.prevData ? this.prevData.rx : null, getX, getY, chartH, p,
|
||||||
'rgba(6, 182, 212, 0.25)', 'rgba(6, 182, 212, 0.02)',
|
'rgba(6, 182, 212, 0.25)', 'rgba(6, 182, 212, 0.02)',
|
||||||
'#06b6d4', len);
|
'#06b6d4', len);
|
||||||
|
}
|
||||||
|
|
||||||
// Draw P95 line
|
// Draw P95 line
|
||||||
if (this.p95 && this.animProgress === 1) {
|
if (this.showP95 && this.p95 && this.animProgress === 1) {
|
||||||
const p95Y = getY(this.p95);
|
const p95Y = getY(this.p95);
|
||||||
// Only draw if within visible range
|
// Only draw if within visible range
|
||||||
if (p95Y >= p.top && p95Y <= p.top + chartH) {
|
if (p95Y >= p.top && p95Y <= p.top + chartH) {
|
||||||
@@ -216,22 +265,23 @@ class AreaChart {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
drawArea(ctx, values, getX, getY, chartH, p, fillColorTop, fillColorBottom, strokeColor, len) {
|
drawArea(ctx, values, prevValues, getX, getY, chartH, p, fillColorTop, fillColorBottom, strokeColor, len) {
|
||||||
if (!values || values.length === 0) return;
|
if (!values || values.length === 0) return;
|
||||||
|
|
||||||
const useSimple = len > 500;
|
const useSimple = len > 250;
|
||||||
|
const getPVal = (i) => (prevValues && i < prevValues.length) ? prevValues[i] : 0;
|
||||||
|
|
||||||
// Fill
|
// Fill
|
||||||
ctx.beginPath();
|
ctx.beginPath();
|
||||||
ctx.moveTo(getX(0), getY(values[0] || 0));
|
ctx.moveTo(getX(0), getY(values[0] || 0, getPVal(0)));
|
||||||
for (let i = 1; i < len; i++) {
|
for (let i = 1; i < len; i++) {
|
||||||
|
const currY = getY(values[i] || 0, getPVal(i));
|
||||||
if (useSimple) {
|
if (useSimple) {
|
||||||
ctx.lineTo(getX(i), getY(values[i] || 0));
|
ctx.lineTo(getX(i), currY);
|
||||||
} else {
|
} else {
|
||||||
const prevX = getX(i - 1);
|
const prevX = getX(i - 1);
|
||||||
const currX = getX(i);
|
const currX = getX(i);
|
||||||
const prevY = getY(values[i - 1] || 0);
|
const prevY = getY(values[i - 1] || 0, getPVal(i - 1));
|
||||||
const currY = getY(values[i] || 0);
|
|
||||||
const midX = (prevX + currX) / 2;
|
const midX = (prevX + currX) / 2;
|
||||||
ctx.bezierCurveTo(midX, prevY, midX, currY, currX, currY);
|
ctx.bezierCurveTo(midX, prevY, midX, currY, currX, currY);
|
||||||
}
|
}
|
||||||
@@ -248,15 +298,15 @@ class AreaChart {
|
|||||||
|
|
||||||
// Stroke
|
// Stroke
|
||||||
ctx.beginPath();
|
ctx.beginPath();
|
||||||
ctx.moveTo(getX(0), getY(values[0] || 0));
|
ctx.moveTo(getX(0), getY(values[0] || 0, getPVal(0)));
|
||||||
for (let i = 1; i < len; i++) {
|
for (let i = 1; i < len; i++) {
|
||||||
|
const currY = getY(values[i] || 0, getPVal(i));
|
||||||
if (useSimple) {
|
if (useSimple) {
|
||||||
ctx.lineTo(getX(i), getY(values[i] || 0));
|
ctx.lineTo(getX(i), currY);
|
||||||
} else {
|
} else {
|
||||||
const prevX = getX(i - 1);
|
const prevX = getX(i - 1);
|
||||||
const currX = getX(i);
|
const currX = getX(i);
|
||||||
const prevY = getY(values[i - 1] || 0);
|
const prevY = getY(values[i - 1] || 0, getPVal(i - 1));
|
||||||
const currY = getY(values[i] || 0);
|
|
||||||
const midX = (prevX + currX) / 2;
|
const midX = (prevX + currX) / 2;
|
||||||
ctx.bezierCurveTo(midX, prevY, midX, currY, currX, currY);
|
ctx.bezierCurveTo(midX, prevY, midX, currY, currX, currY);
|
||||||
}
|
}
|
||||||
@@ -272,3 +322,272 @@ class AreaChart {
|
|||||||
if (this.animFrame) cancelAnimationFrame(this.animFrame);
|
if (this.animFrame) cancelAnimationFrame(this.animFrame);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
class MetricChart {
|
||||||
|
constructor(canvas, unit = '') {
|
||||||
|
this.canvas = canvas;
|
||||||
|
this.ctx = canvas.getContext('2d');
|
||||||
|
this.data = { timestamps: [], values: [], series: null };
|
||||||
|
this.unit = unit; // '%', 'B/s', etc.
|
||||||
|
this.dpr = window.devicePixelRatio || 1;
|
||||||
|
this.padding = { top: 10, right: 10, bottom: 35, left: 60 };
|
||||||
|
this.animProgress = 0;
|
||||||
|
|
||||||
|
this.prevMaxVal = 0;
|
||||||
|
this.currentMaxVal = 0;
|
||||||
|
|
||||||
|
// Use debounced resize for performance and safety
|
||||||
|
this._resize = typeof debounce === 'function' ? debounce(this.resize.bind(this), 100) : this.resize.bind(this);
|
||||||
|
window.addEventListener('resize', this._resize);
|
||||||
|
this.resize();
|
||||||
|
}
|
||||||
|
|
||||||
|
resize() {
|
||||||
|
const parent = this.canvas.parentElement;
|
||||||
|
if (!parent) return;
|
||||||
|
const rect = parent.getBoundingClientRect();
|
||||||
|
if (rect.width === 0) return;
|
||||||
|
this.width = rect.width;
|
||||||
|
this.height = rect.height;
|
||||||
|
this.canvas.width = this.width * this.dpr;
|
||||||
|
this.canvas.height = this.height * this.dpr;
|
||||||
|
this.canvas.style.width = this.width + 'px';
|
||||||
|
this.canvas.style.height = this.height + 'px';
|
||||||
|
this.ctx.setTransform(this.dpr, 0, 0, this.dpr, 0, 0);
|
||||||
|
this.draw();
|
||||||
|
}
|
||||||
|
|
||||||
|
setData(data) {
|
||||||
|
if (this.data && this.data.values && this.data.values.length > 0) {
|
||||||
|
this.prevData = JSON.parse(JSON.stringify(this.data));
|
||||||
|
} else {
|
||||||
|
this.prevData = { timestamps: [], values: [], series: null };
|
||||||
|
}
|
||||||
|
|
||||||
|
this.prevMaxVal = this.currentMaxVal || 0;
|
||||||
|
this.data = data || { timestamps: [], values: [], series: null };
|
||||||
|
|
||||||
|
// Target max
|
||||||
|
if (this.data.series) {
|
||||||
|
this.currentMaxVal = 100;
|
||||||
|
} else {
|
||||||
|
const raw = Math.max(...(this.data.values || []), 0.1);
|
||||||
|
if (this.unit === '%' && raw <= 100) {
|
||||||
|
if (raw > 80) this.currentMaxVal = 100;
|
||||||
|
else if (raw > 40) this.currentMaxVal = 80;
|
||||||
|
else if (raw > 20) this.currentMaxVal = 50;
|
||||||
|
else this.currentMaxVal = 25;
|
||||||
|
} else {
|
||||||
|
this.currentMaxVal = raw * 1.25;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.animate();
|
||||||
|
}
|
||||||
|
|
||||||
|
animate() {
|
||||||
|
if (this.animFrame) cancelAnimationFrame(this.animFrame);
|
||||||
|
const start = performance.now();
|
||||||
|
const duration = 500;
|
||||||
|
const step = (now) => {
|
||||||
|
const elapsed = now - start;
|
||||||
|
this.animProgress = Math.min(elapsed / duration, 1);
|
||||||
|
this.animProgress = 1 - Math.pow(1 - this.animProgress, 3);
|
||||||
|
this.draw();
|
||||||
|
if (elapsed < duration) this.animFrame = requestAnimationFrame(step);
|
||||||
|
};
|
||||||
|
this.animFrame = requestAnimationFrame(step);
|
||||||
|
}
|
||||||
|
|
||||||
|
draw() {
|
||||||
|
const ctx = this.ctx;
|
||||||
|
const w = this.width;
|
||||||
|
const h = this.height;
|
||||||
|
const p = this.padding;
|
||||||
|
const chartW = w - p.left - p.right;
|
||||||
|
const chartH = h - p.top - p.bottom;
|
||||||
|
|
||||||
|
ctx.clearRect(0, 0, w, h);
|
||||||
|
|
||||||
|
const { timestamps, values, series } = this.data;
|
||||||
|
if (!timestamps || timestamps.length < 2) {
|
||||||
|
ctx.fillStyle = '#5a6380';
|
||||||
|
ctx.font = '11px sans-serif';
|
||||||
|
ctx.textAlign = 'center';
|
||||||
|
ctx.fillText('正在加载或暂无数据...', w / 2, h / 2);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine Y max (interpolated)
|
||||||
|
const targetMax = this.currentMaxVal || 0.1;
|
||||||
|
const startMax = this.prevMaxVal || targetMax;
|
||||||
|
const maxVal = startMax + (targetMax - startMax) * this.animProgress;
|
||||||
|
|
||||||
|
const len = timestamps.length;
|
||||||
|
const xStep = chartW / (len - 1);
|
||||||
|
const getX = (i) => p.left + i * xStep;
|
||||||
|
const getY = (val, prevVal = 0) => {
|
||||||
|
const actualVal = prevVal + (val - prevVal) * this.animProgress;
|
||||||
|
return p.top + chartH - (actualVal / (maxVal || 1)) * chartH;
|
||||||
|
};
|
||||||
|
|
||||||
|
// Grid
|
||||||
|
ctx.strokeStyle = 'rgba(99, 102, 241, 0.05)';
|
||||||
|
ctx.lineWidth = 1;
|
||||||
|
for (let i = 0; i <= 3; i++) {
|
||||||
|
const y = p.top + (chartH / 3) * i;
|
||||||
|
ctx.beginPath();
|
||||||
|
ctx.moveTo(p.left, y);
|
||||||
|
ctx.lineTo(p.left + chartW, y);
|
||||||
|
ctx.stroke();
|
||||||
|
|
||||||
|
const v = (maxVal * (1 - i / 3));
|
||||||
|
ctx.fillStyle = '#5a6380';
|
||||||
|
ctx.font = '9px "JetBrains Mono", monospace';
|
||||||
|
ctx.textAlign = 'right';
|
||||||
|
|
||||||
|
let label = '';
|
||||||
|
if (this.unit === 'B/s' || this.unit === 'B') {
|
||||||
|
const isRate = this.unit === 'B/s';
|
||||||
|
if (window.formatBandwidth && isRate) {
|
||||||
|
label = window.formatBandwidth(v);
|
||||||
|
} else if (window.formatBytes) {
|
||||||
|
label = window.formatBytes(v) + (isRate ? '/s' : '');
|
||||||
|
} else {
|
||||||
|
label = v.toFixed(0) + this.unit;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
label = (v >= 1000 ? (v / 1000).toFixed(1) + 'k' : v.toFixed(v < 10 && v > 0 ? 1 : 0)) + this.unit;
|
||||||
|
}
|
||||||
|
ctx.fillText(label, p.left - 8, y + 3);
|
||||||
|
}
|
||||||
|
|
||||||
|
// X-axis Timeline
|
||||||
|
ctx.fillStyle = '#5a6380';
|
||||||
|
ctx.font = '9px "JetBrains Mono", monospace';
|
||||||
|
ctx.textAlign = 'center';
|
||||||
|
const labelInterval = Math.max(1, Math.floor(len / 5));
|
||||||
|
for (let i = 0; i < len; i += labelInterval) {
|
||||||
|
const x = getX(i);
|
||||||
|
ctx.fillText(formatTime(timestamps[i]), x, h - 8);
|
||||||
|
}
|
||||||
|
// Always show last label if not already shown
|
||||||
|
if ((len - 1) % labelInterval !== 0) {
|
||||||
|
ctx.fillText(formatTime(timestamps[len - 1]), getX(len - 1), h - 8);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (series) {
|
||||||
|
// Draw Stacked Area
|
||||||
|
const modes = [
|
||||||
|
{ name: 'idle', color: 'rgba(34, 197, 94, 0.4)', stroke: '#22c55e' }, // Green
|
||||||
|
{ name: 'other', color: 'rgba(168, 85, 247, 0.4)', stroke: '#a855f7' }, // Purple
|
||||||
|
{ name: 'irq', color: 'rgba(249, 115, 22, 0.4)', stroke: '#f97316' }, // Orange
|
||||||
|
{ name: 'iowait', color: 'rgba(239, 68, 68, 0.4)', stroke: '#ef4444' }, // Red
|
||||||
|
{ name: 'system', color: 'rgba(234, 179, 8, 0.4)', stroke: '#eab308' }, // Yellow
|
||||||
|
{ name: 'user', color: 'rgba(99, 102, 241, 0.4)', stroke: '#6366f1' } // Indigo
|
||||||
|
];
|
||||||
|
|
||||||
|
let currentBase = new Array(len).fill(0);
|
||||||
|
let prevBase = new Array(len).fill(0);
|
||||||
|
|
||||||
|
modes.forEach(mode => {
|
||||||
|
const vals = series[mode.name];
|
||||||
|
if (!vals) return;
|
||||||
|
|
||||||
|
const prevVals = (this.prevData && this.prevData.series) ? this.prevData.series[mode.name] : null;
|
||||||
|
const getPVal = (arr, idx) => (arr && idx < arr.length) ? arr[idx] : 0;
|
||||||
|
|
||||||
|
ctx.beginPath();
|
||||||
|
ctx.moveTo(getX(0), getY(currentBase[0] + vals[0], getPVal(prevBase, 0) + getPVal(prevVals, 0)));
|
||||||
|
for (let i = 1; i < len; i++) {
|
||||||
|
ctx.lineTo(getX(i), getY(currentBase[i] + vals[i], getPVal(prevBase, i) + getPVal(prevVals, i)));
|
||||||
|
}
|
||||||
|
ctx.lineTo(getX(len - 1), getY(currentBase[len - 1], getPVal(prevBase, len - 1)));
|
||||||
|
for (let i = len - 1; i >= 0; i--) {
|
||||||
|
ctx.lineTo(getX(i), getY(currentBase[i], getPVal(prevBase, i)));
|
||||||
|
}
|
||||||
|
ctx.closePath();
|
||||||
|
ctx.fillStyle = mode.color;
|
||||||
|
ctx.fill();
|
||||||
|
|
||||||
|
// Stroke
|
||||||
|
ctx.beginPath();
|
||||||
|
ctx.moveTo(getX(0), getY(currentBase[0] + vals[0], getPVal(prevBase, 0) + getPVal(prevVals, 0)));
|
||||||
|
for (let i = 1; i < len; i++) {
|
||||||
|
ctx.lineTo(getX(i), getY(currentBase[i] + vals[i], getPVal(prevBase, i) + getPVal(prevVals, i)));
|
||||||
|
}
|
||||||
|
ctx.strokeStyle = mode.stroke;
|
||||||
|
ctx.lineWidth = 1;
|
||||||
|
ctx.stroke();
|
||||||
|
|
||||||
|
// Update boxes for next series
|
||||||
|
for (let i = 0; i < len; i++) {
|
||||||
|
currentBase[i] += vals[i];
|
||||||
|
if (prevBase) prevBase[i] = (prevBase[i] || 0) + getPVal(prevVals, i);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Add Legend at bottom right (moved up slightly)
|
||||||
|
ctx.font = '9px sans-serif';
|
||||||
|
ctx.textAlign = 'right';
|
||||||
|
let lx = w - 10;
|
||||||
|
let ly = h - 20; // Increased padding from bottom
|
||||||
|
[...modes].reverse().forEach(m => {
|
||||||
|
ctx.fillStyle = m.stroke;
|
||||||
|
ctx.fillRect(lx - 10, ly - 8, 8, 8);
|
||||||
|
ctx.fillStyle = '#5a6380';
|
||||||
|
ctx.fillText(m.name.charAt(0).toUpperCase() + m.name.slice(1), lx - 15, ly - 1);
|
||||||
|
lx -= 70; // Increased gap for safety
|
||||||
|
});
|
||||||
|
|
||||||
|
} else {
|
||||||
|
const useSimple = len > 250;
|
||||||
|
const prevVals = this.prevData ? this.prevData.values : null;
|
||||||
|
const getPVal = (i) => (prevVals && i < prevVals.length) ? prevVals[i] : 0;
|
||||||
|
|
||||||
|
ctx.beginPath();
|
||||||
|
ctx.moveTo(getX(0), getY(values[0], getPVal(0)));
|
||||||
|
for (let i = 1; i < len; i++) {
|
||||||
|
const currY = getY(values[i], getPVal(i));
|
||||||
|
if (useSimple) {
|
||||||
|
ctx.lineTo(getX(i), currY);
|
||||||
|
} else {
|
||||||
|
const prevX = getX(i - 1);
|
||||||
|
const currX = getX(i);
|
||||||
|
const prevY = getY(values[i - 1], getPVal(i - 1));
|
||||||
|
const midX = (prevX + currX) / 2;
|
||||||
|
ctx.bezierCurveTo(midX, prevY, midX, currY, currX, currY);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stroke
|
||||||
|
ctx.strokeStyle = '#6366f1';
|
||||||
|
ctx.lineWidth = 2;
|
||||||
|
ctx.lineJoin = 'round';
|
||||||
|
ctx.stroke();
|
||||||
|
|
||||||
|
// Fill
|
||||||
|
ctx.lineTo(getX(len - 1), p.top + chartH);
|
||||||
|
ctx.lineTo(getX(0), p.top + chartH);
|
||||||
|
ctx.closePath();
|
||||||
|
const grad = ctx.createLinearGradient(0, p.top, 0, p.top + chartH);
|
||||||
|
grad.addColorStop(0, 'rgba(99, 102, 241, 0.15)');
|
||||||
|
grad.addColorStop(1, 'rgba(99, 102, 241, 0)');
|
||||||
|
ctx.fillStyle = grad;
|
||||||
|
ctx.fill();
|
||||||
|
|
||||||
|
// Last point pulse
|
||||||
|
const lastX = getX(len - 1);
|
||||||
|
const lastY = getY(values[len - 1]);
|
||||||
|
ctx.beginPath();
|
||||||
|
ctx.arc(lastX, lastY, 3, 0, Math.PI * 2);
|
||||||
|
ctx.fillStyle = '#6366f1';
|
||||||
|
ctx.fill();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
destroy() {
|
||||||
|
window.removeEventListener('resize', this._resize);
|
||||||
|
if (this.animFrame) cancelAnimationFrame(this.animFrame);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -4,8 +4,12 @@ document.addEventListener('DOMContentLoaded', () => {
|
|||||||
const userInput = document.getElementById('user');
|
const userInput = document.getElementById('user');
|
||||||
const passwordInput = document.getElementById('password');
|
const passwordInput = document.getElementById('password');
|
||||||
const databaseInput = document.getElementById('database');
|
const databaseInput = document.getElementById('database');
|
||||||
|
const vHostInput = document.getElementById('vHost');
|
||||||
|
const vPortInput = document.getElementById('vPort');
|
||||||
|
const vPasswordInput = document.getElementById('vPassword');
|
||||||
|
|
||||||
const btnTest = document.getElementById('btnTest');
|
const btnTest = document.getElementById('btnTest');
|
||||||
|
const btnTestValkey = document.getElementById('btnTestValkey');
|
||||||
const btnInit = document.getElementById('btnInit');
|
const btnInit = document.getElementById('btnInit');
|
||||||
const messageBox = document.getElementById('messageBox');
|
const messageBox = document.getElementById('messageBox');
|
||||||
|
|
||||||
@@ -14,7 +18,7 @@ document.addEventListener('DOMContentLoaded', () => {
|
|||||||
|
|
||||||
const promForm = document.getElementById('promForm');
|
const promForm = document.getElementById('promForm');
|
||||||
const initForm = document.getElementById('initForm');
|
const initForm = document.getElementById('initForm');
|
||||||
const promName = document.getElementById('promName');
|
const promName = document.getElementById('promSourceName');
|
||||||
const promUrl = document.getElementById('promUrl');
|
const promUrl = document.getElementById('promUrl');
|
||||||
const promDesc = document.getElementById('promDesc');
|
const promDesc = document.getElementById('promDesc');
|
||||||
const btnPromTest = document.getElementById('btnPromTest');
|
const btnPromTest = document.getElementById('btnPromTest');
|
||||||
@@ -65,6 +69,7 @@ document.addEventListener('DOMContentLoaded', () => {
|
|||||||
promForm.style.display = 'block';
|
promForm.style.display = 'block';
|
||||||
initHeaderTitle.textContent = '配置 Prometheus';
|
initHeaderTitle.textContent = '配置 Prometheus';
|
||||||
initHeaderDesc.textContent = '配置您的第一个 Prometheus 数据源监控连接';
|
initHeaderDesc.textContent = '配置您的第一个 Prometheus 数据源监控连接';
|
||||||
|
if (promName) promName.value = ''; // Ensure it's clear on load
|
||||||
}
|
}
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
initForm.style.display = 'block';
|
initForm.style.display = 'block';
|
||||||
@@ -102,6 +107,34 @@ document.addEventListener('DOMContentLoaded', () => {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
btnTestValkey.addEventListener('click', async () => {
|
||||||
|
btnTestValkey.disabled = true;
|
||||||
|
const oldText = btnTestValkey.textContent;
|
||||||
|
btnTestValkey.textContent = '测试中...';
|
||||||
|
try {
|
||||||
|
const res = await fetch('/api/setup/test-valkey', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify({
|
||||||
|
host: vHostInput.value,
|
||||||
|
port: vPortInput.value,
|
||||||
|
password: vPasswordInput.value
|
||||||
|
})
|
||||||
|
});
|
||||||
|
const data = await res.json();
|
||||||
|
if (data.success) {
|
||||||
|
showMessage('Valkey 连接成功!');
|
||||||
|
} else {
|
||||||
|
showMessage('Valkey 连接失败: ' + (data.error || '未知错误'), true);
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
showMessage('Valkey 请求失败: ' + err.message, true);
|
||||||
|
} finally {
|
||||||
|
btnTestValkey.disabled = false;
|
||||||
|
btnTestValkey.textContent = oldText;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
btnInit.addEventListener('click', async () => {
|
btnInit.addEventListener('click', async () => {
|
||||||
btnInit.disabled = true;
|
btnInit.disabled = true;
|
||||||
const oldText = btnInit.textContent;
|
const oldText = btnInit.textContent;
|
||||||
@@ -115,7 +148,10 @@ document.addEventListener('DOMContentLoaded', () => {
|
|||||||
port: portInput.value,
|
port: portInput.value,
|
||||||
user: userInput.value,
|
user: userInput.value,
|
||||||
password: passwordInput.value,
|
password: passwordInput.value,
|
||||||
database: databaseInput.value
|
database: databaseInput.value,
|
||||||
|
vHost: vHostInput.value,
|
||||||
|
vPort: vPortInput.value,
|
||||||
|
vPassword: vPasswordInput.value
|
||||||
})
|
})
|
||||||
});
|
});
|
||||||
const data = await res.json();
|
const data = await res.json();
|
||||||
|
|||||||
@@ -28,6 +28,15 @@ function formatBandwidth(bytesPerSec, decimals = 2) {
|
|||||||
return value.toFixed(decimals) + ' ' + sizes[i];
|
return value.toFixed(decimals) + ' ' + sizes[i];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert bytes per second to MB/s (numeric string)
|
||||||
|
*/
|
||||||
|
/**
 * Convert a transfer rate in bytes/second to megabytes/second,
 * rendered as a fixed-precision numeric string (no unit suffix).
 *
 * @param {number} bytesPerSec - Rate in bytes per second.
 * @param {number} [decimals=2] - Number of decimal places in the result.
 * @returns {string} The rate in MB/s, e.g. "1.50".
 */
function toMBps(bytesPerSec, decimals = 2) {
    // Falsy inputs (0, null, undefined, NaN) are treated as a zero rate.
    // Fix: format the zero case with toFixed(decimals) so it honours the
    // requested precision — the previous hard-coded '0.00' ignored `decimals`.
    if (!bytesPerSec) return (0).toFixed(decimals);
    const mbps = bytesPerSec / (1024 * 1024);
    return mbps.toFixed(decimals);
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Format percentage
|
* Format percentage
|
||||||
*/
|
*/
|
||||||
@@ -102,3 +111,17 @@ function animateValue(element, start, end, duration = 600) {
|
|||||||
|
|
||||||
requestAnimationFrame(update);
|
requestAnimationFrame(update);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Debounce function to limit execution frequency
|
||||||
|
*/
|
||||||
|
function debounce(fn, delay) {
|
||||||
|
let timer = null;
|
||||||
|
return function (...args) {
|
||||||
|
if (timer) clearTimeout(timer);
|
||||||
|
timer = setTimeout(() => {
|
||||||
|
fn.apply(this, args);
|
||||||
|
timer = null;
|
||||||
|
}, delay);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,12 +1,18 @@
|
|||||||
const Redis = require('ioredis');
|
const Redis = require('ioredis');
|
||||||
|
|
||||||
|
let redis = null;
|
||||||
|
let ttl = 30;
|
||||||
|
|
||||||
|
function init() {
|
||||||
|
if (redis) {
|
||||||
|
redis.disconnect();
|
||||||
|
}
|
||||||
|
|
||||||
const host = process.env.VALKEY_HOST || 'localhost';
|
const host = process.env.VALKEY_HOST || 'localhost';
|
||||||
const port = parseInt(process.env.VALKEY_PORT) || 6379;
|
const port = parseInt(process.env.VALKEY_PORT) || 6379;
|
||||||
const password = process.env.VALKEY_PASSWORD || undefined;
|
const password = process.env.VALKEY_PASSWORD || undefined;
|
||||||
const db = parseInt(process.env.VALKEY_DB) || 0;
|
const db = parseInt(process.env.VALKEY_DB) || 0;
|
||||||
const ttl = parseInt(process.env.VALKEY_TTL) || 30;
|
ttl = parseInt(process.env.VALKEY_TTL) || 30;
|
||||||
|
|
||||||
let redis = null;
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
redis = new Redis({
|
redis = new Redis({
|
||||||
@@ -25,8 +31,12 @@ try {
|
|||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.warn('[Cache] Valkey init failed:', err.message);
|
console.warn('[Cache] Valkey init failed:', err.message);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
init();
|
||||||
|
|
||||||
const cache = {
|
const cache = {
|
||||||
|
init,
|
||||||
async get(key) {
|
async get(key) {
|
||||||
if (!redis) return null;
|
if (!redis) return null;
|
||||||
try {
|
try {
|
||||||
@@ -53,6 +63,17 @@ const cache = {
|
|||||||
} catch (e) {
|
} catch (e) {
|
||||||
// ignore
|
// ignore
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
async checkHealth() {
|
||||||
|
if (!redis) return { status: 'down', error: 'Valkey client not initialized' };
|
||||||
|
try {
|
||||||
|
const result = await redis.ping();
|
||||||
|
if (result === 'PONG') return { status: 'up' };
|
||||||
|
return { status: 'down', error: 'Invalid ping response' };
|
||||||
|
} catch (e) {
|
||||||
|
return { status: 'down', error: e.message };
|
||||||
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@@ -13,20 +13,15 @@ const REQUIRED_TABLES = [
|
|||||||
'users',
|
'users',
|
||||||
'prometheus_sources',
|
'prometheus_sources',
|
||||||
'site_settings',
|
'site_settings',
|
||||||
'traffic_stats'
|
'traffic_stats',
|
||||||
|
'server_locations',
|
||||||
|
'latency_routes'
|
||||||
];
|
];
|
||||||
|
|
||||||
async function checkAndFixDatabase() {
|
async function checkAndFixDatabase() {
|
||||||
// Only run if .env is already configured
|
|
||||||
const envPath = path.join(__dirname, '..', '.env');
|
const envPath = path.join(__dirname, '..', '.env');
|
||||||
if (!fs.existsSync(envPath)) return;
|
if (!fs.existsSync(envPath)) return;
|
||||||
|
|
||||||
const dbHost = process.env.MYSQL_HOST || 'localhost';
|
|
||||||
const dbUser = process.env.MYSQL_USER || 'root';
|
|
||||||
const dbPass = process.env.MYSQL_PASSWORD || '';
|
|
||||||
const dbPort = parseInt(process.env.MYSQL_PORT) || 3306;
|
|
||||||
const dbName = process.env.MYSQL_DATABASE || 'display_wall';
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Check tables
|
// Check tables
|
||||||
const [rows] = await db.query("SHOW TABLES");
|
const [rows] = await db.query("SHOW TABLES");
|
||||||
@@ -35,36 +30,73 @@ async function checkAndFixDatabase() {
|
|||||||
const missingTables = REQUIRED_TABLES.filter(t => !existingTables.includes(t));
|
const missingTables = REQUIRED_TABLES.filter(t => !existingTables.includes(t));
|
||||||
|
|
||||||
if (missingTables.length > 0) {
|
if (missingTables.length > 0) {
|
||||||
console.log(`[Database Integrity] ⚠️ Missing tables: ${missingTables.join(', ')}`);
|
console.log(`[Database Integrity] ⚠️ Missing tables: ${missingTables.join(', ')}. Creating them...`);
|
||||||
await recreateDatabase(dbHost, dbPort, dbUser, dbPass, dbName);
|
|
||||||
} else {
|
for (const table of missingTables) {
|
||||||
// console.log(`[Database Integrity] ✅ All tables accounted for.`);
|
await createTable(table);
|
||||||
|
}
|
||||||
|
console.log(`[Database Integrity] ✅ Missing tables created.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for is_server_source and type in prometheus_sources
|
||||||
|
const [promColumns] = await db.query("SHOW COLUMNS FROM prometheus_sources");
|
||||||
|
const promColumnNames = promColumns.map(c => c.Field);
|
||||||
|
|
||||||
|
if (!promColumnNames.includes('is_server_source')) {
|
||||||
|
console.log(`[Database Integrity] ⚠️ Missing column 'is_server_source' in 'prometheus_sources'. Adding it...`);
|
||||||
|
await db.query("ALTER TABLE prometheus_sources ADD COLUMN is_server_source TINYINT(1) DEFAULT 1 AFTER description");
|
||||||
|
console.log(`[Database Integrity] ✅ Column 'is_server_source' added.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!promColumnNames.includes('type')) {
|
||||||
|
console.log(`[Database Integrity] ⚠️ Missing column 'type' in 'prometheus_sources'. Adding it...`);
|
||||||
|
await db.query("ALTER TABLE prometheus_sources ADD COLUMN type VARCHAR(50) DEFAULT 'prometheus' AFTER is_server_source");
|
||||||
|
console.log(`[Database Integrity] ✅ Column 'type' added.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for new columns in site_settings
|
||||||
|
const [columns] = await db.query("SHOW COLUMNS FROM site_settings");
|
||||||
|
const columnNames = columns.map(c => c.Field);
|
||||||
|
if (!columnNames.includes('show_95_bandwidth')) {
|
||||||
|
console.log(`[Database Integrity] ⚠️ Missing column 'show_95_bandwidth' in 'site_settings'. Adding it...`);
|
||||||
|
await db.query("ALTER TABLE site_settings ADD COLUMN show_95_bandwidth TINYINT(1) DEFAULT 0 AFTER default_theme");
|
||||||
|
console.log(`[Database Integrity] ✅ Column 'show_95_bandwidth' added.`);
|
||||||
|
}
|
||||||
|
if (!columnNames.includes('p95_type')) {
|
||||||
|
console.log(`[Database Integrity] ⚠️ Missing column 'p95_type' in 'site_settings'. Adding it...`);
|
||||||
|
await db.query("ALTER TABLE site_settings ADD COLUMN p95_type VARCHAR(20) DEFAULT 'tx' AFTER show_95_bandwidth");
|
||||||
|
console.log(`[Database Integrity] ✅ Column 'p95_type' added.`);
|
||||||
|
}
|
||||||
|
if (!columnNames.includes('blackbox_source_id')) {
|
||||||
|
console.log(`[Database Integrity] ⚠️ Missing column 'blackbox_source_id' in 'site_settings'. Adding it...`);
|
||||||
|
await db.query("ALTER TABLE site_settings ADD COLUMN blackbox_source_id INT AFTER p95_type");
|
||||||
|
console.log(`[Database Integrity] ✅ Column 'blackbox_source_id' added.`);
|
||||||
|
}
|
||||||
|
if (!columnNames.includes('latency_source')) {
|
||||||
|
console.log(`[Database Integrity] ⚠️ Missing column 'latency_source' in 'site_settings'. Adding it...`);
|
||||||
|
await db.query("ALTER TABLE site_settings ADD COLUMN latency_source VARCHAR(100) AFTER blackbox_source_id");
|
||||||
|
console.log(`[Database Integrity] ✅ Column 'latency_source' added.`);
|
||||||
|
}
|
||||||
|
if (!columnNames.includes('latency_dest')) {
|
||||||
|
console.log(`[Database Integrity] ⚠️ Missing column 'latency_dest' in 'site_settings'. Adding it...`);
|
||||||
|
await db.query("ALTER TABLE site_settings ADD COLUMN latency_dest VARCHAR(100) AFTER latency_source");
|
||||||
|
console.log(`[Database Integrity] ✅ Column 'latency_dest' added.`);
|
||||||
|
}
|
||||||
|
if (!columnNames.includes('latency_target')) {
|
||||||
|
console.log(`[Database Integrity] ⚠️ Missing column 'latency_target' in 'site_settings'. Adding it...`);
|
||||||
|
await db.query("ALTER TABLE site_settings ADD COLUMN latency_target VARCHAR(255) AFTER latency_dest");
|
||||||
|
console.log(`[Database Integrity] ✅ Column 'latency_target' added.`);
|
||||||
}
|
}
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
if (err.code === 'ER_BAD_DB_ERROR') {
|
|
||||||
console.log(`[Database Integrity] ⚠️ Database "${dbName}" does not exist.`);
|
|
||||||
await recreateDatabase(dbHost, dbPort, dbUser, dbPass, dbName);
|
|
||||||
} else {
|
|
||||||
console.error('[Database Integrity] ❌ Error checking integrity:', err.message);
|
console.error('[Database Integrity] ❌ Error checking integrity:', err.message);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
async function recreateDatabase(host, port, user, password, dbName) {
|
async function createTable(tableName) {
|
||||||
console.log(`[Database Integrity] 🔄 Re-initializing database "${dbName}"...`);
|
console.log(` - Creating table "${tableName}"...`);
|
||||||
|
switch (tableName) {
|
||||||
let connection;
|
case 'users':
|
||||||
try {
|
await db.query(`
|
||||||
connection = await mysql.createConnection({ host, port, user, password });
|
|
||||||
|
|
||||||
// Drop and create database
|
|
||||||
await connection.query(`DROP DATABASE IF EXISTS \`${dbName}\``);
|
|
||||||
await connection.query(`CREATE DATABASE \`${dbName}\` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci`);
|
|
||||||
await connection.query(`USE \`${dbName}\``);
|
|
||||||
|
|
||||||
// Recreate all tables
|
|
||||||
console.log(' - Creating table "users"...');
|
|
||||||
await connection.query(`
|
|
||||||
CREATE TABLE IF NOT EXISTS users (
|
CREATE TABLE IF NOT EXISTS users (
|
||||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
username VARCHAR(255) NOT NULL UNIQUE,
|
username VARCHAR(255) NOT NULL UNIQUE,
|
||||||
@@ -73,9 +105,9 @@ async function recreateDatabase(host, port, user, password, dbName) {
|
|||||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||||
`);
|
`);
|
||||||
|
break;
|
||||||
console.log(' - Creating table "prometheus_sources"...');
|
case 'prometheus_sources':
|
||||||
await connection.query(`
|
await db.query(`
|
||||||
CREATE TABLE IF NOT EXISTS prometheus_sources (
|
CREATE TABLE IF NOT EXISTS prometheus_sources (
|
||||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
name VARCHAR(255) NOT NULL,
|
name VARCHAR(255) NOT NULL,
|
||||||
@@ -85,25 +117,31 @@ async function recreateDatabase(host, port, user, password, dbName) {
|
|||||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
||||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||||
`);
|
`);
|
||||||
|
break;
|
||||||
console.log(' - Creating table "site_settings"...');
|
case 'site_settings':
|
||||||
await connection.query(`
|
await db.query(`
|
||||||
CREATE TABLE IF NOT EXISTS site_settings (
|
CREATE TABLE IF NOT EXISTS site_settings (
|
||||||
id INT PRIMARY KEY DEFAULT 1,
|
id INT PRIMARY KEY DEFAULT 1,
|
||||||
page_name VARCHAR(255) DEFAULT '数据可视化展示大屏',
|
page_name VARCHAR(255) DEFAULT '数据可视化展示大屏',
|
||||||
title VARCHAR(255) DEFAULT '数据可视化展示大屏',
|
title VARCHAR(255) DEFAULT '数据可视化展示大屏',
|
||||||
logo_url TEXT,
|
logo_url TEXT,
|
||||||
default_theme VARCHAR(20) DEFAULT 'dark',
|
default_theme VARCHAR(20) DEFAULT 'dark',
|
||||||
|
show_95_bandwidth TINYINT(1) DEFAULT 0,
|
||||||
|
p95_type VARCHAR(20) DEFAULT 'tx',
|
||||||
|
blackbox_source_id INT,
|
||||||
|
latency_source VARCHAR(100),
|
||||||
|
latency_dest VARCHAR(100),
|
||||||
|
latency_target VARCHAR(255),
|
||||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
||||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||||
`);
|
`);
|
||||||
await connection.query(`
|
await db.query(`
|
||||||
INSERT INTO site_settings (id, page_name, title, default_theme)
|
INSERT IGNORE INTO site_settings (id, page_name, title, default_theme, show_95_bandwidth)
|
||||||
VALUES (1, '数据可视化展示大屏', '数据可视化展示大屏', 'dark')
|
VALUES (1, '数据可视化展示大屏', '数据可视化展示大屏', 'dark', 0)
|
||||||
`);
|
`);
|
||||||
|
break;
|
||||||
console.log(' - Creating table "traffic_stats"...');
|
case 'traffic_stats':
|
||||||
await connection.query(`
|
await db.query(`
|
||||||
CREATE TABLE IF NOT EXISTS traffic_stats (
|
CREATE TABLE IF NOT EXISTS traffic_stats (
|
||||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
rx_bytes BIGINT UNSIGNED DEFAULT 0,
|
rx_bytes BIGINT UNSIGNED DEFAULT 0,
|
||||||
@@ -114,16 +152,34 @@ async function recreateDatabase(host, port, user, password, dbName) {
|
|||||||
UNIQUE INDEX (timestamp)
|
UNIQUE INDEX (timestamp)
|
||||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||||
`);
|
`);
|
||||||
|
break;
|
||||||
console.log(`[Database Integrity] ✅ Re-initialization complete.`);
|
case 'latency_routes':
|
||||||
|
await db.query(`
|
||||||
// Refresh db pool in the main app context
|
CREATE TABLE IF NOT EXISTS latency_routes (
|
||||||
db.initPool();
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
|
source_id INT NOT NULL,
|
||||||
} catch (err) {
|
latency_source VARCHAR(100) NOT NULL,
|
||||||
console.error('[Database Integrity] ❌ Critical failure during re-initialization:', err.message);
|
latency_dest VARCHAR(100) NOT NULL,
|
||||||
} finally {
|
latency_target VARCHAR(255) NOT NULL,
|
||||||
if (connection) await connection.end();
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
|
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||||
|
`);
|
||||||
|
break;
|
||||||
|
case 'server_locations':
|
||||||
|
await db.query(`
|
||||||
|
CREATE TABLE IF NOT EXISTS server_locations (
|
||||||
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
|
ip VARCHAR(255) NOT NULL UNIQUE,
|
||||||
|
country CHAR(2),
|
||||||
|
country_name VARCHAR(100),
|
||||||
|
region VARCHAR(100),
|
||||||
|
city VARCHAR(100),
|
||||||
|
latitude DOUBLE,
|
||||||
|
longitude DOUBLE,
|
||||||
|
last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
||||||
|
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||||
|
`);
|
||||||
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
13
server/db.js
13
server/db.js
@@ -18,9 +18,20 @@ function initPool() {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async function checkHealth() {
|
||||||
|
try {
|
||||||
|
if (!pool) return { status: 'down', error: 'Database pool not initialized' };
|
||||||
|
await pool.query('SELECT 1');
|
||||||
|
return { status: 'up' };
|
||||||
|
} catch (err) {
|
||||||
|
return { status: 'down', error: err.message };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
initPool();
|
initPool();
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
query: (...args) => pool.query(...args),
|
query: (...args) => pool.query(...args),
|
||||||
initPool
|
initPool,
|
||||||
|
checkHealth
|
||||||
};
|
};
|
||||||
|
|||||||
138
server/geo-service.js
Normal file
138
server/geo-service.js
Normal file
@@ -0,0 +1,138 @@
|
|||||||
|
const axios = require('axios');
|
||||||
|
const net = require('net');
|
||||||
|
const dns = require('dns').promises;
|
||||||
|
const db = require('./db');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Geo Location Service
|
||||||
|
* Resolves IP addresses to geographical coordinates and country info.
|
||||||
|
* Caches results in the database to minimize API calls.
|
||||||
|
*/
|
||||||
|
|
||||||
|
const ipInfoToken = process.env.IPINFO_TOKEN;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Normalizes geo data for consistent display
|
||||||
|
*/
|
||||||
|
function normalizeGeo(geo) {
|
||||||
|
if (!geo) return geo;
|
||||||
|
|
||||||
|
// Custom normalization for TW, HK, MO to "China, {CODE}"
|
||||||
|
const specialRegions = ['TW'];
|
||||||
|
if (specialRegions.includes(geo.country?.toUpperCase())) {
|
||||||
|
return {
|
||||||
|
...geo,
|
||||||
|
city: `China, ${geo.country.toUpperCase()}`,
|
||||||
|
country_name: 'China'
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return geo;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function getLocation(target) {
|
||||||
|
// Normalize target (strip port if present)
|
||||||
|
const cleanTarget = target.split(':')[0];
|
||||||
|
|
||||||
|
// 1. Check if we already have this IP/Domain in DB (FASTEST)
|
||||||
|
try {
|
||||||
|
const [rows] = await db.query('SELECT * FROM server_locations WHERE ip = ?', [cleanTarget]);
|
||||||
|
if (rows.length > 0) {
|
||||||
|
const data = rows[0];
|
||||||
|
const age = Date.now() - new Date(data.last_updated).getTime();
|
||||||
|
if (age < 30 * 24 * 60 * 60 * 1000) {
|
||||||
|
return normalizeGeo(data);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
// console.error(`[Geo Service] DB check failed for ${cleanTarget}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Resolve domain to IP if needed
|
||||||
|
let cleanIp = cleanTarget;
|
||||||
|
if (net.isIP(cleanTarget) === 0) {
|
||||||
|
try {
|
||||||
|
const lookup = await dns.lookup(cleanTarget);
|
||||||
|
cleanIp = lookup.address;
|
||||||
|
|
||||||
|
// Secondary DB check with resolved IP
|
||||||
|
const [rows] = await db.query('SELECT * FROM server_locations WHERE ip = ?', [cleanIp]);
|
||||||
|
if (rows.length > 0) {
|
||||||
|
return normalizeGeo(rows[0]);
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
// Quiet DNS failure for tokens (legacy bug mitigation)
|
||||||
|
if (!/^[0-9a-f]{16}$/i.test(cleanTarget)) {
|
||||||
|
console.error(`[Geo Service] DNS resolution failed for ${cleanTarget}:`, err.message);
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. Skip local/reserved IPs
|
||||||
|
if (isLocalIp(cleanIp)) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4. Resolve via ipinfo.io (LAST RESORT)
|
||||||
|
try {
|
||||||
|
console.log(`[Geo Service] API lookup (ipinfo.io) for: ${cleanIp}`);
|
||||||
|
const url = `https://ipinfo.io/${cleanIp}/json${ipInfoToken ? `?token=${ipInfoToken}` : ''}`;
|
||||||
|
const response = await axios.get(url, { timeout: 5000 });
|
||||||
|
const geo = normalizeGeo(response.data);
|
||||||
|
|
||||||
|
if (geo && geo.loc) {
|
||||||
|
const [lat, lon] = geo.loc.split(',').map(Number);
|
||||||
|
const locationData = {
|
||||||
|
ip: cleanIp,
|
||||||
|
country: geo.country,
|
||||||
|
country_name: geo.country_name || geo.country, // ipinfo might not have country_name in basic response
|
||||||
|
region: geo.region,
|
||||||
|
city: geo.city,
|
||||||
|
latitude: lat,
|
||||||
|
longitude: lon
|
||||||
|
};
|
||||||
|
|
||||||
|
// Save to DB
|
||||||
|
await db.query(`
|
||||||
|
INSERT INTO server_locations (ip, country, country_name, region, city, latitude, longitude)
|
||||||
|
VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||||
|
ON DUPLICATE KEY UPDATE
|
||||||
|
country = VALUES(country),
|
||||||
|
country_name = VALUES(country_name),
|
||||||
|
region = VALUES(region),
|
||||||
|
city = VALUES(city),
|
||||||
|
latitude = VALUES(latitude),
|
||||||
|
longitude = VALUES(longitude)
|
||||||
|
`, [
|
||||||
|
locationData.ip,
|
||||||
|
locationData.country,
|
||||||
|
locationData.country_name,
|
||||||
|
locationData.region,
|
||||||
|
locationData.city,
|
||||||
|
locationData.latitude,
|
||||||
|
locationData.longitude
|
||||||
|
]);
|
||||||
|
|
||||||
|
return locationData;
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
console.error(`[Geo Service] Error resolving IP ${cleanIp}:`, err.message);
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
function isLocalIp(ip) {
|
||||||
|
if (ip === 'localhost' || ip === '127.0.0.1' || ip === '::1') return true;
|
||||||
|
|
||||||
|
// RFC1918 private addresses
|
||||||
|
const p1 = /^10\./;
|
||||||
|
const p2 = /^172\.(1[6-9]|2[0-9]|3[0-1])\./;
|
||||||
|
const p3 = /^192\.168\./;
|
||||||
|
|
||||||
|
return p1.test(ip) || p2.test(ip) || p3.test(ip);
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
getLocation
|
||||||
|
};
|
||||||
516
server/index.js
516
server/index.js
@@ -5,7 +5,11 @@ const path = require('path');
|
|||||||
const db = require('./db');
|
const db = require('./db');
|
||||||
const prometheusService = require('./prometheus-service');
|
const prometheusService = require('./prometheus-service');
|
||||||
const cache = require('./cache');
|
const cache = require('./cache');
|
||||||
|
const geoService = require('./geo-service');
|
||||||
|
const latencyService = require('./latency-service');
|
||||||
const checkAndFixDatabase = require('./db-integrity-check');
|
const checkAndFixDatabase = require('./db-integrity-check');
|
||||||
|
const http = require('http');
|
||||||
|
const WebSocket = require('ws');
|
||||||
|
|
||||||
const app = express();
|
const app = express();
|
||||||
const PORT = process.env.PORT || 3000;
|
const PORT = process.env.PORT || 3000;
|
||||||
@@ -50,6 +54,50 @@ async function checkDb() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
checkDb();
|
checkDb();
|
||||||
|
|
||||||
|
// --- Health API ---
|
||||||
|
app.get('/health', async (req, res) => {
|
||||||
|
try {
|
||||||
|
const dbStatus = await db.checkHealth();
|
||||||
|
const cacheStatus = await cache.checkHealth();
|
||||||
|
const isAllOk = dbStatus.status === 'up' && cacheStatus.status === 'up';
|
||||||
|
|
||||||
|
const healthInfo = {
|
||||||
|
status: isAllOk ? 'ok' : 'error',
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
service: {
|
||||||
|
status: 'running',
|
||||||
|
uptime: Math.floor(process.uptime()),
|
||||||
|
memory_usage: {
|
||||||
|
rss: Math.floor(process.memoryUsage().rss / 1024 / 1024) + ' MB',
|
||||||
|
heapTotal: Math.floor(process.memoryUsage().heapTotal / 1024 / 1024) + ' MB'
|
||||||
|
},
|
||||||
|
node_version: process.version
|
||||||
|
},
|
||||||
|
checks: {
|
||||||
|
database: {
|
||||||
|
name: 'MySQL',
|
||||||
|
status: dbStatus.status,
|
||||||
|
message: dbStatus.error || 'Connected'
|
||||||
|
},
|
||||||
|
valkey: {
|
||||||
|
name: 'Valkey (Redis)',
|
||||||
|
status: cacheStatus.status,
|
||||||
|
message: cacheStatus.error || 'Connected'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
if (isAllOk) {
|
||||||
|
res.json(healthInfo);
|
||||||
|
} else {
|
||||||
|
res.status(500).json(healthInfo);
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
res.status(500).json({ status: 'error', message: err.message });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
// --- Auth API ---
|
// --- Auth API ---
|
||||||
app.post('/api/auth/login', async (req, res) => {
|
app.post('/api/auth/login', async (req, res) => {
|
||||||
const { username, password } = req.body;
|
const { username, password } = req.body;
|
||||||
@@ -80,6 +128,34 @@ app.post('/api/auth/logout', (req, res) => {
|
|||||||
res.json({ success: true });
|
res.json({ success: true });
|
||||||
});
|
});
|
||||||
|
|
||||||
|
app.post('/api/auth/change-password', requireAuth, async (req, res) => {
|
||||||
|
const { oldPassword, newPassword } = req.body;
|
||||||
|
if (!oldPassword || !newPassword) {
|
||||||
|
return res.status(400).json({ error: '需要输入旧密码和新密码' });
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const [rows] = await db.query('SELECT * FROM users WHERE id = ?', [req.user.id]);
|
||||||
|
if (rows.length === 0) return res.status(404).json({ error: '用户不存在' });
|
||||||
|
|
||||||
|
const user = rows[0];
|
||||||
|
const oldHash = crypto.pbkdf2Sync(oldPassword, user.salt, 1000, 64, 'sha512').toString('hex');
|
||||||
|
|
||||||
|
if (oldHash !== user.password) {
|
||||||
|
return res.status(401).json({ error: '旧密码输入错误' });
|
||||||
|
}
|
||||||
|
|
||||||
|
const newSalt = crypto.randomBytes(16).toString('hex');
|
||||||
|
const newHash = crypto.pbkdf2Sync(newPassword, newSalt, 1000, 64, 'sha512').toString('hex');
|
||||||
|
|
||||||
|
await db.query('UPDATE users SET password = ?, salt = ? WHERE id = ?', [newHash, newSalt, user.id]);
|
||||||
|
res.json({ success: true, message: '密码修改成功' });
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Password update error:', err);
|
||||||
|
res.status(500).json({ error: '服务器错误,修改失败' });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
app.get('/api/auth/status', (req, res) => {
|
app.get('/api/auth/status', (req, res) => {
|
||||||
const sessionId = getCookie(req, 'session_id');
|
const sessionId = getCookie(req, 'session_id');
|
||||||
if (sessionId && sessions.has(sessionId)) {
|
if (sessionId && sessions.has(sessionId)) {
|
||||||
@@ -108,8 +184,29 @@ app.post('/api/setup/test', async (req, res) => {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
app.post('/api/setup/test-valkey', async (req, res) => {
|
||||||
|
const { host, port, password } = req.body;
|
||||||
|
try {
|
||||||
|
const Redis = require('ioredis');
|
||||||
|
const redis = new Redis({
|
||||||
|
host: host || 'localhost',
|
||||||
|
port: parseInt(port) || 6379,
|
||||||
|
password: password || undefined,
|
||||||
|
lazyConnect: true,
|
||||||
|
maxRetriesPerRequest: 1,
|
||||||
|
connectTimeout: 5000
|
||||||
|
});
|
||||||
|
await redis.connect();
|
||||||
|
await redis.ping();
|
||||||
|
await redis.disconnect();
|
||||||
|
res.json({ success: true, message: 'Valkey connection successful' });
|
||||||
|
} catch (err) {
|
||||||
|
res.status(400).json({ success: false, error: err.message });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
app.post('/api/setup/init', async (req, res) => {
|
app.post('/api/setup/init', async (req, res) => {
|
||||||
const { host, port, user, password, database } = req.body;
|
const { host, port, user, password, database, vHost, vPort, vPassword } = req.body;
|
||||||
try {
|
try {
|
||||||
const mysql = require('mysql2/promise');
|
const mysql = require('mysql2/promise');
|
||||||
const connection = await mysql.createConnection({
|
const connection = await mysql.createConnection({
|
||||||
@@ -166,12 +263,18 @@ app.post('/api/setup/init', async (req, res) => {
|
|||||||
title VARCHAR(255) DEFAULT '数据可视化展示大屏',
|
title VARCHAR(255) DEFAULT '数据可视化展示大屏',
|
||||||
logo_url TEXT,
|
logo_url TEXT,
|
||||||
default_theme VARCHAR(20) DEFAULT 'dark',
|
default_theme VARCHAR(20) DEFAULT 'dark',
|
||||||
|
show_95_bandwidth TINYINT(1) DEFAULT 0,
|
||||||
|
p95_type VARCHAR(20) DEFAULT 'tx',
|
||||||
|
blackbox_source_id INT,
|
||||||
|
latency_source VARCHAR(100),
|
||||||
|
latency_dest VARCHAR(100),
|
||||||
|
latency_target VARCHAR(255),
|
||||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
||||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||||
`);
|
`);
|
||||||
await connection.query(`
|
await connection.query(`
|
||||||
INSERT IGNORE INTO site_settings (id, page_name, title, default_theme)
|
INSERT IGNORE INTO site_settings (id, page_name, title, default_theme, show_95_bandwidth, p95_type)
|
||||||
VALUES (1, '数据可视化展示大屏', '数据可视化展示大屏', 'dark')
|
VALUES (1, '数据可视化展示大屏', '数据可视化展示大屏', 'dark', 0, 'tx')
|
||||||
`);
|
`);
|
||||||
|
|
||||||
await connection.end();
|
await connection.end();
|
||||||
@@ -182,6 +285,9 @@ MYSQL_PORT=${port || '3306'}
|
|||||||
MYSQL_USER=${user || 'root'}
|
MYSQL_USER=${user || 'root'}
|
||||||
MYSQL_PASSWORD=${password || ''}
|
MYSQL_PASSWORD=${password || ''}
|
||||||
MYSQL_DATABASE=${dbName}
|
MYSQL_DATABASE=${dbName}
|
||||||
|
VALKEY_HOST=${vHost || 'localhost'}
|
||||||
|
VALKEY_PORT=${vPort || '6379'}
|
||||||
|
VALKEY_PASSWORD=${vPassword || ''}
|
||||||
PORT=${process.env.PORT || 3000}
|
PORT=${process.env.PORT || 3000}
|
||||||
HOST=${process.env.HOST || '0.0.0.0'}
|
HOST=${process.env.HOST || '0.0.0.0'}
|
||||||
REFRESH_INTERVAL=${process.env.REFRESH_INTERVAL || 5000}
|
REFRESH_INTERVAL=${process.env.REFRESH_INTERVAL || 5000}
|
||||||
@@ -194,9 +300,13 @@ REFRESH_INTERVAL=${process.env.REFRESH_INTERVAL || 5000}
|
|||||||
process.env.MYSQL_USER = user;
|
process.env.MYSQL_USER = user;
|
||||||
process.env.MYSQL_PASSWORD = password;
|
process.env.MYSQL_PASSWORD = password;
|
||||||
process.env.MYSQL_DATABASE = dbName;
|
process.env.MYSQL_DATABASE = dbName;
|
||||||
|
process.env.VALKEY_HOST = vHost;
|
||||||
|
process.env.VALKEY_PORT = vPort;
|
||||||
|
process.env.VALKEY_PASSWORD = vPassword;
|
||||||
|
|
||||||
// Re-initialize pool
|
// Re-initialize pools
|
||||||
db.initPool();
|
db.initPool();
|
||||||
|
cache.init();
|
||||||
|
|
||||||
isDbInitialized = true;
|
isDbInitialized = true;
|
||||||
res.json({ success: true, message: 'Initialization complete' });
|
res.json({ success: true, message: 'Initialization complete' });
|
||||||
@@ -236,7 +346,15 @@ app.post('/api/setup/admin', async (req, res) => {
|
|||||||
const hash = crypto.pbkdf2Sync(password, salt, 1000, 64, 'sha512').toString('hex');
|
const hash = crypto.pbkdf2Sync(password, salt, 1000, 64, 'sha512').toString('hex');
|
||||||
|
|
||||||
await db.query('INSERT INTO users (username, password, salt) VALUES (?, ?, ?)', [username, hash, salt]);
|
await db.query('INSERT INTO users (username, password, salt) VALUES (?, ?, ?)', [username, hash, salt]);
|
||||||
res.json({ success: true, message: 'Admin account created' });
|
const [userRows] = await db.query('SELECT id, username FROM users WHERE username = ?', [username]);
|
||||||
|
const user = userRows[0];
|
||||||
|
|
||||||
|
// Auto-login after creation so the next setup steps (like adding Prometheus) work without 401
|
||||||
|
const sessionId = crypto.randomBytes(32).toString('hex');
|
||||||
|
sessions.set(sessionId, { id: user.id, username: user.username });
|
||||||
|
res.setHeader('Set-Cookie', `session_id=${sessionId}; Path=/; HttpOnly; SameSite=Strict; Max-Age=86400`);
|
||||||
|
|
||||||
|
res.json({ success: true, message: 'Admin account created and logged in' });
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.error('Admin creation error:', err);
|
console.error('Admin creation error:', err);
|
||||||
res.status(500).json({ error: err.message });
|
res.status(500).json({ error: err.message });
|
||||||
@@ -246,7 +364,7 @@ app.post('/api/setup/admin', async (req, res) => {
|
|||||||
// Middleware to protect routes & enforce setup
|
// Middleware to protect routes & enforce setup
|
||||||
app.use(async (req, res, next) => {
|
app.use(async (req, res, next) => {
|
||||||
// Allow system files and setup APIs
|
// Allow system files and setup APIs
|
||||||
if (req.path.startsWith('/api/setup') || req.path === '/init.html' || req.path.startsWith('/css/') || req.path.startsWith('/js/') || req.path.startsWith('/fonts/')) {
|
if (req.path === '/health' || req.path.startsWith('/api/setup') || req.path === '/init.html' || req.path.startsWith('/css/') || req.path.startsWith('/js/') || req.path.startsWith('/fonts/')) {
|
||||||
return next();
|
return next();
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -290,7 +408,11 @@ const serveIndex = async (req, res) => {
|
|||||||
page_name: '数据可视化展示大屏',
|
page_name: '数据可视化展示大屏',
|
||||||
title: '数据可视化展示大屏',
|
title: '数据可视化展示大屏',
|
||||||
logo_url: null,
|
logo_url: null,
|
||||||
default_theme: 'dark'
|
default_theme: 'dark',
|
||||||
|
blackbox_source_id: null,
|
||||||
|
latency_source: null,
|
||||||
|
latency_dest: null,
|
||||||
|
latency_target: null
|
||||||
};
|
};
|
||||||
|
|
||||||
if (isDbInitialized) {
|
if (isDbInitialized) {
|
||||||
@@ -326,11 +448,18 @@ app.use(express.static(path.join(__dirname, '..', 'public'), { index: false }));
|
|||||||
// Get all Prometheus sources
|
// Get all Prometheus sources
|
||||||
app.get('/api/sources', async (req, res) => {
|
app.get('/api/sources', async (req, res) => {
|
||||||
try {
|
try {
|
||||||
const [rows] = await db.query('SELECT * FROM prometheus_sources ORDER BY created_at DESC');
|
const [rows] = await db.query('SELECT * FROM prometheus_sources ORDER BY is_server_source DESC, created_at DESC');
|
||||||
// Test connectivity for each source
|
// Test connectivity for each source
|
||||||
const sourcesWithStatus = await Promise.all(rows.map(async (source) => {
|
const sourcesWithStatus = await Promise.all(rows.map(async (source) => {
|
||||||
try {
|
try {
|
||||||
const response = await prometheusService.testConnection(source.url);
|
let response;
|
||||||
|
if (source.type === 'blackbox') {
|
||||||
|
// Simple check for blackbox exporter
|
||||||
|
const res = await fetch(`${source.url.replace(/\/+$/, '')}/metrics`, { timeout: 3000 }).catch(() => null);
|
||||||
|
response = (res && res.ok) ? 'Blackbox Exporter Ready' : 'Connection Error';
|
||||||
|
} else {
|
||||||
|
response = await prometheusService.testConnection(source.url);
|
||||||
|
}
|
||||||
return { ...source, status: 'online', version: response };
|
return { ...source, status: 'online', version: response };
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
return { ...source, status: 'offline', version: null };
|
return { ...source, status: 'offline', version: null };
|
||||||
@@ -345,18 +474,21 @@ app.get('/api/sources', async (req, res) => {
|
|||||||
|
|
||||||
// Add a new Prometheus source
|
// Add a new Prometheus source
|
||||||
app.post('/api/sources', requireAuth, async (req, res) => {
|
app.post('/api/sources', requireAuth, async (req, res) => {
|
||||||
let { name, url, description } = req.body;
|
let { name, url, description, is_server_source, type } = req.body;
|
||||||
if (!name || !url) {
|
if (!name || !url) {
|
||||||
return res.status(400).json({ error: 'Name and URL are required' });
|
return res.status(400).json({ error: 'Name and URL are required' });
|
||||||
}
|
}
|
||||||
if (!/^https?:\/\//i.test(url)) url = 'http://' + url;
|
if (!/^https?:\/\//i.test(url)) url = 'http://' + url;
|
||||||
try {
|
try {
|
||||||
const [result] = await db.query(
|
const [result] = await db.query(
|
||||||
'INSERT INTO prometheus_sources (name, url, description) VALUES (?, ?, ?)',
|
'INSERT INTO prometheus_sources (name, url, description, is_server_source, type) VALUES (?, ?, ?, ?, ?)',
|
||||||
[name, url, description || '']
|
[name, url, description || '', is_server_source === undefined ? 1 : (is_server_source ? 1 : 0), type || 'prometheus']
|
||||||
);
|
);
|
||||||
const [rows] = await db.query('SELECT * FROM prometheus_sources WHERE id = ?', [result.insertId]);
|
const [rows] = await db.query('SELECT * FROM prometheus_sources WHERE id = ?', [result.insertId]);
|
||||||
|
|
||||||
|
// Clear network history cache to force refresh
|
||||||
|
await cache.del('network_history_all');
|
||||||
|
|
||||||
res.status(201).json(rows[0]);
|
res.status(201).json(rows[0]);
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.error('Error adding source:', err);
|
console.error('Error adding source:', err);
|
||||||
@@ -366,13 +498,16 @@ app.post('/api/sources', requireAuth, async (req, res) => {
|
|||||||
|
|
||||||
// Update a Prometheus source
|
// Update a Prometheus source
|
||||||
app.put('/api/sources/:id', requireAuth, async (req, res) => {
|
app.put('/api/sources/:id', requireAuth, async (req, res) => {
|
||||||
let { name, url, description } = req.body;
|
let { name, url, description, is_server_source } = req.body;
|
||||||
if (url && !/^https?:\/\//i.test(url)) url = 'http://' + url;
|
if (url && !/^https?:\/\//i.test(url)) url = 'http://' + url;
|
||||||
try {
|
try {
|
||||||
await db.query(
|
await db.query(
|
||||||
'UPDATE prometheus_sources SET name = ?, url = ?, description = ? WHERE id = ?',
|
'UPDATE prometheus_sources SET name = ?, url = ?, description = ?, is_server_source = ?, type = ? WHERE id = ?',
|
||||||
[name, url, description || '', req.params.id]
|
[name, url, description || '', is_server_source ? 1 : 0, type || 'prometheus', req.params.id]
|
||||||
);
|
);
|
||||||
|
// Clear network history cache
|
||||||
|
await cache.del('network_history_all');
|
||||||
|
|
||||||
const [rows] = await db.query('SELECT * FROM prometheus_sources WHERE id = ?', [req.params.id]);
|
const [rows] = await db.query('SELECT * FROM prometheus_sources WHERE id = ?', [req.params.id]);
|
||||||
res.json(rows[0]);
|
res.json(rows[0]);
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
@@ -385,6 +520,8 @@ app.put('/api/sources/:id', requireAuth, async (req, res) => {
|
|||||||
app.delete('/api/sources/:id', requireAuth, async (req, res) => {
|
app.delete('/api/sources/:id', requireAuth, async (req, res) => {
|
||||||
try {
|
try {
|
||||||
await db.query('DELETE FROM prometheus_sources WHERE id = ?', [req.params.id]);
|
await db.query('DELETE FROM prometheus_sources WHERE id = ?', [req.params.id]);
|
||||||
|
// Clear network history cache
|
||||||
|
await cache.del('network_history_all');
|
||||||
res.json({ message: 'Source deleted' });
|
res.json({ message: 'Source deleted' });
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.error('Error deleting source:', err);
|
console.error('Error deleting source:', err);
|
||||||
@@ -394,11 +531,18 @@ app.delete('/api/sources/:id', requireAuth, async (req, res) => {
|
|||||||
|
|
||||||
// Test connection to a Prometheus source
|
// Test connection to a Prometheus source
|
||||||
app.post('/api/sources/test', async (req, res) => {
|
app.post('/api/sources/test', async (req, res) => {
|
||||||
let { url } = req.body;
|
let { url, type } = req.body;
|
||||||
if (url && !/^https?:\/\//i.test(url)) url = 'http://' + url;
|
if (url && !/^https?:\/\//i.test(url)) url = 'http://' + url;
|
||||||
try {
|
try {
|
||||||
const version = await prometheusService.testConnection(url);
|
let result;
|
||||||
res.json({ status: 'ok', version });
|
if (type === 'blackbox') {
|
||||||
|
const resVal = await fetch(`${url.replace(/\/+$/, '')}/metrics`, { timeout: 5000 }).catch(() => null);
|
||||||
|
result = (resVal && resVal.ok) ? 'Blackbox Exporter Ready' : 'Connection Failed';
|
||||||
|
if (!resVal || !resVal.ok) throw new Error(result);
|
||||||
|
} else {
|
||||||
|
result = await prometheusService.testConnection(url);
|
||||||
|
}
|
||||||
|
res.json({ status: 'ok', version: result });
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
res.status(400).json({ status: 'error', message: err.message });
|
res.status(400).json({ status: 'error', message: err.message });
|
||||||
}
|
}
|
||||||
@@ -414,7 +558,13 @@ app.get('/api/settings', async (req, res) => {
|
|||||||
return res.json({
|
return res.json({
|
||||||
page_name: '数据可视化展示大屏',
|
page_name: '数据可视化展示大屏',
|
||||||
title: '数据可视化展示大屏',
|
title: '数据可视化展示大屏',
|
||||||
logo_url: null
|
logo_url: null,
|
||||||
|
show_95_bandwidth: 0,
|
||||||
|
p95_type: 'tx',
|
||||||
|
blackbox_source_id: null,
|
||||||
|
latency_source: null,
|
||||||
|
latency_dest: null,
|
||||||
|
latency_target: null
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
res.json(rows[0]);
|
res.json(rows[0]);
|
||||||
@@ -426,17 +576,28 @@ app.get('/api/settings', async (req, res) => {
|
|||||||
|
|
||||||
// Update site settings
|
// Update site settings
|
||||||
app.post('/api/settings', requireAuth, async (req, res) => {
|
app.post('/api/settings', requireAuth, async (req, res) => {
|
||||||
const { page_name, title, logo_url, default_theme } = req.body;
|
const { page_name, title, logo_url, default_theme, show_95_bandwidth, p95_type, blackbox_source_id, latency_source, latency_dest, latency_target } = req.body;
|
||||||
try {
|
try {
|
||||||
await db.query(
|
await db.query(
|
||||||
`INSERT INTO site_settings (id, page_name, title, logo_url, default_theme)
|
`INSERT INTO site_settings (id, page_name, title, logo_url, default_theme, show_95_bandwidth, p95_type, blackbox_source_id, latency_source, latency_dest, latency_target)
|
||||||
VALUES (1, ?, ?, ?, ?)
|
VALUES (1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
ON DUPLICATE KEY UPDATE
|
ON DUPLICATE KEY UPDATE
|
||||||
page_name = VALUES(page_name),
|
page_name = VALUES(page_name),
|
||||||
title = VALUES(title),
|
title = VALUES(title),
|
||||||
logo_url = VALUES(logo_url),
|
logo_url = VALUES(logo_url),
|
||||||
default_theme = VALUES(default_theme)`,
|
default_theme = VALUES(default_theme),
|
||||||
[page_name, title, logo_url, default_theme]
|
show_95_bandwidth = VALUES(show_95_bandwidth),
|
||||||
|
p95_type = VALUES(p95_type),
|
||||||
|
blackbox_source_id = VALUES(blackbox_source_id),
|
||||||
|
latency_source = VALUES(latency_source),
|
||||||
|
latency_dest = VALUES(latency_dest),
|
||||||
|
latency_target = VALUES(latency_target)`,
|
||||||
|
[
|
||||||
|
page_name, title, logo_url, default_theme,
|
||||||
|
show_95_bandwidth ? 1 : 0, p95_type || 'tx',
|
||||||
|
blackbox_source_id || null, latency_source || null,
|
||||||
|
latency_dest || null, latency_target || null
|
||||||
|
]
|
||||||
);
|
);
|
||||||
res.json({ success: true });
|
res.json({ success: true });
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
@@ -447,20 +608,20 @@ app.post('/api/settings', requireAuth, async (req, res) => {
|
|||||||
|
|
||||||
// ==================== Metrics Aggregation ====================
|
// ==================== Metrics Aggregation ====================
|
||||||
|
|
||||||
// Get all aggregated metrics from all Prometheus sources
|
// Reusable function to get overview metrics
|
||||||
app.get('/api/metrics/overview', async (req, res) => {
|
async function getOverview() {
|
||||||
try {
|
const [sources] = await db.query('SELECT * FROM prometheus_sources WHERE is_server_source = 1 AND type != "blackbox"');
|
||||||
const [sources] = await db.query('SELECT * FROM prometheus_sources');
|
|
||||||
if (sources.length === 0) {
|
if (sources.length === 0) {
|
||||||
return res.json({
|
return {
|
||||||
totalServers: 0,
|
totalServers: 0,
|
||||||
|
activeServers: 0,
|
||||||
cpu: { used: 0, total: 0, percent: 0 },
|
cpu: { used: 0, total: 0, percent: 0 },
|
||||||
memory: { used: 0, total: 0, percent: 0 },
|
memory: { used: 0, total: 0, percent: 0 },
|
||||||
disk: { used: 0, total: 0, percent: 0 },
|
disk: { used: 0, total: 0, percent: 0 },
|
||||||
network: { total: 0, rx: 0, tx: 0 },
|
network: { total: 0, rx: 0, tx: 0 },
|
||||||
traffic24h: { rx: 0, tx: 0, total: 0 },
|
traffic24h: { rx: 0, tx: 0, total: 0 },
|
||||||
servers: []
|
servers: []
|
||||||
});
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
const allMetrics = await Promise.all(sources.map(async (source) => {
|
const allMetrics = await Promise.all(sources.map(async (source) => {
|
||||||
@@ -470,6 +631,8 @@ app.get('/api/metrics/overview', async (req, res) => {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
const metrics = await prometheusService.getOverviewMetrics(source.url, source.name);
|
const metrics = await prometheusService.getOverviewMetrics(source.url, source.name);
|
||||||
|
// Don't set cache here if we want real-time WS push to be fresh,
|
||||||
|
// but keeping it for REST API performance is fine.
|
||||||
await cache.set(cacheKey, metrics, 15); // Cache for 15s
|
await cache.set(cacheKey, metrics, 15); // Cache for 15s
|
||||||
return metrics;
|
return metrics;
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
@@ -482,6 +645,7 @@ app.get('/api/metrics/overview', async (req, res) => {
|
|||||||
|
|
||||||
// Aggregate across all sources
|
// Aggregate across all sources
|
||||||
let totalServers = 0;
|
let totalServers = 0;
|
||||||
|
let activeServers = 0;
|
||||||
let cpuUsed = 0, cpuTotal = 0;
|
let cpuUsed = 0, cpuTotal = 0;
|
||||||
let memUsed = 0, memTotal = 0;
|
let memUsed = 0, memTotal = 0;
|
||||||
let diskUsed = 0, diskTotal = 0;
|
let diskUsed = 0, diskTotal = 0;
|
||||||
@@ -491,6 +655,7 @@ app.get('/api/metrics/overview', async (req, res) => {
|
|||||||
|
|
||||||
for (const m of validMetrics) {
|
for (const m of validMetrics) {
|
||||||
totalServers += m.totalServers;
|
totalServers += m.totalServers;
|
||||||
|
activeServers += (m.activeServers !== undefined ? m.activeServers : m.totalServers);
|
||||||
cpuUsed += m.cpu.used;
|
cpuUsed += m.cpu.used;
|
||||||
cpuTotal += m.cpu.total;
|
cpuTotal += m.cpu.total;
|
||||||
memUsed += m.memory.used;
|
memUsed += m.memory.used;
|
||||||
@@ -504,22 +669,9 @@ app.get('/api/metrics/overview', async (req, res) => {
|
|||||||
allServers = allServers.concat(m.servers);
|
allServers = allServers.concat(m.servers);
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- 24h Traffic from DB (Integrating Bandwidth) ---
|
const overview = {
|
||||||
try {
|
|
||||||
// Each record represents a 5-second interval
|
|
||||||
const [sumRows] = await db.query('SELECT SUM(rx_bandwidth) as sumRx, SUM(tx_bandwidth) as sumTx FROM traffic_stats WHERE timestamp >= NOW() - INTERVAL 1 DAY');
|
|
||||||
|
|
||||||
if (sumRows.length > 0 && sumRows[0].sumRx !== null) {
|
|
||||||
// Total bytes = Sum of (bytes/sec) * 5 seconds
|
|
||||||
traffic24hRx = sumRows[0].sumRx * 5;
|
|
||||||
traffic24hTx = sumRows[0].sumTx * 5;
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
console.error('Error calculating 24h traffic from DB integration:', err);
|
|
||||||
}
|
|
||||||
|
|
||||||
res.json({
|
|
||||||
totalServers,
|
totalServers,
|
||||||
|
activeServers,
|
||||||
cpu: {
|
cpu: {
|
||||||
used: cpuUsed,
|
used: cpuUsed,
|
||||||
total: cpuTotal,
|
total: cpuTotal,
|
||||||
@@ -546,37 +698,89 @@ app.get('/api/metrics/overview', async (req, res) => {
|
|||||||
total: traffic24hRx + traffic24hTx
|
total: traffic24hRx + traffic24hTx
|
||||||
},
|
},
|
||||||
servers: allServers
|
servers: allServers
|
||||||
});
|
};
|
||||||
|
|
||||||
|
// --- Add Geo Information to Servers ---
|
||||||
|
const geoServers = await Promise.all(overview.servers.map(async (server) => {
|
||||||
|
const realInstance = server.originalInstance || prometheusService.resolveToken(server.instance);
|
||||||
|
const cleanIp = realInstance.split(':')[0];
|
||||||
|
|
||||||
|
let geoData = null;
|
||||||
|
try {
|
||||||
|
const [rows] = await db.query('SELECT * FROM server_locations WHERE ip = ?', [cleanIp]);
|
||||||
|
if (rows.length > 0) {
|
||||||
|
geoData = rows[0];
|
||||||
|
} else {
|
||||||
|
geoService.getLocation(cleanIp).catch(() => {});
|
||||||
|
}
|
||||||
|
} catch (e) {}
|
||||||
|
|
||||||
|
const { originalInstance, ...safeServer } = server;
|
||||||
|
if (geoData) {
|
||||||
|
return {
|
||||||
|
...safeServer,
|
||||||
|
country: geoData.country,
|
||||||
|
countryName: geoData.country_name,
|
||||||
|
city: geoData.city,
|
||||||
|
lat: geoData.latitude,
|
||||||
|
lng: geoData.longitude
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return safeServer;
|
||||||
|
}));
|
||||||
|
|
||||||
|
overview.servers = geoServers;
|
||||||
|
return overview;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get all aggregated metrics from all Prometheus sources
|
||||||
|
app.get('/api/metrics/overview', async (req, res) => {
|
||||||
|
try {
|
||||||
|
const overview = await getOverview();
|
||||||
|
res.json(overview);
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.error('Error fetching overview metrics:', err);
|
console.error('Error fetching overview metrics:', err);
|
||||||
res.status(500).json({ error: 'Failed to fetch metrics' });
|
res.status(500).json({ error: 'Failed to fetch metrics' });
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
// Get network traffic history from DB (past 24h)
|
// Get network traffic history (past 24h) from Prometheus
|
||||||
app.get('/api/metrics/network-history', async (req, res) => {
|
app.get('/api/metrics/network-history', async (req, res) => {
|
||||||
try {
|
try {
|
||||||
const [rows] = await db.query('SELECT rx_bandwidth, tx_bandwidth, UNIX_TIMESTAMP(timestamp) as ts FROM traffic_stats WHERE timestamp >= NOW() - INTERVAL 1 DAY ORDER BY ts ASC');
|
const cacheKey = 'network_history_all';
|
||||||
|
const cached = await cache.get(cacheKey);
|
||||||
|
if (cached) return res.json(cached);
|
||||||
|
|
||||||
if (rows.length === 0) {
|
const [sources] = await db.query('SELECT * FROM prometheus_sources WHERE is_server_source = 1 AND type != "blackbox"');
|
||||||
|
if (sources.length === 0) {
|
||||||
return res.json({ timestamps: [], rx: [], tx: [] });
|
return res.json({ timestamps: [], rx: [], tx: [] });
|
||||||
}
|
}
|
||||||
|
|
||||||
res.json({
|
const histories = await Promise.all(sources.map(source =>
|
||||||
timestamps: rows.map(r => r.ts * 1000),
|
prometheusService.getNetworkHistory(source.url).catch(err => {
|
||||||
rx: rows.map(r => r.rx_bandwidth),
|
console.error(`Error fetching network history from ${source.name}:`, err.message);
|
||||||
tx: rows.map(r => r.tx_bandwidth)
|
return null;
|
||||||
});
|
})
|
||||||
|
));
|
||||||
|
|
||||||
|
const validHistories = histories.filter(h => h !== null);
|
||||||
|
if (validHistories.length === 0) {
|
||||||
|
return res.json({ timestamps: [], rx: [], tx: [] });
|
||||||
|
}
|
||||||
|
|
||||||
|
const merged = prometheusService.mergeNetworkHistories(validHistories);
|
||||||
|
await cache.set(cacheKey, merged, 300); // Cache for 5 minutes
|
||||||
|
res.json(merged);
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.error('Error fetching network history from DB:', err);
|
console.error('Error fetching network history history:', err);
|
||||||
res.status(500).json({ error: 'Failed to fetch network history' });
|
res.status(500).json({ error: 'Failed to fetch network history history' });
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
// Get CPU usage history for sparklines
|
// Get CPU usage history for sparklines
|
||||||
app.get('/api/metrics/cpu-history', async (req, res) => {
|
app.get('/api/metrics/cpu-history', async (req, res) => {
|
||||||
try {
|
try {
|
||||||
const [sources] = await db.query('SELECT * FROM prometheus_sources');
|
const [sources] = await db.query('SELECT * FROM prometheus_sources WHERE is_server_source = 1 AND type != "blackbox"');
|
||||||
if (sources.length === 0) {
|
if (sources.length === 0) {
|
||||||
return res.json({ timestamps: [], values: [] });
|
return res.json({ timestamps: [], values: [] });
|
||||||
}
|
}
|
||||||
@@ -601,6 +805,51 @@ app.get('/api/metrics/cpu-history', async (req, res) => {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Get detailed metrics for a specific server
|
||||||
|
app.get('/api/metrics/server-details', async (req, res) => {
|
||||||
|
const { instance, job, source } = req.query;
|
||||||
|
|
||||||
|
if (!instance || !job || !source) {
|
||||||
|
return res.status(400).json({ error: 'instance, job, and source name are required' });
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Find the source URL by name
|
||||||
|
const [rows] = await db.query('SELECT url FROM prometheus_sources WHERE name = ?', [source]);
|
||||||
|
if (rows.length === 0) {
|
||||||
|
return res.status(404).json({ error: 'Prometheus source not found' });
|
||||||
|
}
|
||||||
|
const sourceUrl = rows[0].url;
|
||||||
|
|
||||||
|
// Fetch detailed metrics
|
||||||
|
const details = await prometheusService.getServerDetails(sourceUrl, instance, job);
|
||||||
|
res.json(details);
|
||||||
|
} catch (err) {
|
||||||
|
console.error(`Error fetching server details for ${instance}:`, err.message);
|
||||||
|
res.status(500).json({ error: 'Failed to fetch server details' });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get historical metrics for a specific server
|
||||||
|
app.get('/api/metrics/server-history', async (req, res) => {
|
||||||
|
const { instance, job, source, metric, range, start, end } = req.query;
|
||||||
|
|
||||||
|
if (!instance || !job || !source || !metric) {
|
||||||
|
return res.status(400).json({ error: 'instance, job, source, and metric are required' });
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const [rows] = await db.query('SELECT url FROM prometheus_sources WHERE name = ?', [source]);
|
||||||
|
if (rows.length === 0) return res.status(404).json({ error: 'Source not found' });
|
||||||
|
const sourceUrl = rows[0].url;
|
||||||
|
|
||||||
|
const data = await prometheusService.getServerHistory(sourceUrl, instance, job, metric, range, start, end);
|
||||||
|
res.json(data);
|
||||||
|
} catch (err) {
|
||||||
|
res.status(500).json({ error: err.message });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
// SPA fallback
|
// SPA fallback
|
||||||
app.get('*', (req, res, next) => {
|
app.get('*', (req, res, next) => {
|
||||||
if (req.path.startsWith('/api/') || req.path.includes('.')) return next();
|
if (req.path.startsWith('/api/') || req.path.includes('.')) return next();
|
||||||
@@ -608,68 +857,125 @@ app.get('*', (req, res, next) => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
async function recordTrafficStats() {
|
// ==================== Latency Routes CRUD ====================
|
||||||
if (!isDbInitialized) return;
|
|
||||||
try {
|
|
||||||
const [sources] = await db.query('SELECT * FROM prometheus_sources');
|
|
||||||
if (sources.length === 0) return;
|
|
||||||
|
|
||||||
let totalRxBytes = 0;
|
app.get('/api/latency-routes', async (req, res) => {
|
||||||
let totalTxBytes = 0;
|
|
||||||
let totalRxBandwidth = 0;
|
|
||||||
let totalTxBandwidth = 0;
|
|
||||||
|
|
||||||
const results = await Promise.all(sources.map(async source => {
|
|
||||||
try {
|
try {
|
||||||
const [rxBytesRes, txBytesRes, rxBWRes, txBWRes] = await Promise.all([
|
const [rows] = await db.query(`
|
||||||
prometheusService.query(source.url, 'sum(node_network_receive_bytes_total{device!~"lo|veth.*|docker.*|br-.*"})'),
|
SELECT r.*, s.name as source_name
|
||||||
prometheusService.query(source.url, 'sum(node_network_transmit_bytes_total{device!~"lo|veth.*|docker.*|br-.*"})'),
|
FROM latency_routes r
|
||||||
prometheusService.query(source.url, 'sum(rate(node_network_receive_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[1m]))'),
|
LEFT JOIN prometheus_sources s ON r.source_id = s.id
|
||||||
prometheusService.query(source.url, 'sum(rate(node_network_transmit_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[1m]))')
|
ORDER BY r.created_at DESC
|
||||||
]);
|
`);
|
||||||
|
res.json(rows);
|
||||||
|
} catch (err) {
|
||||||
|
res.status(500).json({ error: 'Failed to fetch latency routes' });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
app.post('/api/latency-routes', requireAuth, async (req, res) => {
|
||||||
|
const { source_id, latency_source, latency_dest, latency_target } = req.body;
|
||||||
|
try {
|
||||||
|
await db.query('INSERT INTO latency_routes (source_id, latency_source, latency_dest, latency_target) VALUES (?, ?, ?, ?)', [source_id, latency_source, latency_dest, latency_target]);
|
||||||
|
res.json({ success: true });
|
||||||
|
} catch (err) {
|
||||||
|
res.status(500).json({ error: 'Failed to add latency route' });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
app.delete('/api/latency-routes/:id', requireAuth, async (req, res) => {
|
||||||
|
try {
|
||||||
|
await db.query('DELETE FROM latency_routes WHERE id = ?', [req.params.id]);
|
||||||
|
res.json({ success: true });
|
||||||
|
} catch (err) {
|
||||||
|
res.status(500).json({ error: 'Failed to delete latency route' });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
app.put('/api/latency-routes/:id', requireAuth, async (req, res) => {
|
||||||
|
const { source_id, latency_source, latency_dest, latency_target } = req.body;
|
||||||
|
try {
|
||||||
|
await db.query(
|
||||||
|
'UPDATE latency_routes SET source_id = ?, latency_source = ?, latency_dest = ?, latency_target = ? WHERE id = ?',
|
||||||
|
[source_id, latency_source, latency_dest, latency_target, req.params.id]
|
||||||
|
);
|
||||||
|
res.json({ success: true });
|
||||||
|
} catch (err) {
|
||||||
|
res.status(500).json({ error: 'Failed to update latency route' });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==================== Metrics Latency ====================
|
||||||
|
|
||||||
|
app.get('/api/metrics/latency', async (req, res) => {
|
||||||
|
try {
|
||||||
|
const [routes] = await db.query(`
|
||||||
|
SELECT r.*, s.url, s.type as source_type
|
||||||
|
FROM latency_routes r
|
||||||
|
JOIN prometheus_sources s ON r.source_id = s.id
|
||||||
|
`);
|
||||||
|
|
||||||
|
if (routes.length === 0) {
|
||||||
|
return res.json({ routes: [] });
|
||||||
|
}
|
||||||
|
|
||||||
|
const results = await Promise.all(routes.map(async (route) => {
|
||||||
|
// Try to get from Valkey first (filled by background latencyService)
|
||||||
|
let latency = await cache.get(`latency:route:${route.id}`);
|
||||||
|
|
||||||
|
// Fallback if not in cache (only for prometheus sources, blackbox sources rely on the background service)
|
||||||
|
if (latency === null && route.source_type === 'prometheus') {
|
||||||
|
latency = await prometheusService.getLatency(route.url, route.latency_target);
|
||||||
|
}
|
||||||
|
|
||||||
return {
|
return {
|
||||||
rxBytes: (rxBytesRes.length > 0) ? parseFloat(rxBytesRes[0].value[1]) : 0,
|
id: route.id,
|
||||||
txBytes: (txBytesRes.length > 0) ? parseFloat(txBytesRes[0].value[1]) : 0,
|
source: route.latency_source,
|
||||||
rxBW: (rxBWRes.length > 0) ? parseFloat(rxBWRes[0].value[1]) : 0,
|
dest: route.latency_dest,
|
||||||
txBW: (txBWRes.length > 0) ? parseFloat(txBWRes[0].value[1]) : 0
|
latency: latency
|
||||||
};
|
};
|
||||||
} catch (e) {
|
|
||||||
return { rxBytes: 0, txBytes: 0, rxBW: 0, txBW: 0 };
|
|
||||||
}
|
|
||||||
}));
|
}));
|
||||||
|
|
||||||
for (const r of results) {
|
res.json({ routes: results });
|
||||||
totalRxBytes += r.rxBytes;
|
|
||||||
totalTxBytes += r.txBytes;
|
|
||||||
totalRxBandwidth += r.rxBW;
|
|
||||||
totalTxBandwidth += r.txBW;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Always insert a record if we have sources, so the timeline advances
|
|
||||||
// Even if traffic is 0, we want to see 0 on the chart
|
|
||||||
await db.query('INSERT INTO traffic_stats (rx_bytes, tx_bytes, rx_bandwidth, tx_bandwidth) VALUES (?, ?, ?, ?)', [
|
|
||||||
Math.round(totalRxBytes),
|
|
||||||
Math.round(totalTxBytes),
|
|
||||||
totalRxBandwidth,
|
|
||||||
totalTxBandwidth
|
|
||||||
]);
|
|
||||||
console.log(`[Traffic Recorder] Saved stats: BW_RX=${totalRxBandwidth.toFixed(2)}, BW_TX=${totalTxBandwidth.toFixed(2)}`);
|
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.error('[Traffic Recorder] Error recording stats:', err);
|
console.error('Error fetching latencies:', err);
|
||||||
|
res.status(500).json({ error: 'Failed to fetch latency' });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==================== WebSocket Server ====================
|
||||||
|
|
||||||
|
const server = http.createServer(app);
|
||||||
|
const wss = new WebSocket.Server({ server });
|
||||||
|
|
||||||
|
function broadcast(data) {
|
||||||
|
const message = JSON.stringify(data);
|
||||||
|
wss.clients.forEach(client => {
|
||||||
|
if (client.readyState === WebSocket.OPEN) {
|
||||||
|
client.send(message);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Broadcast loop
|
||||||
|
async function broadcastMetrics() {
|
||||||
|
try {
|
||||||
|
const overview = await getOverview();
|
||||||
|
broadcast({ type: 'overview', data: overview });
|
||||||
|
} catch (err) {
|
||||||
|
// console.error('WS Broadcast error:', err.message);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check and fix database integrity on startup
|
// Start services
|
||||||
checkAndFixDatabase();
|
checkAndFixDatabase();
|
||||||
|
latencyService.start();
|
||||||
|
|
||||||
// Record traffic every 5 seconds (17,280 points/day)
|
const REFRESH_INT = parseInt(process.env.REFRESH_INTERVAL) || 5000;
|
||||||
setInterval(recordTrafficStats, 5 * 1000);
|
setInterval(broadcastMetrics, REFRESH_INT);
|
||||||
// Initial record after a short delay
|
|
||||||
setTimeout(recordTrafficStats, 10000);
|
|
||||||
|
|
||||||
app.listen(PORT, HOST, () => {
|
server.listen(PORT, HOST, () => {
|
||||||
console.log(`\n 🚀 Data Visualization Display Wall`);
|
console.log(`\n 🚀 Data Visualization Display Wall (WebSocket Enabled)`);
|
||||||
console.log(` 📊 Server running at http://${HOST === '0.0.0.0' ? 'localhost' : HOST}:${PORT}`);
|
console.log(` 📊 Server running at http://${HOST === '0.0.0.0' ? 'localhost' : HOST}:${PORT}`);
|
||||||
console.log(` ⚙️ Configure Prometheus sources at http://${HOST === '0.0.0.0' ? 'localhost' : HOST}:${PORT}/settings\n`);
|
console.log(` ⚙️ Configure Prometheus sources at http://${HOST === '0.0.0.0' ? 'localhost' : HOST}:${PORT}/settings\n`);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -67,6 +67,22 @@ async function initDatabase() {
|
|||||||
`);
|
`);
|
||||||
console.log(' ✅ Table "site_settings" ready');
|
console.log(' ✅ Table "site_settings" ready');
|
||||||
|
|
||||||
|
// Create server_locations table
|
||||||
|
await connection.query(`
|
||||||
|
CREATE TABLE IF NOT EXISTS server_locations (
|
||||||
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
|
ip VARCHAR(255) NOT NULL UNIQUE,
|
||||||
|
country CHAR(2),
|
||||||
|
country_name VARCHAR(100),
|
||||||
|
region VARCHAR(100),
|
||||||
|
city VARCHAR(100),
|
||||||
|
latitude DOUBLE,
|
||||||
|
longitude DOUBLE,
|
||||||
|
last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
||||||
|
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||||
|
`);
|
||||||
|
console.log(' ✅ Table "server_locations" ready');
|
||||||
|
|
||||||
console.log('\n🎉 Database initialization complete!\n');
|
console.log('\n🎉 Database initialization complete!\n');
|
||||||
await connection.end();
|
await connection.end();
|
||||||
}
|
}
|
||||||
|
|||||||
134
server/latency-service.js
Normal file
134
server/latency-service.js
Normal file
@@ -0,0 +1,134 @@
|
|||||||
|
const axios = require('axios');
|
||||||
|
const cache = require('./cache');
|
||||||
|
const db = require('./db');
|
||||||
|
|
||||||
|
const POLL_INTERVAL = 10000; // 10 seconds
|
||||||
|
|
||||||
|
async function pollLatency() {
|
||||||
|
try {
|
||||||
|
const [routes] = await db.query(`
|
||||||
|
SELECT r.*, s.url
|
||||||
|
FROM latency_routes r
|
||||||
|
JOIN prometheus_sources s ON r.source_id = s.id
|
||||||
|
WHERE s.type = 'blackbox'
|
||||||
|
`);
|
||||||
|
|
||||||
|
if (routes.length === 0) return;
|
||||||
|
|
||||||
|
// Poll each route
|
||||||
|
await Promise.allSettled(routes.map(async (route) => {
|
||||||
|
try {
|
||||||
|
// Blackbox exporter probe URL
|
||||||
|
// We assume ICMP module for now. If target is a URL, maybe use http_2xx
|
||||||
|
let module = 'icmp';
|
||||||
|
let target = route.latency_target;
|
||||||
|
|
||||||
|
if (target.startsWith('http://') || target.startsWith('https://')) {
|
||||||
|
module = 'http_2xx';
|
||||||
|
}
|
||||||
|
|
||||||
|
const probeUrl = `${route.url.replace(/\/+$/, '')}/probe?module=${module}&target=${encodeURIComponent(target)}`;
|
||||||
|
|
||||||
|
const startTime = Date.now();
|
||||||
|
const response = await axios.get(probeUrl, {
|
||||||
|
timeout: 5000,
|
||||||
|
responseType: 'text',
|
||||||
|
validateStatus: false
|
||||||
|
});
|
||||||
|
|
||||||
|
if (typeof response.data !== 'string') {
|
||||||
|
throw new Error('Response data is not a string');
|
||||||
|
}
|
||||||
|
|
||||||
|
const lines = response.data.split('\n').map(l => l.trim()).filter(l => l && !l.startsWith('#'));
|
||||||
|
|
||||||
|
// 1. Check if the probe was successful
|
||||||
|
let isProbeSuccess = false;
|
||||||
|
for (const line of lines) {
|
||||||
|
if (/^probe_success(\{.*\})?\s+1/.test(line)) {
|
||||||
|
isProbeSuccess = true;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Extract latency from priority metrics
|
||||||
|
const targetMetrics = [
|
||||||
|
'probe_icmp_duration_seconds',
|
||||||
|
'probe_http_duration_seconds',
|
||||||
|
'probe_duration_seconds'
|
||||||
|
];
|
||||||
|
|
||||||
|
let foundLatency = null;
|
||||||
|
for (const metricName of targetMetrics) {
|
||||||
|
let bestLine = null;
|
||||||
|
|
||||||
|
// First pass: look for phase="rtt" which is the most accurate "ping"
|
||||||
|
for (const line of lines) {
|
||||||
|
if (line.startsWith(metricName) && line.includes('phase="rtt"')) {
|
||||||
|
bestLine = line;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Second pass: if no rtt phase, look for a line without phases (legacy format) or just the first line
|
||||||
|
if (!bestLine) {
|
||||||
|
for (const line of lines) {
|
||||||
|
if (line.startsWith(metricName)) {
|
||||||
|
// Prefer lines without {} if possible, otherwise take the first one
|
||||||
|
if (!line.includes('{')) {
|
||||||
|
bestLine = line;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
if (!bestLine) bestLine = line;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (bestLine) {
|
||||||
|
// Regex to capture the number, including scientific notation
|
||||||
|
const regex = new RegExp(`^${metricName}(?:\\{[^}]*\\})?\\s+([\\d.eE+-]+)`);
|
||||||
|
const match = bestLine.match(regex);
|
||||||
|
|
||||||
|
if (match) {
|
||||||
|
const val = parseFloat(match[1]);
|
||||||
|
if (!isNaN(val)) {
|
||||||
|
foundLatency = val * 1000; // convert to ms
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. Final decision
|
||||||
|
// If it's a success, use found latency. If success=0 or missing, handle carefully.
|
||||||
|
let latency;
|
||||||
|
if (isProbeSuccess && foundLatency !== null) {
|
||||||
|
latency = foundLatency;
|
||||||
|
} else {
|
||||||
|
// If probe failed or metrics missing, do not show 0, show null (Measurement in progress/Error)
|
||||||
|
latency = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save to Valkey
|
||||||
|
await cache.set(`latency:route:${route.id}`, latency, 60);
|
||||||
|
} catch (err) {
|
||||||
|
await cache.set(`latency:route:${route.id}`, null, 60);
|
||||||
|
}
|
||||||
|
}));
|
||||||
|
} catch (err) {
|
||||||
|
console.error('[Latency] Service error:', err.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let intervalId = null;
|
||||||
|
|
||||||
|
function start() {
|
||||||
|
if (intervalId) clearInterval(intervalId);
|
||||||
|
pollLatency(); // initial run
|
||||||
|
intervalId = setInterval(pollLatency, POLL_INTERVAL);
|
||||||
|
console.log('[Latency] Background service started (polling Blackbox Exporter directly)');
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
start
|
||||||
|
};
|
||||||
@@ -5,9 +5,21 @@ const https = require('https');
|
|||||||
const QUERY_TIMEOUT = 10000;
|
const QUERY_TIMEOUT = 10000;
|
||||||
|
|
||||||
// Reusable agents to handle potential redirect issues and protocol mismatches
|
// Reusable agents to handle potential redirect issues and protocol mismatches
|
||||||
|
const crypto = require('crypto');
|
||||||
const httpAgent = new http.Agent({ keepAlive: true });
|
const httpAgent = new http.Agent({ keepAlive: true });
|
||||||
const httpsAgent = new https.Agent({ keepAlive: true, rejectUnauthorized: false });
|
const httpsAgent = new https.Agent({ keepAlive: true, rejectUnauthorized: false });
|
||||||
|
|
||||||
|
const serverIdMap = new Map(); // token -> { instance, job, source }
|
||||||
|
const SECRET = process.env.APP_SECRET || 'prom-data-panel-stable-secret-key-123';
|
||||||
|
|
||||||
|
function getServerToken(instance, job, source) {
|
||||||
|
const hash = crypto.createHmac('sha256', SECRET)
|
||||||
|
.update(`${instance}:${job}:${source}`)
|
||||||
|
.digest('hex')
|
||||||
|
.substring(0, 16);
|
||||||
|
return hash;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Normalize URL and ensure protocol
|
* Normalize URL and ensure protocol
|
||||||
*/
|
*/
|
||||||
@@ -98,6 +110,38 @@ async function query(baseUrl, expr) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all targets from Prometheus
|
||||||
|
*/
|
||||||
|
async function getTargets(baseUrl) {
|
||||||
|
const url = normalizeUrl(baseUrl);
|
||||||
|
try {
|
||||||
|
const controller = new AbortController();
|
||||||
|
const timer = setTimeout(() => controller.abort(), QUERY_TIMEOUT);
|
||||||
|
|
||||||
|
const res = await fetch(`${url}/api/v1/targets`, {
|
||||||
|
signal: controller.signal
|
||||||
|
});
|
||||||
|
|
||||||
|
clearTimeout(timer);
|
||||||
|
|
||||||
|
if (!res.ok) {
|
||||||
|
throw new Error(`Prometheus returned HTTP ${res.status}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = await res.json();
|
||||||
|
if (data.status !== 'success') {
|
||||||
|
throw new Error(`Prometheus targets fetch failed: ${data.error || 'unknown error'}`);
|
||||||
|
}
|
||||||
|
return data.data.activeTargets || [];
|
||||||
|
} catch (err) {
|
||||||
|
if (err.name === 'AbortError') {
|
||||||
|
throw new Error('Prometheus targets fetch timed out');
|
||||||
|
}
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Execute a Prometheus range query
|
* Execute a Prometheus range query
|
||||||
*/
|
*/
|
||||||
@@ -133,9 +177,6 @@ async function queryRange(baseUrl, expr, start, end, step) {
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get overview metrics from a single Prometheus source
|
|
||||||
*/
|
|
||||||
async function getOverviewMetrics(url, sourceName) {
|
async function getOverviewMetrics(url, sourceName) {
|
||||||
// Run all queries in parallel
|
// Run all queries in parallel
|
||||||
const [
|
const [
|
||||||
@@ -147,9 +188,7 @@ async function getOverviewMetrics(url, sourceName) {
|
|||||||
diskFreeResult,
|
diskFreeResult,
|
||||||
netRxResult,
|
netRxResult,
|
||||||
netTxResult,
|
netTxResult,
|
||||||
traffic24hRxResult,
|
targetsResult
|
||||||
traffic24hTxResult,
|
|
||||||
upResult
|
|
||||||
] = await Promise.all([
|
] = await Promise.all([
|
||||||
// CPU usage per instance: 1 - avg idle
|
// CPU usage per instance: 1 - avg idle
|
||||||
query(url, '100 - (avg by (instance, job) (rate(node_cpu_seconds_total{mode="idle"}[1m])) * 100)').catch(() => []),
|
query(url, '100 - (avg by (instance, job) (rate(node_cpu_seconds_total{mode="idle"}[1m])) * 100)').catch(() => []),
|
||||||
@@ -159,31 +198,36 @@ async function getOverviewMetrics(url, sourceName) {
|
|||||||
query(url, 'node_memory_MemTotal_bytes').catch(() => []),
|
query(url, 'node_memory_MemTotal_bytes').catch(() => []),
|
||||||
// Memory available per instance
|
// Memory available per instance
|
||||||
query(url, 'node_memory_MemAvailable_bytes').catch(() => []),
|
query(url, 'node_memory_MemAvailable_bytes').catch(() => []),
|
||||||
// Disk total per instance (root filesystem + /data)
|
// Disk total per instance (excluding virtual fs and FUSE/rclone mounts)
|
||||||
query(url, 'sum by (instance, job) (node_filesystem_size_bytes{mountpoint=~"/|/data",fstype!="tmpfs"})').catch(() => []),
|
query(url, 'sum by (instance, job) (node_filesystem_size_bytes{fstype!~"tmpfs|autofs|proc|sysfs|fuse.*", mountpoint!~"/tmp.*|/var/lib/docker/.*|/run/.*"})').catch(() => []),
|
||||||
// Disk free per instance (root filesystem + /data)
|
// Disk free per instance
|
||||||
query(url, 'sum by (instance, job) (node_filesystem_free_bytes{mountpoint=~"/|/data",fstype!="tmpfs"})').catch(() => []),
|
query(url, 'sum by (instance, job) (node_filesystem_free_bytes{fstype!~"tmpfs|autofs|proc|sysfs|fuse.*", mountpoint!~"/tmp.*|/var/lib/docker/.*|/run/.*"})').catch(() => []),
|
||||||
// Network receive rate (bytes/sec)
|
// Network receive rate (bytes/sec)
|
||||||
query(url, 'sum by (instance, job) (rate(node_network_receive_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[1m]))').catch(() => []),
|
query(url, 'sum by (instance, job) (rate(node_network_receive_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[1m]))').catch(() => []),
|
||||||
// Network transmit rate (bytes/sec)
|
// Network transmit rate (bytes/sec)
|
||||||
query(url, 'sum by (instance, job) (rate(node_network_transmit_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[1m]))').catch(() => []),
|
query(url, 'sum by (instance, job) (rate(node_network_transmit_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[1m]))').catch(() => []),
|
||||||
// Total traffic received in last 24h
|
// Targets status from /api/v1/targets
|
||||||
query(url, 'sum by (instance, job) (increase(node_network_receive_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[24h]))').catch(() => []),
|
getTargets(url).catch(() => [])
|
||||||
// Total traffic transmitted in last 24h
|
|
||||||
query(url, 'sum by (instance, job) (increase(node_network_transmit_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[24h]))').catch(() => []),
|
|
||||||
// Up instances (at least one successful scrape in last 5m)
|
|
||||||
// We broaden the job filter to catch more variations of node-exporter jobs
|
|
||||||
query(url, 'max_over_time(up{job=~".*node.*|.*exporter.*|.*host.*"}[5m])').catch(() => [])
|
|
||||||
]);
|
]);
|
||||||
|
|
||||||
|
// Fetch 24h detailed traffic using the A*duration logic
|
||||||
|
const traffic24hSum = await get24hTrafficSum(url).catch(() => ({ rx: 0, tx: 0 }));
|
||||||
|
|
||||||
// Build per-instance data map
|
// Build per-instance data map
|
||||||
const instances = new Map();
|
const instances = new Map();
|
||||||
|
|
||||||
const getOrCreate = (metric) => {
|
const getOrCreate = (metric) => {
|
||||||
const key = metric.instance;
|
const originalInstance = metric.instance || 'Unknown';
|
||||||
if (!instances.has(key)) {
|
const job = metric.job || 'Unknown';
|
||||||
instances.set(key, {
|
const token = getServerToken(originalInstance, job, sourceName);
|
||||||
instance: key,
|
|
||||||
|
// Store mapping for detail queries
|
||||||
|
serverIdMap.set(token, { instance: originalInstance, source: sourceName, job });
|
||||||
|
|
||||||
|
if (!instances.has(token)) {
|
||||||
|
instances.set(token, {
|
||||||
|
instance: token, // This is the masked IP SENT TO FRONTEND
|
||||||
|
originalInstance, // Keep internal for aggregation/parsing
|
||||||
job: metric.job || 'Unknown',
|
job: metric.job || 'Unknown',
|
||||||
source: sourceName,
|
source: sourceName,
|
||||||
cpuPercent: 0,
|
cpuPercent: 0,
|
||||||
@@ -194,10 +238,12 @@ async function getOverviewMetrics(url, sourceName) {
|
|||||||
diskUsed: 0,
|
diskUsed: 0,
|
||||||
netRx: 0,
|
netRx: 0,
|
||||||
netTx: 0,
|
netTx: 0,
|
||||||
up: false
|
up: false,
|
||||||
|
memPercent: 0,
|
||||||
|
diskPercent: 0
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
const inst = instances.get(key);
|
const inst = instances.get(token);
|
||||||
// If job was Unknown but we now have a job name, update it
|
// If job was Unknown but we now have a job name, update it
|
||||||
if (inst.job === 'Unknown' && metric.job) {
|
if (inst.job === 'Unknown' && metric.job) {
|
||||||
inst.job = metric.job;
|
inst.job = metric.job;
|
||||||
@@ -205,10 +251,17 @@ async function getOverviewMetrics(url, sourceName) {
|
|||||||
return inst;
|
return inst;
|
||||||
};
|
};
|
||||||
|
|
||||||
// Parse UP status
|
// Initialize instances from targets first (to ensure we have all servers even if they have no metrics)
|
||||||
for (const r of upResult) {
|
for (const target of targetsResult) {
|
||||||
const inst = getOrCreate(r.metric);
|
const labels = target.labels || {};
|
||||||
inst.up = parseFloat(r.value[1]) === 1;
|
const instance = labels.instance;
|
||||||
|
const job = labels.job || '';
|
||||||
|
|
||||||
|
// Include every target from the activeTargets list
|
||||||
|
if (instance) {
|
||||||
|
const inst = getOrCreate(labels);
|
||||||
|
inst.up = target.health === 'up';
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Parse CPU usage
|
// Parse CPU usage
|
||||||
@@ -253,14 +306,18 @@ async function getOverviewMetrics(url, sourceName) {
|
|||||||
inst.netTx = parseFloat(r.value[1]) || 0;
|
inst.netTx = parseFloat(r.value[1]) || 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Final check: If an instance has non-zero CPU or Memory total data but is marked offline,
|
|
||||||
// it means we missed its 'up' metric due to job labels, but it's clearly sending data.
|
|
||||||
for (const inst of instances.values()) {
|
for (const inst of instances.values()) {
|
||||||
if (!inst.up && (inst.cpuPercent > 0 || inst.memTotal > 0)) {
|
if (!inst.up && (inst.cpuPercent > 0 || inst.memTotal > 0)) {
|
||||||
inst.up = true;
|
inst.up = true;
|
||||||
}
|
}
|
||||||
|
// Calculate percentages on backend
|
||||||
|
inst.memPercent = inst.memTotal > 0 ? (inst.memUsed / inst.memTotal * 100) : 0;
|
||||||
|
inst.diskPercent = inst.diskTotal > 0 ? (inst.diskUsed / inst.diskTotal * 100) : 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const allInstancesList = Array.from(instances.values());
|
||||||
|
const activeInstances = allInstancesList.filter(inst => inst.up);
|
||||||
|
|
||||||
// Aggregate
|
// Aggregate
|
||||||
let totalCpuUsed = 0, totalCpuCores = 0;
|
let totalCpuUsed = 0, totalCpuCores = 0;
|
||||||
let totalMemUsed = 0, totalMemTotal = 0;
|
let totalMemUsed = 0, totalMemTotal = 0;
|
||||||
@@ -268,7 +325,7 @@ async function getOverviewMetrics(url, sourceName) {
|
|||||||
let totalNetRx = 0, totalNetTx = 0;
|
let totalNetRx = 0, totalNetTx = 0;
|
||||||
let totalTraffic24hRx = 0, totalTraffic24hTx = 0;
|
let totalTraffic24hRx = 0, totalTraffic24hTx = 0;
|
||||||
|
|
||||||
for (const inst of instances.values()) {
|
for (const inst of activeInstances) {
|
||||||
totalCpuUsed += (inst.cpuPercent / 100) * inst.cpuCores;
|
totalCpuUsed += (inst.cpuPercent / 100) * inst.cpuCores;
|
||||||
totalCpuCores += inst.cpuCores;
|
totalCpuCores += inst.cpuCores;
|
||||||
totalMemUsed += inst.memUsed;
|
totalMemUsed += inst.memUsed;
|
||||||
@@ -279,16 +336,13 @@ async function getOverviewMetrics(url, sourceName) {
|
|||||||
totalNetTx += inst.netTx;
|
totalNetTx += inst.netTx;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Parse 24h traffic
|
// Use the pre-calculated 24h traffic
|
||||||
for (const r of traffic24hRxResult) {
|
totalTraffic24hRx = traffic24hSum.rx;
|
||||||
totalTraffic24hRx += parseFloat(r.value[1]) || 0;
|
totalTraffic24hTx = traffic24hSum.tx;
|
||||||
}
|
|
||||||
for (const r of traffic24hTxResult) {
|
|
||||||
totalTraffic24hTx += parseFloat(r.value[1]) || 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
totalServers: instances.size,
|
totalServers: allInstancesList.length,
|
||||||
|
activeServers: activeInstances.length,
|
||||||
cpu: {
|
cpu: {
|
||||||
used: totalCpuUsed,
|
used: totalCpuUsed,
|
||||||
total: totalCpuCores,
|
total: totalCpuCores,
|
||||||
@@ -311,19 +365,86 @@ async function getOverviewMetrics(url, sourceName) {
|
|||||||
},
|
},
|
||||||
traffic24h: {
|
traffic24h: {
|
||||||
rx: totalTraffic24hRx,
|
rx: totalTraffic24hRx,
|
||||||
tx: totalTraffic24hTx
|
tx: totalTraffic24hTx,
|
||||||
|
total: totalTraffic24hRx + totalTraffic24hTx
|
||||||
},
|
},
|
||||||
servers: Array.from(instances.values())
|
servers: allInstancesList
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate total traffic from bandwidth data points using the A*duration logic
|
||||||
|
*/
|
||||||
|
function calculateTrafficFromHistory(values) {
|
||||||
|
if (!values || values.length < 2) return 0;
|
||||||
|
|
||||||
|
let totalBytes = 0;
|
||||||
|
for (let i = 0; i < values.length - 1; i++) {
|
||||||
|
const [tsA, valA] = values[i];
|
||||||
|
const [tsB] = values[i+1];
|
||||||
|
const duration = tsB - tsA;
|
||||||
|
totalBytes += parseFloat(valA) * duration;
|
||||||
|
}
|
||||||
|
return totalBytes;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get total traffic for the past 24h by fetching all points and integrating
|
||||||
|
*/
|
||||||
|
async function get24hTrafficSum(url) {
|
||||||
|
const now = Math.floor(Date.now() / 1000);
|
||||||
|
const start = now - 86400;
|
||||||
|
const step = 60; // 1-minute points for calculation
|
||||||
|
|
||||||
|
const [rxResult, txResult] = await Promise.all([
|
||||||
|
queryRange(url, 'sum(rate(node_network_receive_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[1m]))', start, now, step).catch(() => []),
|
||||||
|
queryRange(url, 'sum(rate(node_network_transmit_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[1m]))', start, now, step).catch(() => [])
|
||||||
|
]);
|
||||||
|
|
||||||
|
const rxValues = rxResult.length > 0 ? rxResult[0].values : [];
|
||||||
|
const txValues = txResult.length > 0 ? txResult[0].values : [];
|
||||||
|
|
||||||
|
return {
|
||||||
|
rx: calculateTrafficFromHistory(rxValues),
|
||||||
|
tx: calculateTrafficFromHistory(txValues)
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get network traffic history (past 24h, 15-min intervals)
|
* Get total traffic for a specific server in the past 24h
|
||||||
|
*/
|
||||||
|
async function get24hServerTrafficSum(url, instance, job) {
|
||||||
|
const node = resolveToken(instance);
|
||||||
|
const now = Math.floor(Date.now() / 1000);
|
||||||
|
const start = now - 86400;
|
||||||
|
const step = 60;
|
||||||
|
|
||||||
|
const rxExpr = `sum(rate(node_network_receive_bytes_total{instance="${node}",job="${job}",device!~'tap.*|veth.*|br.*|docker.*|virbr*|podman.*|lo.*|vmbr.*|fwbr.|ip.*|gre.*|virbr.*|vnet.*'}[1m]))`;
|
||||||
|
const txExpr = `sum(rate(node_network_transmit_bytes_total{instance="${node}",job="${job}",device!~'tap.*|veth.*|br.*|docker.*|virbr*|podman.*|lo.*|vmbr.*|fwbr.|ip.*|gre.*|virbr.*|vnet.*'}[1m]))`;
|
||||||
|
|
||||||
|
const [rxResult, txResult] = await Promise.all([
|
||||||
|
queryRange(url, rxExpr, start, now, step).catch(() => []),
|
||||||
|
queryRange(url, txExpr, start, now, step).catch(() => [])
|
||||||
|
]);
|
||||||
|
|
||||||
|
const rxValues = rxResult.length > 0 ? rxResult[0].values : [];
|
||||||
|
const txValues = txResult.length > 0 ? txResult[0].values : [];
|
||||||
|
|
||||||
|
return {
|
||||||
|
rx: calculateTrafficFromHistory(rxValues),
|
||||||
|
tx: calculateTrafficFromHistory(txValues)
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get network traffic history (past 24h, 5-min intervals for chart)
|
||||||
*/
|
*/
|
||||||
async function getNetworkHistory(url) {
|
async function getNetworkHistory(url) {
|
||||||
const now = Math.floor(Date.now() / 1000);
|
const now = Math.floor(Date.now() / 1000);
|
||||||
const start = now - 86400; // 24h ago
|
const start = now - 86400; // 24h ago
|
||||||
const step = 900; // 15 minutes
|
const step = 300; // 5 minutes for better resolution on chart
|
||||||
|
|
||||||
const [rxResult, txResult] = await Promise.all([
|
const [rxResult, txResult] = await Promise.all([
|
||||||
queryRange(url,
|
queryRange(url,
|
||||||
@@ -412,13 +533,275 @@ function mergeCpuHistories(histories) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function resolveToken(token) {
|
||||||
|
if (serverIdMap.has(token)) {
|
||||||
|
return serverIdMap.get(token).instance;
|
||||||
|
}
|
||||||
|
return token;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get detailed metrics for a specific server (node)
|
||||||
|
*/
|
||||||
|
async function getServerDetails(baseUrl, instance, job) {
|
||||||
|
const url = normalizeUrl(baseUrl);
|
||||||
|
const node = resolveToken(instance);
|
||||||
|
|
||||||
|
// Queries based on the requested dashboard structure
|
||||||
|
const queries = {
|
||||||
|
cpuIowait: `avg(rate(node_cpu_seconds_total{mode="iowait", instance="${node}"}[1m])) * 100`,
|
||||||
|
cpuOther: `avg(rate(node_cpu_seconds_total{mode=~"nice|steal|guest|guest_nice", instance="${node}"}[1m])) * 100`,
|
||||||
|
cpuBusy: `100 * (1 - avg(rate(node_cpu_seconds_total{mode="idle", instance="${node}"}[1m])))`,
|
||||||
|
sysLoad: `node_load1{instance="${node}",job="${job}"} * 100 / count(count(node_cpu_seconds_total{instance="${node}",job="${job}"}) by (cpu))`,
|
||||||
|
memUsedPct: `(1 - (node_memory_MemAvailable_bytes{instance="${node}", job="${job}"} / node_memory_MemTotal_bytes{instance="${node}", job="${job}"})) * 100`,
|
||||||
|
swapUsedPct: `((node_memory_SwapTotal_bytes{instance="${node}",job="${job}"} - node_memory_SwapFree_bytes{instance="${node}",job="${job}"}) / (node_memory_SwapTotal_bytes{instance="${node}",job="${job}"})) * 100`,
|
||||||
|
rootFsUsedPct: `100 - ((node_filesystem_avail_bytes{instance="${node}",job="${job}",mountpoint="/",fstype!~"rootfs|tmpfs"} * 100) / node_filesystem_size_bytes{instance="${node}",job="${job}",mountpoint="/",fstype!~"rootfs|tmpfs"})`,
|
||||||
|
cpuCores: `count(count(node_cpu_seconds_total{instance="${node}",job="${job}"}) by (cpu))`,
|
||||||
|
memTotal: `node_memory_MemTotal_bytes{instance="${node}",job="${job}"}`,
|
||||||
|
uptime: `node_time_seconds{instance="${node}",job="${job}"} - node_boot_time_seconds{instance="${node}",job="${job}"}`,
|
||||||
|
netRx: `sum(rate(node_network_receive_bytes_total{instance="${node}",job="${job}",device!~'tap.*|veth.*|br.*|docker.*|virbr*|podman.*|lo.*|vmbr.*|fwbr.|ip.*|gre.*|virbr.*|vnet.*'}[1m]))`,
|
||||||
|
netTx: `sum(rate(node_network_transmit_bytes_total{instance="${node}",job="${job}",device!~'tap.*|veth.*|br.*|docker.*|virbr*|podman.*|lo.*|vmbr.*|fwbr.|ip.*|gre.*|virbr.*|vnet.*'}[1m]))`,
|
||||||
|
sockstatTcp: `node_sockstat_TCP_inuse{instance="${node}",job="${job}"}`,
|
||||||
|
sockstatTcpMem: `node_sockstat_TCP_mem{instance="${node}",job="${job}"} * 4096`,
|
||||||
|
// Get individual partitions (excluding virtual and FUSE mounts)
|
||||||
|
partitions_size: `node_filesystem_size_bytes{instance="${node}", job="${job}", fstype!~"tmpfs|autofs|proc|sysfs|fuse.*", mountpoint!~"/tmp.*|/var/lib/docker/.*|/run/.*"}`,
|
||||||
|
partitions_free: `node_filesystem_free_bytes{instance="${node}", job="${job}", fstype!~"tmpfs|autofs|proc|sysfs|fuse.*", mountpoint!~"/tmp.*|/var/lib/docker/.*|/run/.*"}`
|
||||||
|
};
|
||||||
|
|
||||||
|
const results = {};
|
||||||
|
const queryPromises = Object.entries(queries).map(async ([key, expr]) => {
|
||||||
|
try {
|
||||||
|
const res = await query(url, expr);
|
||||||
|
if (key.startsWith('partitions_')) {
|
||||||
|
results[key] = res.map(r => ({
|
||||||
|
mountpoint: r.metric.mountpoint,
|
||||||
|
value: parseFloat(r.value[1]) || 0
|
||||||
|
}));
|
||||||
|
} else {
|
||||||
|
results[key] = res.length > 0 ? parseFloat(res[0].value[1]) : 0;
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
console.error(`[Prometheus] Error querying ${key} for ${node}:`, e.message);
|
||||||
|
results[key] = key.startsWith('partitions_') ? [] : 0;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
await Promise.all(queryPromises);
|
||||||
|
|
||||||
|
// Group partitions
|
||||||
|
const partitionsMap = {};
|
||||||
|
(results.partitions_size || []).forEach(p => {
|
||||||
|
partitionsMap[p.mountpoint] = { mountpoint: p.mountpoint, size: p.value, free: 0 };
|
||||||
|
});
|
||||||
|
(results.partitions_free || []).forEach(p => {
|
||||||
|
if (partitionsMap[p.mountpoint]) {
|
||||||
|
partitionsMap[p.mountpoint].free = p.value;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
results.partitions = Object.values(partitionsMap).map(p => ({
|
||||||
|
...p,
|
||||||
|
used: p.size - p.free,
|
||||||
|
percent: p.size > 0 ? ((p.size - p.free) / p.size * 100) : 0
|
||||||
|
})).sort((a, b) => a.mountpoint.localeCompare(b.mountpoint));
|
||||||
|
|
||||||
|
// Calculate total disk size
|
||||||
|
results.totalDiskSize = results.partitions.reduce((sum, p) => sum + (p.size || 0), 0);
|
||||||
|
|
||||||
|
delete results.partitions_size;
|
||||||
|
delete results.partitions_free;
|
||||||
|
|
||||||
|
// Add 24h traffic sum for this specific server
|
||||||
|
try {
|
||||||
|
const traffic24h = await get24hServerTrafficSum(baseUrl, instance, job);
|
||||||
|
results.traffic24h = traffic24h;
|
||||||
|
} catch (e) {
|
||||||
|
console.error(`[Prometheus] Error fetching 24h traffic for ${node}:`, e.message);
|
||||||
|
results.traffic24h = { rx: 0, tx: 0 };
|
||||||
|
}
|
||||||
|
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Get historical (range) metrics for a specific server (node).
 *
 * @param {string} baseUrl  - Prometheus base URL; normalized via normalizeUrl.
 * @param {string} instance - Instance token; resolved to a node via resolveToken.
 * @param {string} job      - Prometheus job label value.
 * @param {string} metric   - One of: cpuBusy, sysLoad, memUsedPct, swapUsedPct,
 *                            rootFsUsedPct, netRx, netTx, sockstatTcp,
 *                            sockstatTcpMem, networkTrend.
 * @param {string} [range='1h'] - Named range (see parseRange); ignored when start/end given.
 * @param {string|null} [start] - Optional absolute start time (overrides range).
 * @param {string|null} [end]   - Optional absolute end time (overrides range).
 * @returns {Promise<object>} { timestamps, values } for single-series metrics, or
 *   { timestamps, rx, tx, stats } for 'networkTrend'. Timestamps are in ms.
 * @throws {Error} When `metric` is unknown, or when the range query fails.
 */
async function getServerHistory(baseUrl, instance, job, metric, range = '1h', start = null, end = null) {
  const url = normalizeUrl(baseUrl);
  const node = resolveToken(instance);

  // CPU Busy history: 100 - idle (same expression as the instant query in getServerDetails)
  if (metric === 'cpuBusy') {
    const expr = `100 * (1 - avg(rate(node_cpu_seconds_total{mode="idle", instance="${node}"}[1m])))`;
    const rangeObj = parseRange(range, start, end);
    const result = await queryRange(url, expr, rangeObj.queryStart, rangeObj.queryEnd, rangeObj.step);

    if (!result || result.length === 0) return { timestamps: [], values: [] };

    return {
      // Prometheus returns unix seconds; convert to ms for chart consumers.
      timestamps: result[0].values.map(v => v[0] * 1000),
      values: result[0].values.map(v => parseFloat(v[1]))
    };
  }

  // Map metric keys to Prometheus expressions.
  // Kept in sync with the instant queries used by getServerDetails.
  const metricMap = {
    sysLoad: `node_load1{instance="${node}",job="${job}"} * 100 / count(count(node_cpu_seconds_total{instance="${node}",job="${job}"}) by (cpu))`,
    memUsedPct: `(1 - (node_memory_MemAvailable_bytes{instance="${node}", job="${job}"} / node_memory_MemTotal_bytes{instance="${node}", job="${job}"})) * 100`,
    swapUsedPct: `((node_memory_SwapTotal_bytes{instance="${node}",job="${job}"} - node_memory_SwapFree_bytes{instance="${node}",job="${job}"}) / (node_memory_SwapTotal_bytes{instance="${node}",job="${job}"})) * 100`,
    // fstype filter now matches getServerDetails (was fstype!="rootfs", which let
    // tmpfs mounts on "/" skew the history relative to the details view).
    rootFsUsedPct: `100 - ((node_filesystem_avail_bytes{instance="${node}",job="${job}",mountpoint="/",fstype!~"rootfs|tmpfs"} * 100) / node_filesystem_size_bytes{instance="${node}",job="${job}",mountpoint="/",fstype!~"rootfs|tmpfs"})`,
    // NOTE(review): the device regex fragments 'virbr*' and 'fwbr.' look like they
    // intend 'virbr.*' / 'fwbr.*' — kept byte-identical to the instant queries; verify.
    netRx: `sum(rate(node_network_receive_bytes_total{instance="${node}",job="${job}",device!~'tap.*|veth.*|br.*|docker.*|virbr*|podman.*|lo.*|vmbr.*|fwbr.|ip.*|gre.*|virbr.*|vnet.*'}[1m]))`,
    netTx: `sum(rate(node_network_transmit_bytes_total{instance="${node}",job="${job}",device!~'tap.*|veth.*|br.*|docker.*|virbr*|podman.*|lo.*|vmbr.*|fwbr.|ip.*|gre.*|virbr.*|vnet.*'}[1m]))`,
    sockstatTcp: `node_sockstat_TCP_inuse{instance="${node}",job="${job}"}`,
    sockstatTcpMem: `node_sockstat_TCP_mem{instance="${node}",job="${job}"} * 4096`
  };

  const rangeObj = parseRange(range, start, end);

  if (metric === 'networkTrend') {
    const [txResult, rxResult] = await Promise.all([
      queryRange(url, metricMap.netTx, rangeObj.queryStart, rangeObj.queryEnd, rangeObj.step),
      queryRange(url, metricMap.netRx, rangeObj.queryStart, rangeObj.queryEnd, rangeObj.step)
    ]);

    // Guard against a null/undefined result (the other branches guard the same way).
    const txSeries = txResult || [];
    const rxSeries = rxResult || [];

    if (txSeries.length === 0 && rxSeries.length === 0) return { timestamps: [], rx: [], tx: [] };

    const timestamps = (txSeries.length > 0 ? txSeries[0] : rxSeries[0]).values.map(v => v[0] * 1000);
    const tx = txSeries.length > 0 ? txSeries[0].values.map(v => parseFloat(v[1])) : new Array(timestamps.length).fill(0);
    const rx = rxSeries.length > 0 ? rxSeries[0].values.map(v => parseFloat(v[1])) : new Array(timestamps.length).fill(0);

    // Calculate statistics on backend: integrate rate samples (bytes/s)
    // over each sample interval to approximate total transferred bytes.
    let rxTotal = 0;
    let txTotal = 0;
    for (let i = 0; i < timestamps.length - 1; i++) {
      const duration = (timestamps[i + 1] - timestamps[i]) / 1000;
      rxTotal += (rx[i] || 0) * duration;
      txTotal += (tx[i] || 0) * duration;
    }

    // 95th percentile of TX samples (nearest-rank on a sorted copy; sort() mutates).
    const sortedTx = [...tx].sort((a, b) => a - b);
    const p95Idx = Math.floor(sortedTx.length * 0.95);
    const p95 = sortedTx.length > 0 ? sortedTx[p95Idx] : 0;

    return {
      timestamps,
      tx,
      rx,
      stats: {
        rxTotal,
        txTotal,
        p95,
        total: rxTotal + txTotal
      }
    };
  }

  const expr = metricMap[metric];
  if (!expr) throw new Error('Invalid metric for history');

  try {
    const result = await queryRange(url, expr, rangeObj.queryStart, rangeObj.queryEnd, rangeObj.step);
    if (!result || result.length === 0) return { timestamps: [], values: [] };

    return {
      timestamps: result[0].values.map(v => v[0] * 1000),
      values: result[0].values.map(v => parseFloat(v[1]))
    };
  } catch (err) {
    console.error(`[Prometheus] Error fetching history for ${metric} on ${node}:`, err.message);
    throw err; // propagate so the route can surface an HTTP error
  }
}
|
||||||
|
|
||||||
|
/**
 * Resolve a time-range specification into Prometheus query_range parameters.
 *
 * Precedence: explicit start/end win over `range`. With start/end, the step is
 * sized so the window yields ~100 points (minimum 15s). Otherwise `range` is
 * looked up in a table of presets, then parsed as "<number><s|m|h|d>", and
 * finally falls back to 1h/60s.
 *
 * @param {string} range - Named range ('15m'...'7d') or '<number><s|m|h|d>'.
 * @param {string|number|Date|null} start - Optional absolute start (Date-parseable).
 * @param {string|number|Date|null} end   - Optional absolute end (Date-parseable).
 * @returns {{queryStart:number, queryEnd:number, step:number}} Unix seconds plus step in seconds.
 */
function parseRange(range, start, end) {
  let duration, step, queryStart, queryEnd;

  if (start && end) {
    queryStart = Math.floor(new Date(start).getTime() / 1000);
    queryEnd = Math.floor(new Date(end).getTime() / 1000);
    duration = queryEnd - queryStart;
    // Aim for ~100 data points, but never finer than Prometheus' typical 15s scrape.
    step = Math.max(15, Math.floor(duration / 100));
  } else {
    const rangeMap = {
      '15m': { duration: 900, step: 15 },
      '30m': { duration: 1800, step: 30 },
      '1h': { duration: 3600, step: 60 },
      '6h': { duration: 21600, step: 300 },
      '12h': { duration: 43200, step: 600 },
      '24h': { duration: 86400, step: 900 },
      '2d': { duration: 172800, step: 1800 },
      '7d': { duration: 604800, step: 3600 }
    };

    if (rangeMap[range]) {
      duration = rangeMap[range].duration;
      step = rangeMap[range].step;
    } else {
      // Guard the regex: a non-string range (undefined/null) must not throw here.
      const match = typeof range === 'string' ? range.match(/^(\d+)([smhd])$/) : null;
      if (match) {
        const val = parseInt(match[1], 10); // explicit radix
        const unit = match[2];
        const multipliers = { s: 1, m: 60, h: 3600, d: 86400 };
        duration = val * (multipliers[unit] || 3600);
        step = Math.max(15, Math.floor(duration / 100));
      } else {
        // Unrecognized spec: default to the last hour at 60s resolution.
        duration = 3600;
        step = 60;
      }
    }
    queryEnd = Math.floor(Date.now() / 1000);
    queryStart = queryEnd - duration;
  }
  return { queryStart, queryEnd, step };
}
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
testConnection,
|
testConnection,
|
||||||
query,
|
query,
|
||||||
queryRange,
|
queryRange,
|
||||||
|
getTargets,
|
||||||
getOverviewMetrics,
|
getOverviewMetrics,
|
||||||
|
get24hTrafficSum,
|
||||||
getNetworkHistory,
|
getNetworkHistory,
|
||||||
mergeNetworkHistories,
|
mergeNetworkHistories,
|
||||||
getCpuHistory,
|
getCpuHistory,
|
||||||
mergeCpuHistories
|
mergeCpuHistories,
|
||||||
|
getServerDetails,
|
||||||
|
getServerHistory,
|
||||||
|
resolveToken,
|
||||||
|
getLatency: async (blackboxUrl, target) => {
|
||||||
|
if (!blackboxUrl || !target) return null;
|
||||||
|
try {
|
||||||
|
const normalized = blackboxUrl.trim().replace(/\/+$/, '');
|
||||||
|
|
||||||
|
// Construct a single optimized query searching for priority metrics and common labels
|
||||||
|
// Prioritize probe_icmp_duration_seconds OVER probe_duration_seconds
|
||||||
|
const queryExpr = `(
|
||||||
|
probe_icmp_duration_seconds{phase="rtt", instance="${target}"} or
|
||||||
|
probe_icmp_duration_seconds{phase="rtt", target="${target}"} or
|
||||||
|
probe_http_duration_seconds{phase="rtt", instance="${target}"} or
|
||||||
|
probe_http_duration_seconds{phase="rtt", target="${target}"} or
|
||||||
|
probe_icmp_duration_seconds{instance="${target}"} or
|
||||||
|
probe_icmp_duration_seconds{target="${target}"} or
|
||||||
|
probe_duration_seconds{instance="${target}"} or
|
||||||
|
probe_duration_seconds{target="${target}"}
|
||||||
|
)`;
|
||||||
|
|
||||||
|
const params = new URLSearchParams({ query: queryExpr });
|
||||||
|
const res = await fetch(`${normalized}/api/v1/query?${params.toString()}`);
|
||||||
|
|
||||||
|
if (res.ok) {
|
||||||
|
const data = await res.json();
|
||||||
|
if (data.status === 'success' && data.data.result.length > 0) {
|
||||||
|
return parseFloat(data.data.result[0].value[1]) * 1000;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
} catch (err) {
|
||||||
|
console.error(`[Prometheus] Error fetching latency for ${target}:`, err.message);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|||||||
Reference in New Issue
Block a user