Compare commits
196 Commits
f4d7f129dd
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6e4b444404 | ||
|
|
e05c681429 | ||
|
|
8e50437ed4 | ||
|
|
c254923bd6 | ||
|
|
e865006cee | ||
|
|
1430ecf366 | ||
|
|
ce739d1232 | ||
|
|
ba633c8be4 | ||
|
|
44843475c8 | ||
|
|
5e9dac6197 | ||
|
|
77743ce097 | ||
|
|
28667bcbee | ||
|
|
395b3f3c4e | ||
|
|
0c3ef3d07e | ||
|
|
83e8a96b2b | ||
|
|
0a8efaf29d | ||
|
|
34bf4d5e91 | ||
|
|
24b5f8455e | ||
|
|
e60fa2b982 | ||
|
|
b79cb09987 | ||
|
|
d7ac1bedb4 | ||
|
|
a876c854f4 | ||
|
|
18c81fb8bb | ||
|
|
d8c33ca3ee | ||
|
|
27c2fb0b95 | ||
|
|
bdc723d197 | ||
|
|
531913a83c | ||
|
|
b7e7c173d7 | ||
|
|
14796231f1 | ||
|
|
1826d4e34a | ||
|
|
e373e1ce62 | ||
|
|
4467b62a01 | ||
|
|
3519003a77 | ||
|
|
7362bcf206 | ||
|
|
2cd6c6ef27 | ||
|
|
cb27d1a249 | ||
|
|
710b6a719e | ||
|
|
66b5702d03 | ||
|
|
90634dfacf | ||
|
|
c9784ec48e | ||
|
|
cf1842f4e5 | ||
|
|
f1a215d504 | ||
|
|
9beba2a306 | ||
|
|
a3340cb630 | ||
|
|
5afcd3d86a | ||
|
|
e65de2c30b | ||
|
|
bfb40f4947 | ||
|
|
9854f478c0 | ||
|
|
3963d137de | ||
|
|
60d8a3d550 | ||
|
|
09f20ec81d | ||
|
|
06ddd5a8e1 | ||
|
|
a14fcdf158 | ||
|
|
6aa8ba5fbc | ||
|
|
2eae34bb96 | ||
|
|
05ae5dff2a | ||
|
|
64fc023f7b | ||
|
|
307a26c0db | ||
|
|
73401309f2 | ||
|
|
f169dd4267 | ||
|
|
09887b52d0 | ||
|
|
c94b697319 | ||
|
|
1bfee2026f | ||
|
|
c47e483028 | ||
|
|
864cbc3569 | ||
|
|
47a795cd73 | ||
|
|
92f97b7e51 | ||
|
|
3bdde47c60 | ||
|
|
1583758f29 | ||
|
|
0602e37bc9 | ||
|
|
41bdb38d51 | ||
|
|
d958aa8d74 | ||
|
|
2024523b46 | ||
|
|
da722ee07e | ||
|
|
bc8414df3d | ||
|
|
15f4b610af | ||
|
|
94ed27199a | ||
|
|
131c011c5c | ||
|
|
6d5b8bbb08 | ||
|
|
b4415f25ac | ||
|
|
d469dacc08 | ||
|
|
ee4354b571 | ||
|
|
50818b54ca | ||
|
|
480cdf3f6d | ||
|
|
832fd0fde1 | ||
|
|
fd0a52368a | ||
|
|
650cc6f1b5 | ||
|
|
ff1c53ea40 | ||
|
|
b79655ccdc | ||
|
|
1f12197a91 | ||
|
|
d0455fb032 | ||
|
|
b2f6f7d2d0 | ||
|
|
ff439bb831 | ||
|
|
3980d66b49 | ||
| b16d910051 | |||
|
|
7e3a8e12d0 | ||
|
|
1d728d991e | ||
|
|
96de50285f | ||
|
|
225ec71ac3 | ||
|
|
90e72af72c | ||
|
|
d0bc646c22 | ||
|
|
90b31f9926 | ||
|
|
61748b8959 | ||
|
|
217510c07d | ||
|
|
e4b97be54e | ||
|
|
43d2b80fb2 | ||
|
|
9845f2fe5c | ||
|
|
607d71d1ca | ||
|
|
d7b6d3aebb | ||
|
|
98fdeca33b | ||
|
|
646696b8a0 | ||
|
|
58c5d36022 | ||
|
|
5fedaa299b | ||
|
|
c42d512dbd | ||
|
|
5baffb7e05 | ||
|
|
1ce174bd93 | ||
|
|
7fdac71062 | ||
|
|
6b61104641 | ||
|
|
40eeb0b9dd | ||
|
|
62747f0fcf | ||
|
|
b2f14528a9 | ||
|
|
cc3c67eae9 | ||
|
|
542258a271 | ||
|
|
a35dac78f8 | ||
|
|
e8b60ce28b | ||
|
|
d4d2927963 | ||
|
|
97e87409b5 | ||
|
|
dddf9dba65 | ||
|
|
4e953c01fc | ||
|
|
90c7bd80b1 | ||
|
|
322621a97b | ||
|
|
058a6c73a1 | ||
|
|
84972cdaeb | ||
|
|
28432c9c23 | ||
|
|
afe7361e06 | ||
|
|
2a8cb32d47 | ||
|
|
d7d650c5f9 | ||
|
|
469ef9e448 | ||
|
|
e8e23c5af8 | ||
|
|
a103c7dbf5 | ||
|
|
0c217963bb | ||
|
|
73807eaaaf | ||
|
|
d5b70edd11 | ||
|
|
d67815c7b6 | ||
|
|
8c25f1735d | ||
|
|
5e40c19ef1 | ||
|
|
6b9de37bf9 | ||
|
|
46ef8131c7 | ||
|
|
e91dcc8c02 | ||
|
|
7c1f0d4e63 | ||
|
|
464c3193d1 | ||
|
|
aed9147074 | ||
|
|
9e827c9831 | ||
|
|
34a10e3cd2 | ||
|
|
144b9b817d | ||
|
|
b2c37b8fe3 | ||
|
|
f997b6236c | ||
|
|
d557588b47 | ||
|
|
5238167212 | ||
|
|
dc1a8a1a44 | ||
|
|
d595397f08 | ||
|
|
dc865c6d9d | ||
|
|
a9fe0f219a | ||
|
|
035ebd8d40 | ||
|
|
d7f8db89a3 | ||
|
|
2149aa0208 | ||
|
|
e7b8000808 | ||
|
|
484a7a766f | ||
|
|
e55e6e8af6 | ||
|
|
236a548f58 | ||
|
|
0cf10a7e8a | ||
|
|
6b82cfb561 | ||
|
|
e66905e57f | ||
|
|
672ea11598 | ||
|
|
bea8ed607e | ||
|
|
37444eb6f4 | ||
|
|
0f4d3a2986 | ||
|
|
af83f42d26 | ||
|
|
2fc84f999c | ||
|
|
316e0e1b7e | ||
|
|
e50f95c325 | ||
|
|
b3580c15cc | ||
|
|
ded8d1b18d | ||
|
|
3d4b926b16 | ||
|
|
755bd45a0b | ||
|
|
f6fa253a11 | ||
|
|
4e8cce52ea | ||
|
|
c6e6c91e77 | ||
|
|
e9ca358eb1 | ||
|
|
a1703e72be | ||
|
|
f3f49f2c8e | ||
|
|
79779d6fcf | ||
|
|
e2dbf06601 | ||
|
|
0914881d26 | ||
|
|
e77bdbcc9e | ||
|
|
286eb1687d |
21
.env.example
21
.env.example
@@ -1,10 +1,19 @@
|
||||
# PromdataPanel Environment Configuration
|
||||
# Note: Database and Cache settings will be automatically configured upon visiting /init.html
|
||||
|
||||
# Server Binding
|
||||
HOST=0.0.0.0
|
||||
PORT=3000
|
||||
|
||||
# Aggregation interval in milliseconds (default 5s)
|
||||
REFRESH_INTERVAL=5000
|
||||
|
||||
# Valkey/Redis Cache Configuration
|
||||
VALKEY_HOST=localhost
|
||||
VALKEY_PORT=6379
|
||||
VALKEY_PASSWORD=
|
||||
VALKEY_DB=dashboard
|
||||
VALKEY_TTL=30
|
||||
# Security
|
||||
# Keep remote setup disabled unless you explicitly need to initialize from another host.
|
||||
ALLOW_REMOTE_SETUP=false
|
||||
COOKIE_SECURE=false
|
||||
SESSION_TTL_SECONDS=86400
|
||||
PASSWORD_ITERATIONS=210000
|
||||
|
||||
# Runtime external data providers
|
||||
ENABLE_EXTERNAL_GEO_LOOKUP=false
|
||||
|
||||
151
README.md
151
README.md
@@ -1,92 +1,119 @@
|
||||
# 数据可视化展示大屏
|
||||
# PromdataPanel
|
||||
|
||||
多源 Prometheus 服务器监控展示大屏,支持对接多个 Prometheus 实例,实时展示所有服务器的 CPU、内存、磁盘、网络等关键指标。
|
||||
多源 Prometheus 服务器监控展示大屏。支持对接多个 Prometheus 实例,实时聚合展示所有服务器的 CPU、内存、磁盘、带宽等关键指标,并提供可视化节点分布图。
|
||||
|
||||
## 功能特性
|
||||
|
||||
- 🔌 **多数据源管理** - MySQL 存储配置,支持对接多个 Prometheus 实例
|
||||
- 📊 **NodeExporter 数据查询** - 自动聚合所有 Prometheus 中的 NodeExporter 数据
|
||||
- 🌐 **网络流量统计** - 24 小时网络流量趋势图,总流量统计
|
||||
- ⚡ **实时带宽监控** - 所有服务器网络带宽求和,实时显示
|
||||
- 💻 **资源使用概览** - CPU、内存、磁盘的总使用率和详细统计
|
||||
- 🖥️ **服务器列表** - 所有服务器的详细指标一览表
|
||||
- 🔌 **多数据源管理** - 支持对接多个 Prometheus 实例(Node_Exporter / BlackboxExporter)
|
||||
- 📊 **指标自动聚合** - 自动汇总所有数据源的 NodeExporter 指标,实时计算全网负载
|
||||
- 🌐 **网络流量统计** - 24 小时流量趋势图,实时带宽(Rx/Tx)求和显示
|
||||
- 🗺️ **节点分布可视化** - 自动识别服务器地理位置,并在全球地图上展示实时连接状态与延迟
|
||||
- ⚡ **毫秒级实时性** - 深度优化查询逻辑,支持 5s 采集频率的实时动态展示
|
||||
- 📱 **响应式与美学设计** - 现代 UI/UX 体验,支持暗色模式,极致性能优化
|
||||
|
||||
## 快速开始
|
||||
## 快速安装
|
||||
|
||||
### 1. 环境要求
|
||||
### 方式一:一键脚本安装 (推荐)
|
||||
|
||||
- Node.js >= 16
|
||||
- MySQL >= 5.7
|
||||
|
||||
### 2. 配置
|
||||
|
||||
复制环境变量文件并修改:
|
||||
在 Linux 服务器上,您可以使用以下脚本一键完成下载、环境检测、依赖安装并将其注册为 Systemd 系统服务:
|
||||
|
||||
```bash
|
||||
cp .env.example .env
|
||||
# 下载安装最新版本 (默认 v0.1.0)
|
||||
VERSION=v0.1.0 curl -sSL https://git.littlediary.cn/CN-JS-HuiBai/PromdataPanel/raw/branch/main/install.sh | bash
|
||||
```
|
||||
|
||||
编辑 `.env` 文件,配置 MySQL 连接信息:
|
||||
### 方式二:手动安装
|
||||
|
||||
```env
|
||||
MYSQL_HOST=localhost
|
||||
MYSQL_PORT=3306
|
||||
MYSQL_USER=root
|
||||
MYSQL_PASSWORD=your_password
|
||||
MYSQL_DATABASE=display_wall
|
||||
PORT=3000
|
||||
```
|
||||
#### 1. 环境要求
|
||||
- **Node.js** >= 18
|
||||
- **MySQL** >= 8.0
|
||||
- **Valkey** >= 7.0 (或 Redis >= 6.0)
|
||||
|
||||
### 3. 初始化数据库
|
||||
#### 2. 配置与启动
|
||||
1. 克隆代码库:`git clone https://git.littlediary.cn/CN-JS-HuiBai/PromdataPanel.git`
|
||||
2. 复制配置文件:`cp .env.example .env`
|
||||
3. 安装依赖:`npm install --production`
|
||||
4. 启动服务:`npm start`
|
||||
|
||||
### 方式三:更新现有版本
|
||||
|
||||
如果您已经安装了本系统,可以使用随附的 `update.sh` 脚本一键升级到最新代码:
|
||||
|
||||
```bash
|
||||
npm run init-db
|
||||
# 进入程序目录
|
||||
curl -sSL https://git.littlediary.cn/CN-JS-HuiBai/PromdataPanel/raw/branch/main/update.sh | bash
|
||||
```
|
||||
|
||||
### 4. 安装依赖并启动
|
||||
#### 3. 系统初始化
|
||||
首次运行后,访问 `http://your-ip:3000/init.html`,按照引导完成 MySQL 数据库和 Valkey 缓存的连接。
|
||||
|
||||
```bash
|
||||
npm install
|
||||
npm run dev
|
||||
```
|
||||
## 使用指引
|
||||
|
||||
访问 `http://localhost:3000` 即可看到展示大屏。
|
||||
### 1. 添加 Prometheus 数据源
|
||||
点击页面右上角的 ⚙️ 按钮进入设置,添加并测试您的 Prometheus HTTP 地址。
|
||||
|
||||
### 5. 配置 Prometheus 数据源
|
||||
|
||||
点击右上角的 ⚙️ 按钮,添加你的 Prometheus 地址(如 `http://prometheus.example.com:9090`)。
|
||||
|
||||
### 6. Prometheus 配置参考 (Example)
|
||||
|
||||
在您的 Prometheus 配置文件 `prometheus.yml` 中,建议执行以下配置(`scrape_interval` 建议设为 `5s` 以获取最佳实时展示效果):
|
||||
### 2. Prometheus 采集配置
|
||||
建议在 `prometheus.yml` 中设置采集周期为 `5s` 以实现平滑的实时动态效果:
|
||||
|
||||
```yaml
|
||||
global:
|
||||
scrape_interval: 5s
|
||||
scrape_configs:
|
||||
- job_name: '机器名称'
|
||||
static_configs:
|
||||
- targets: ['IP:Port']
|
||||
```
|
||||
|
||||
scrape_configs:
|
||||
- job_name: 'nodes'
|
||||
static_configs:
|
||||
- targets: ['your-server-ip:9100']
|
||||
```
|
||||
|
||||
## 技术栈
|
||||
|
||||
- **后端**: Node.js + Express
|
||||
- **数据库**: MySQL (mysql2)
|
||||
- **数据源**: Prometheus HTTP API
|
||||
- **前端**: 原生 HTML/CSS/JavaScript
|
||||
- **图表**: 自定义 Canvas 渲染
|
||||
- **Runtime**: Node.js
|
||||
- **Framework**: Express.js
|
||||
- **Database**: MySQL 8.0+
|
||||
- **Caching**: Valkey / Redis
|
||||
- **Visualization**: ECharts / Canvas
|
||||
- **Frontend**: Vanilla JS / CSS3
|
||||
|
||||
## API 接口
|
||||
## API 接口文档
|
||||
|
||||
| 方法 | 路径 | 说明 |
|
||||
|------|------|------|
|
||||
| GET | `/api/sources` | 获取所有数据源 |
|
||||
| POST | `/api/sources` | 添加数据源 |
|
||||
| PUT | `/api/sources/:id` | 更新数据源 |
|
||||
| DELETE | `/api/sources/:id` | 删除数据源 |
|
||||
| POST | `/api/sources/test` | 测试数据源连接 |
|
||||
| GET | `/api/metrics/overview` | 获取聚合指标概览 |
|
||||
| GET | `/api/metrics/network-history` | 获取24h网络流量历史 |
|
||||
| GET | `/api/metrics/cpu-history` | 获取CPU使用率历史 |
|
||||
本项提供了完整的 RESTful API,用于数据采集、系统配置和状态监控。
|
||||
|
||||
### 1. 认证接口 (`/api/auth`)
|
||||
- `POST /api/auth/login`: 用户登录
|
||||
- `POST /api/auth/logout`: 退出登录
|
||||
- `POST /api/auth/change-password`: 修改密码 (需登录)
|
||||
- `GET /api/auth/status`: 获取当前登录状态
|
||||
|
||||
### 2. 数据源管理 (`/api/sources`)
|
||||
- `GET /api/sources`: 获取所有 Prometheus 数据源及其状态
|
||||
- `POST /api/sources`: 添加新数据源 (需登录)
|
||||
- `PUT /api/sources/:id`: 修改数据源信息 (需登录)
|
||||
- `DELETE /api/sources/:id`: 删除数据源 (需登录)
|
||||
- `POST /api/sources/test`: 测试数据源连接性 (需登录)
|
||||
|
||||
### 3. 指标数据获取 (`/api/metrics`)
|
||||
- `GET /api/metrics/overview`: 获取所有服务器的聚合实时指标 (CPU, 内存, 磁盘, 网络)
|
||||
- `GET /api/metrics/network-history`: 获取全网 24 小时流量历史趋势
|
||||
- `GET /api/metrics/cpu-history`: 获取全网 CPU 使用率历史记录
|
||||
- `GET /api/metrics/server-details`: 获取特定服务器的详细实时指标
|
||||
- `GET /api/metrics/server-history`: 获取特定服务器的历史指标数据
|
||||
- `GET /api/metrics/latency`: 获取节点间的实时延迟数据
|
||||
|
||||
### 4. 系统配置与监控
|
||||
- `GET /api/settings`: 获取站点全局配置
|
||||
- `POST /api/settings`: 修改站点全局配置 (需登录)
|
||||
- `GET /health`: 获取系统健康检查报告 (数据库、缓存、内存等状态)
|
||||
|
||||
### 5. 延迟链路管理 (`/api/latency-routes`)
|
||||
- `GET /api/latency-routes`: 获取配置的所有延迟检测链路
|
||||
- `POST /api/latency-routes`: 添加延迟检测链路 (需登录)
|
||||
- `PUT /api/latency-routes/:id`: 修改延迟检测链路 (需登录)
|
||||
- `DELETE /api/latency-routes/:id`: 删除延迟检测链路 (需登录)
|
||||
|
||||
### 6. 实时通信 (WebSocket)
|
||||
系统支持通过 WebSocket 接收实时推送,默认端口与 HTTP 服务一致:
|
||||
- **消息类型 `overview`**: 包含聚合指标、服务器在线状态以及地理分布后的延迟链路数据。
|
||||
|
||||
## LICENSE
|
||||
|
||||
MIT License
|
||||
|
||||
298
install.sh
298
install.sh
@@ -1,124 +1,262 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Data Visualization Display Wall - Systemd Installer
|
||||
# Requirements: Node.js, NPM, Systemd (Linux)
|
||||
set -euo pipefail
|
||||
|
||||
# Colors for output
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
BLUE='\033[0;34m'
|
||||
NC='\033[0m' # No Color
|
||||
NC='\033[0m'
|
||||
|
||||
echo -e "${BLUE}=== Data Visualization Display Wall Installer ===${NC}"
|
||||
VERSION=${VERSION:-"v0.1.0"}
|
||||
DOWNLOAD_URL="https://git.littlediary.cn/CN-JS-HuiBai/PromdataPanel/archive/${VERSION}.zip"
|
||||
MIN_NODE_VERSION=18
|
||||
SERVICE_NAME="promdatapanel"
|
||||
SERVICE_FILE="/etc/systemd/system/${SERVICE_NAME}.service"
|
||||
|
||||
# 1. Check permissions
|
||||
if [ "$EUID" -ne 0 ]; then
|
||||
echo -e "${RED}Please run as root (sudo ./Install.sh)${NC}"
|
||||
OS_ID=""
|
||||
OS_VER=""
|
||||
PROJECT_DIR=""
|
||||
REAL_USER=""
|
||||
|
||||
echo -e "${BLUE}================================================${NC}"
|
||||
echo -e "${BLUE} PromdataPanel Auto-Installer ${NC}"
|
||||
echo -e "${BLUE} Version: ${VERSION} ${NC}"
|
||||
echo -e "${BLUE}================================================${NC}"
|
||||
|
||||
detect_os() {
|
||||
if [ -f /etc/os-release ]; then
|
||||
# shellcheck disable=SC1091
|
||||
. /etc/os-release
|
||||
OS_ID="${ID:-}"
|
||||
OS_VER="${VERSION_ID:-}"
|
||||
else
|
||||
echo -e "${RED}Error: Cannot detect operating system type (/etc/os-release missing).${NC}"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# 2. Get current directory and user
|
||||
PROJECT_DIR=$(pwd)
|
||||
REAL_USER=${SUDO_USER:-$USER}
|
||||
USER_HOME=$(getent passwd "$REAL_USER" | cut -d: -f6)
|
||||
|
||||
echo -e "Project Directory: ${GREEN}$PROJECT_DIR${NC}"
|
||||
echo -e "Running User: ${GREEN}$REAL_USER${NC}"
|
||||
|
||||
# 3. Check for mandatory files
|
||||
if [ ! -f "server/index.js" ]; then
|
||||
echo -e "${RED}Error: server/index.js not found. Please run this script from the project root.${NC}"
|
||||
if [ -z "$OS_ID" ]; then
|
||||
echo -e "${RED}Error: Unable to determine operating system ID.${NC}"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# 4. Check for dependencies
|
||||
echo -e "${BLUE}Checking dependencies...${NC}"
|
||||
check_dep() {
|
||||
if ! command -v "$1" &> /dev/null; then
|
||||
echo -e "${RED}$1 is not installed. Please install $1 first.${NC}"
|
||||
echo -e "Detected OS: ${GREEN}${OS_ID} ${OS_VER}${NC}"
|
||||
}
|
||||
|
||||
require_cmd() {
|
||||
local cmd="$1"
|
||||
local hint="${2:-}"
|
||||
if ! command -v "$cmd" >/dev/null 2>&1; then
|
||||
echo -e "${RED}Missing required command: ${cmd}.${NC}"
|
||||
if [ -n "$hint" ]; then
|
||||
echo -e "${YELLOW}${hint}${NC}"
|
||||
fi
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
check_dep node
|
||||
check_dep npm
|
||||
|
||||
# 5. Check for .env file
|
||||
if [ ! -f ".env" ]; then
|
||||
echo -e "${YELLOW}Warning: .env file not found.${NC}"
|
||||
if [ -f ".env.example" ]; then
|
||||
echo -e "Creating .env from .env.example..."
|
||||
cp .env.example .env
|
||||
chown "$REAL_USER":"$REAL_USER" .env
|
||||
echo -e "${GREEN}Created .env file. Please ensure values are correct.${NC}"
|
||||
else
|
||||
echo -e "${RED}Error: .env.example not found. Configuration missing.${NC}"
|
||||
fi
|
||||
fi
|
||||
|
||||
# 6. Install NPM dependencies
|
||||
echo -e "${BLUE}Installing dependencies...${NC}"
|
||||
# Run npm install as the real user to avoid permission issues in node_modules
|
||||
sudo -u "$REAL_USER" npm install
|
||||
|
||||
if [ $? -ne 0 ]; then
|
||||
echo -e "${RED}NPM install failed.${NC}"
|
||||
install_packages() {
|
||||
case "$OS_ID" in
|
||||
ubuntu|debian|raspbian)
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y "$@"
|
||||
;;
|
||||
centos|rhel|almalinux|rocky)
|
||||
sudo yum install -y "$@"
|
||||
;;
|
||||
fedora)
|
||||
sudo dnf install -y "$@"
|
||||
;;
|
||||
*)
|
||||
echo -e "${RED}Unsupported OS for automatic package installation: ${OS_ID}${NC}"
|
||||
echo -e "${YELLOW}Please install the following packages manually: $*${NC}"
|
||||
exit 1
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
# 7. Create Systemd Service File
|
||||
SERVICE_FILE="/etc/systemd/system/data-wall.service"
|
||||
NODE_PATH=$(command -v node)
|
||||
ensure_tooling() {
|
||||
if ! command -v curl >/dev/null 2>&1; then
|
||||
echo -e "${BLUE}Installing curl...${NC}"
|
||||
install_packages curl
|
||||
fi
|
||||
|
||||
echo -e "${BLUE}Creating systemd service at $SERVICE_FILE...${NC}"
|
||||
if ! command -v unzip >/dev/null 2>&1; then
|
||||
echo -e "${BLUE}Installing unzip...${NC}"
|
||||
install_packages unzip
|
||||
fi
|
||||
}
|
||||
|
||||
cat <<EOF > "$SERVICE_FILE"
|
||||
configure_nodesource_apt_repo() {
|
||||
sudo install -d -m 0755 /etc/apt/keyrings
|
||||
curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | sudo gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg
|
||||
echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x nodistro main" | sudo tee /etc/apt/sources.list.d/nodesource.list >/dev/null
|
||||
}
|
||||
|
||||
install_node() {
|
||||
echo -e "${BLUE}Verifying Node.js environment...${NC}"
|
||||
|
||||
local node_installed=false
|
||||
if command -v node >/dev/null 2>&1; then
|
||||
local current_node_ver
|
||||
current_node_ver=$(node -v | cut -d'v' -f2 | cut -d'.' -f1)
|
||||
if [ "$current_node_ver" -ge "$MIN_NODE_VERSION" ]; then
|
||||
echo -e "${GREEN}Node.js $(node -v) is already installed.${NC}"
|
||||
node_installed=true
|
||||
else
|
||||
echo -e "${YELLOW}Existing Node.js $(node -v) is too old (requires >= ${MIN_NODE_VERSION}).${NC}"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "$node_installed" = true ]; then
|
||||
return
|
||||
fi
|
||||
|
||||
echo -e "${BLUE}Installing Node.js 20.x...${NC}"
|
||||
case "$OS_ID" in
|
||||
ubuntu|debian|raspbian)
|
||||
install_packages ca-certificates curl gnupg
|
||||
configure_nodesource_apt_repo
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y nodejs
|
||||
;;
|
||||
centos|rhel|almalinux|rocky)
|
||||
install_packages nodejs
|
||||
;;
|
||||
fedora)
|
||||
install_packages nodejs
|
||||
;;
|
||||
*)
|
||||
echo -e "${RED}Unsupported OS for automatic Node.js installation: ${OS_ID}${NC}"
|
||||
echo -e "${YELLOW}Please install Node.js >= ${MIN_NODE_VERSION} manually.${NC}"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
require_cmd node "Please install Node.js >= ${MIN_NODE_VERSION} manually and rerun the installer."
|
||||
local installed_major
|
||||
installed_major=$(node -v | cut -d'v' -f2 | cut -d'.' -f1)
|
||||
if [ "$installed_major" -lt "$MIN_NODE_VERSION" ]; then
|
||||
echo -e "${RED}Installed Node.js $(node -v) is still below the required version.${NC}"
|
||||
echo -e "${YELLOW}Please upgrade Node.js manually to >= ${MIN_NODE_VERSION}.${NC}"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
download_project_if_needed() {
|
||||
if [ -f "server/index.js" ]; then
|
||||
return
|
||||
fi
|
||||
|
||||
echo -e "${YELLOW}Project files not found. Starting download...${NC}"
|
||||
ensure_tooling
|
||||
|
||||
local temp_dir
|
||||
temp_dir=$(mktemp -d "${TMPDIR:-/tmp}/promdatapanel-install-XXXXXX")
|
||||
local temp_zip="${temp_dir}/promdatapanel_${VERSION}.zip"
|
||||
|
||||
echo -e "${BLUE}Downloading ${DOWNLOAD_URL}...${NC}"
|
||||
curl -fL "$DOWNLOAD_URL" -o "$temp_zip"
|
||||
|
||||
echo -e "${BLUE}Extracting files...${NC}"
|
||||
unzip -q "$temp_zip" -d "$temp_dir"
|
||||
|
||||
local extracted_dir
|
||||
extracted_dir=$(find "$temp_dir" -mindepth 1 -maxdepth 1 -type d | head -n 1)
|
||||
if [ -z "$extracted_dir" ] || [ ! -f "$extracted_dir/server/index.js" ]; then
|
||||
echo -e "${RED}Download succeeded, but archive structure is invalid.${NC}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
cd "$extracted_dir"
|
||||
}
|
||||
|
||||
detect_runtime_user() {
|
||||
if [ "$EUID" -eq 0 ]; then
|
||||
REAL_USER="${SUDO_USER:-${USER:-root}}"
|
||||
else
|
||||
REAL_USER="${USER}"
|
||||
fi
|
||||
}
|
||||
|
||||
write_service_file() {
|
||||
local node_path
|
||||
node_path=$(command -v node)
|
||||
if [ -z "$node_path" ]; then
|
||||
echo -e "${RED}Unable to locate node executable after installation.${NC}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
local tmp_service
|
||||
tmp_service=$(mktemp "${TMPDIR:-/tmp}/${SERVICE_NAME}.service.XXXXXX")
|
||||
|
||||
cat > "$tmp_service" <<EOF
|
||||
[Unit]
|
||||
Description=Data Visualization Display Wall
|
||||
Description=PromdataPanel Monitoring Dashboard
|
||||
After=network.target mysql.service redis-server.service valkey-server.service
|
||||
Wants=mysql.service
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=$REAL_USER
|
||||
WorkingDirectory=$PROJECT_DIR
|
||||
ExecStart=$NODE_PATH server/index.js
|
||||
User=${REAL_USER}
|
||||
WorkingDirectory=${PROJECT_DIR}
|
||||
ExecStart=${node_path} server/index.js
|
||||
Restart=always
|
||||
RestartSec=10
|
||||
StandardOutput=syslog
|
||||
StandardError=syslog
|
||||
SyslogIdentifier=data-wall
|
||||
# Pass environment via .env file injection
|
||||
EnvironmentFile=-$PROJECT_DIR/.env
|
||||
StandardOutput=journal
|
||||
StandardError=journal
|
||||
SyslogIdentifier=${SERVICE_NAME}
|
||||
EnvironmentFile=-${PROJECT_DIR}/.env
|
||||
Environment=NODE_ENV=production
|
||||
|
||||
# Security Hardening
|
||||
CapabilityBoundingSet=
|
||||
NoNewPrivileges=true
|
||||
LimitNOFILE=65535
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
|
||||
# 8. Reload Systemd and Start
|
||||
echo -e "${BLUE}Reloading systemd and restarting service...${NC}"
|
||||
systemctl daemon-reload
|
||||
systemctl enable data-wall
|
||||
systemctl restart data-wall
|
||||
echo -e "${BLUE}Creating systemd service at ${SERVICE_FILE}...${NC}"
|
||||
sudo install -m 0644 "$tmp_service" "$SERVICE_FILE"
|
||||
rm -f "$tmp_service"
|
||||
}
|
||||
|
||||
detect_os
|
||||
download_project_if_needed
|
||||
detect_runtime_user
|
||||
install_node
|
||||
|
||||
PROJECT_DIR=$(pwd)
|
||||
echo -e "Project Directory: ${GREEN}${PROJECT_DIR}${NC}"
|
||||
echo -e "Running User: ${GREEN}${REAL_USER}${NC}"
|
||||
|
||||
if [ ! -f ".env" ] && [ -f ".env.example" ]; then
|
||||
echo -e "${BLUE}Creating .env from .env.example...${NC}"
|
||||
cp .env.example .env
|
||||
fi
|
||||
|
||||
echo -e "${BLUE}Installing NPM dependencies...${NC}"
|
||||
npm install --production
|
||||
|
||||
write_service_file
|
||||
|
||||
echo -e "${BLUE}Reloading systemd and restarting service...${NC}"
|
||||
sudo systemctl daemon-reload
|
||||
sudo systemctl enable "$SERVICE_NAME"
|
||||
sudo systemctl restart "$SERVICE_NAME"
|
||||
|
||||
# 9. Check Status
|
||||
echo -e "${BLUE}Checking service status...${NC}"
|
||||
sleep 2
|
||||
if systemctl is-active --quiet data-wall; then
|
||||
echo -e "${GREEN}SUCCESS: Service is now running.${NC}"
|
||||
PORT=$(grep "^PORT=" .env | cut -d'=' -f2)
|
||||
if sudo systemctl is-active --quiet "$SERVICE_NAME"; then
|
||||
echo -e "${GREEN}SUCCESS: PromdataPanel is now running.${NC}"
|
||||
PORT=$(grep "^PORT=" .env 2>/dev/null | cut -d'=' -f2 || true)
|
||||
PORT=${PORT:-3000}
|
||||
echo -e "Dashboard URL: ${YELLOW}http://localhost:${PORT}${NC}"
|
||||
echo -e "View logs: ${BLUE}journalctl -u data-wall -f${NC}"
|
||||
IP_ADDR=$(hostname -I 2>/dev/null | awk '{print $1}')
|
||||
if [ -n "${IP_ADDR:-}" ]; then
|
||||
echo -e "Dashboard URL: ${YELLOW}http://${IP_ADDR}:${PORT}${NC}"
|
||||
fi
|
||||
else
|
||||
echo -e "${RED}FAILED: Service failed to start.${NC}"
|
||||
echo -e "Check logs with: ${BLUE}journalctl -u data-wall -xe${NC}"
|
||||
echo -e "Check logs with: ${BLUE}journalctl -u ${SERVICE_NAME} -xe${NC}"
|
||||
fi
|
||||
|
||||
echo -e "${BLUE}================================================${NC}"
|
||||
echo -e "${GREEN}Installation completed!${NC}"
|
||||
echo -e "${BLUE}================================================${NC}"
|
||||
|
||||
24
package-lock.json
generated
24
package-lock.json
generated
@@ -13,7 +13,8 @@
|
||||
"dotenv": "^16.4.0",
|
||||
"express": "^4.21.0",
|
||||
"ioredis": "^5.10.1",
|
||||
"mysql2": "^3.11.0"
|
||||
"mysql2": "^3.11.0",
|
||||
"ws": "^8.20.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@ioredis/commands": {
|
||||
@@ -1215,6 +1216,27 @@
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/ws": {
|
||||
"version": "8.20.0",
|
||||
"resolved": "https://registry.npmjs.org/ws/-/ws-8.20.0.tgz",
|
||||
"integrity": "sha512-sAt8BhgNbzCtgGbt2OxmpuryO63ZoDk/sqaB/znQm94T4fCEsy/yV+7CdC1kJhOU9lboAEU7R3kquuycDoibVA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=10.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"bufferutil": "^4.0.1",
|
||||
"utf-8-validate": ">=5.0.2"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"bufferutil": {
|
||||
"optional": true
|
||||
},
|
||||
"utf-8-validate": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
{
|
||||
"name": "data-visualization-display-wall",
|
||||
"name": "promdatapanel",
|
||||
"version": "1.0.0",
|
||||
"description": "Data Visualization Display Wall - Multi-Prometheus Monitoring Dashboard",
|
||||
"main": "server/index.js",
|
||||
"scripts": {
|
||||
"dev": "node server/index.js",
|
||||
"start": "node server/index.js",
|
||||
"init-db": "node server/init-db.js"
|
||||
"init-db": "node server/init-db.js",
|
||||
"db-migrate": "node server/init-db.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"axios": "^1.7.0",
|
||||
@@ -14,6 +15,7 @@
|
||||
"dotenv": "^16.4.0",
|
||||
"express": "^4.21.0",
|
||||
"ioredis": "^5.10.1",
|
||||
"mysql2": "^3.11.0"
|
||||
"mysql2": "^3.11.0",
|
||||
"ws": "^8.20.0"
|
||||
}
|
||||
}
|
||||
|
||||
1385
public/css/style.css
1385
public/css/style.css
File diff suppressed because it is too large
Load Diff
@@ -4,19 +4,22 @@
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<meta name="description" content="多源Prometheus服务器监控展示大屏 - 实时CPU、内存、磁盘、网络统计">
|
||||
<title>数据可视化展示大屏</title>
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
||||
<link
|
||||
href="https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700;800;900&family=JetBrains+Mono:wght@400;500;600&display=swap"
|
||||
rel="stylesheet">
|
||||
<meta name="description" content="LDNET-GA">
|
||||
<title></title>
|
||||
<link rel="icon" id="siteFavicon" href="data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7">
|
||||
<link rel="stylesheet" href="/css/style.css">
|
||||
<script src="/vendor/echarts.min.js"></script>
|
||||
<script>
|
||||
// Prevent theme flicker
|
||||
(function () {
|
||||
const savedTheme = localStorage.getItem('theme');
|
||||
const settings = window.SITE_SETTINGS || {};
|
||||
const sanitizeAssetUrl = (url) => {
|
||||
if (!url || typeof url !== 'string') return null;
|
||||
const trimmed = url.trim();
|
||||
if (!trimmed) return null;
|
||||
return /^(https?:|data:image\/|\/)/i.test(trimmed) ? trimmed : null;
|
||||
};
|
||||
const defaultTheme = settings.default_theme || 'dark';
|
||||
let theme = savedTheme || defaultTheme;
|
||||
|
||||
@@ -28,12 +31,69 @@
|
||||
document.documentElement.classList.add('light-theme');
|
||||
}
|
||||
|
||||
// Also apply title if available to prevent flicker
|
||||
// Also apply title and favicon if available to prevent flicker
|
||||
if (settings.page_name) {
|
||||
document.title = settings.page_name;
|
||||
}
|
||||
|
||||
const safeFaviconUrl = sanitizeAssetUrl(settings.favicon_url);
|
||||
if (safeFaviconUrl) {
|
||||
const link = document.getElementById('siteFavicon');
|
||||
if (link) link.href = safeFaviconUrl;
|
||||
}
|
||||
|
||||
// Advanced Anti-Flicker: Wait for header elements to appear
|
||||
const observer = new MutationObserver(function(mutations, me) {
|
||||
const logoText = document.getElementById('logoText');
|
||||
const logoIcon = document.getElementById('logoIconContainer');
|
||||
const header = document.getElementById('header');
|
||||
|
||||
if (logoText || logoIcon) {
|
||||
// If we found either, apply what we have
|
||||
if (logoText) {
|
||||
const displayTitle = settings.title || settings.page_name || '数据可视化展示大屏';
|
||||
logoText.textContent = displayTitle;
|
||||
if (settings.show_page_name === 0) logoText.style.display = 'none';
|
||||
}
|
||||
|
||||
if (logoIcon) {
|
||||
const actualTheme = document.documentElement.classList.contains('light-theme') ? 'light' : 'dark';
|
||||
const logoToUse = sanitizeAssetUrl((actualTheme === 'dark' && settings.logo_url_dark) ? settings.logo_url_dark : (settings.logo_url || null));
|
||||
if (logoToUse) {
|
||||
const img = document.createElement('img');
|
||||
img.src = logoToUse;
|
||||
img.alt = 'Logo';
|
||||
img.className = 'logo-icon-img';
|
||||
logoIcon.replaceChildren(img);
|
||||
} else {
|
||||
// Only if we REALLY have no logo URL, we show the default SVG fallback
|
||||
// (But since it's already in HTML, we just don't touch it or we show it if we hid it)
|
||||
const svg = logoIcon.querySelector('svg');
|
||||
if (svg) svg.style.visibility = 'visible';
|
||||
}
|
||||
}
|
||||
|
||||
// Once found everything or we are past header, we are done
|
||||
if (logoText && logoIcon) me.disconnect();
|
||||
}
|
||||
});
|
||||
observer.observe(document.documentElement, { childList: true, subtree: true });
|
||||
})();
|
||||
</script>
|
||||
<script>
|
||||
// Global Error Logger for remote debugging
|
||||
window.onerror = function(msg, url, line, col, error) {
|
||||
var debugDiv = document.getElementById('js-debug-overlay');
|
||||
if (!debugDiv) {
|
||||
debugDiv = document.createElement('div');
|
||||
debugDiv.id = 'js-debug-overlay';
|
||||
debugDiv.style.cssText = 'position:fixed;top:0;left:0;width:100%;background:rgba(220,38,38,0.95);color:white;z-index:99999;padding:10px;font-family:monospace;font-size:12px;max-height:30vh;overflow:auto;pointer-events:none;';
|
||||
document.body.appendChild(debugDiv);
|
||||
}
|
||||
debugDiv.innerHTML += '<div>[JS ERROR] ' + msg + ' at ' + line + ':' + col + '</div>';
|
||||
return false;
|
||||
};
|
||||
</script>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
@@ -50,7 +110,7 @@
|
||||
<div class="header-left">
|
||||
<div class="logo">
|
||||
<div id="logoIconContainer">
|
||||
<svg class="logo-icon" id="logoSvg" viewBox="0 0 32 32" fill="none">
|
||||
<svg class="logo-icon" id="logoSvg" viewBox="0 0 32 32" fill="none" style="visibility: hidden;">
|
||||
<rect x="2" y="2" width="28" height="28" rx="8" stroke="url(#logoGrad)" stroke-width="2.5" />
|
||||
<path d="M8 22 L12 14 L16 18 L20 10 L24 16" stroke="url(#logoGrad)" stroke-width="2"
|
||||
stroke-linecap="round" stroke-linejoin="round" fill="none" />
|
||||
@@ -64,14 +124,7 @@
|
||||
</defs>
|
||||
</svg>
|
||||
</div>
|
||||
<h1 class="logo-text" id="logoText">数据可视化展示大屏</h1>
|
||||
</div>
|
||||
<div class="header-meta">
|
||||
<span class="server-count" id="serverCount">
|
||||
<span class="dot dot-pulse"></span>
|
||||
<span id="serverCountText">0 台服务器</span>
|
||||
</span>
|
||||
<span class="source-count" id="sourceCount">0 个数据源</span>
|
||||
<h1 class="logo-text" id="logoText"></h1>
|
||||
</div>
|
||||
</div>
|
||||
<div class="header-right">
|
||||
@@ -102,6 +155,13 @@
|
||||
<div id="userSection">
|
||||
<button class="btn btn-login" id="btnLogin">登录</button>
|
||||
</div>
|
||||
<button class="btn-refresh-global" id="btnGlobalRefresh" title="全局强制刷新数据" style="display: none;">
|
||||
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
|
||||
<polyline points="23 4 23 10 17 10" />
|
||||
<polyline points="1 20 1 14 7 14" />
|
||||
<path d="M3.51 9a9 9 0 0 1 14.85-3.36L23 10M1 14l4.64 4.36A9 9 0 0 0 20.49 15" />
|
||||
</svg>
|
||||
</button>
|
||||
<button class="btn-settings" id="btnSettings" title="配置管理">
|
||||
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round"
|
||||
stroke-linejoin="round">
|
||||
@@ -128,7 +188,7 @@
|
||||
</svg>
|
||||
</div>
|
||||
<div class="stat-card-content">
|
||||
<span class="stat-card-label">服务器总数</span>
|
||||
<span class="stat-card-label" id="totalServersLabel">服务器总数</span>
|
||||
<span class="stat-card-value" id="totalServers">0</span>
|
||||
</div>
|
||||
</div>
|
||||
@@ -187,9 +247,12 @@
|
||||
</svg>
|
||||
</div>
|
||||
<div class="stat-card-content">
|
||||
<span class="stat-card-label">实时总带宽</span>
|
||||
<span class="stat-card-value" id="totalBandwidth">0 B/s</span>
|
||||
<span class="stat-card-sub" id="bandwidthDetail">↓ 0 ↑ 0</span>
|
||||
<span class="stat-card-label">实时带宽 (MB/s ↑/↓)</span>
|
||||
<div class="stat-card-value-group">
|
||||
<span class="stat-card-value" id="totalBandwidthTx">0.00</span>
|
||||
<span class="stat-card-separator">/</span>
|
||||
<span class="stat-card-value" id="totalBandwidthRx">0.00</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
@@ -206,12 +269,21 @@
|
||||
</svg>
|
||||
网络流量趋势 (24h)
|
||||
</h2>
|
||||
<div class="chart-header-actions">
|
||||
<button class="btn-icon" id="btnRefreshNetwork" title="刷新流量趋势">
|
||||
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" style="width: 16px; height: 16px;">
|
||||
<path d="M23 4v6h-6M1 20v-6h6M3.51 9a9 9 0 0 1 14.85-3.36L23 10M1 14l4.64 4.36A9 9 0 0 0 20.49 15"></path>
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
<div class="chart-legend">
|
||||
<span class="legend-item"><span class="legend-dot legend-rx"></span>接收 (RX)</span>
|
||||
<span class="legend-item"><span class="legend-dot legend-tx"></span>发送 (TX)</span>
|
||||
<span class="legend-item" id="legendRx" style="cursor: pointer;" title="点击切换 接收 (RX) 显示/隐藏"><span
|
||||
class="legend-dot legend-rx"></span>接收 (RX)</span>
|
||||
<span class="legend-item" id="legendTx" style="cursor: pointer;" title="点击切换 发送 (TX) 显示/隐藏"><span
|
||||
class="legend-dot legend-tx"></span>发送 (TX)</span>
|
||||
<span class="legend-item disabled" id="legendP95" style="cursor: pointer;" title="点击切换 P95 线显示/隐藏">
|
||||
<span class="legend-dot legend-p95"></span>95计费 (P95)
|
||||
<span class="legend-dot legend-p95"></span>95计费 (<span id="p95LabelText">上行</span>)
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
@@ -228,7 +300,7 @@
|
||||
<span class="traffic-value" id="traffic24hTx">0 B</span>
|
||||
</div>
|
||||
<div class="traffic-stat traffic-stat-p95">
|
||||
<span class="traffic-label">95计费带宽</span>
|
||||
<span class="traffic-label">95计费 (上行)</span>
|
||||
<span class="traffic-value" id="trafficP95">0 B/s</span>
|
||||
</div>
|
||||
<div class="traffic-stat traffic-stat-total">
|
||||
@@ -241,6 +313,43 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Global Traffic 3D Globe -->
|
||||
<div class="chart-card globe-card" id="globeCard">
|
||||
<div class="chart-card-header">
|
||||
<h2 class="chart-title">
|
||||
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.5" class="chart-title-icon">
|
||||
<circle cx="12" cy="12" r="10" />
|
||||
<path
|
||||
d="M2 12h20M12 2a15.3 15.3 0 0 1 4 10 15.3 15.3 0 0 1-4 10 15.3 15.3 0 0 1-4-10 15.3 15.3 0 0 1 4-10z" />
|
||||
</svg>
|
||||
全球骨干分布
|
||||
</h2>
|
||||
<div class="chart-header-actions">
|
||||
<button class="btn-icon" id="btnExpandGlobe" title="放大显示">
|
||||
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"
|
||||
style="width: 18px; height: 18px;">
|
||||
<path d="M15 3h6v6M9 21H3v-6M21 3l-7 7M3 21l7-7" />
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
<div class="globe-body" id="globeContainer"></div>
|
||||
<div class="chart-footer">
|
||||
<div class="traffic-stat">
|
||||
<span class="traffic-label">全球节点总数</span>
|
||||
<span class="traffic-value" id="globeTotalNodes">0</span>
|
||||
</div>
|
||||
<div class="traffic-stat">
|
||||
<span class="traffic-label">覆盖地区/国家</span>
|
||||
<span class="traffic-value" id="globeTotalRegions">0</span>
|
||||
</div>
|
||||
<div class="traffic-stat">
|
||||
<span class="traffic-label">实时活跃状态</span>
|
||||
<span class="traffic-value" style="color: var(--accent-emerald);">Active</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<!-- Server List -->
|
||||
@@ -256,32 +365,84 @@
|
||||
</svg>
|
||||
服务器详情
|
||||
</h2>
|
||||
<div class="chart-header-right">
|
||||
<div class="search-box">
|
||||
<input type="search" id="serverSearchFilter" name="q-filter-server" placeholder="检索服务器名称..."
|
||||
autocomplete="one-time-code" spellcheck="false">
|
||||
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round"
|
||||
stroke-linejoin="round" class="search-icon">
|
||||
<circle cx="11" cy="11" r="8"></circle>
|
||||
<line x1="21" y1="21" x2="16.65" y2="16.65"></line>
|
||||
</svg>
|
||||
</div>
|
||||
<button id="btnResetSort" class="btn-icon-sm" title="重置筛选与排序">
|
||||
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round"
|
||||
stroke-linejoin="round">
|
||||
<path d="M3 12a9 9 0 1 0 9-9 9.75 9.75 0 0 0-6.74 2.74L3 8"></path>
|
||||
<path d="M3 3v5h5"></path>
|
||||
</svg>
|
||||
</button>
|
||||
<select id="sourceFilter" class="source-select">
|
||||
<option value="all">所有数据源</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<div class="server-table-wrap">
|
||||
<table class="server-table" id="serverTable">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>状态</th>
|
||||
<th>Job / 实例</th>
|
||||
<th>数据源</th>
|
||||
<th>CPU</th>
|
||||
<th>内存</th>
|
||||
<th>磁盘</th>
|
||||
<th>网络 ↓</th>
|
||||
<th>网络 ↑</th>
|
||||
<th class="sortable active" data-sort="up">状态 <span class="sort-icon"></span></th>
|
||||
<th class="sortable" data-sort="job">Job / 实例 <span class="sort-icon"></span></th>
|
||||
<th class="sortable" data-sort="source">数据源 <span class="sort-icon"></span></th>
|
||||
<th class="sortable" data-sort="cpu">CPU <span class="sort-icon"></span></th>
|
||||
<th class="sortable" data-sort="mem">内存 <span class="sort-icon"></span></th>
|
||||
<th class="sortable" data-sort="disk">磁盘 <span class="sort-icon"></span></th>
|
||||
<th class="sortable" data-sort="netRx">网络 ↓ <span class="sort-icon"></span></th>
|
||||
<th class="sortable" data-sort="netTx">网络 ↑ <span class="sort-icon"></span></th>
|
||||
<th class="sortable" data-sort="conntrack">Conntrack <span class="sort-icon"></span></th>
|
||||
<th class="sortable" data-sort="traffic24h">24h 流量 <span class="sort-icon"></span></th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody id="serverTableBody">
|
||||
<tr class="empty-row">
|
||||
<td colspan="8">暂无数据 - 请先配置 Prometheus 数据源</td>
|
||||
<td colspan="10">暂无数据 - 请先配置 Prometheus 数据源</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
<div class="pagination-footer">
|
||||
<div class="page-size-selector">
|
||||
<span>每页显示</span>
|
||||
<select id="pageSizeSelect" class="source-select">
|
||||
<option value="10">10</option>
|
||||
<option value="20" selected>20</option>
|
||||
<option value="50">50</option>
|
||||
<option value="100">100</option>
|
||||
</select>
|
||||
<span>条</span>
|
||||
</div>
|
||||
<div class="pagination-controls" id="paginationControls">
|
||||
<!-- Pagination buttons will be injected here -->
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</main>
|
||||
|
||||
<!-- Footer -->
|
||||
<footer class="site-footer">
|
||||
<div class="footer-content">
|
||||
<div class="copyright">© <span id="copyrightYear"></span> LDNET-GA-Service. All rights reserved.</div>
|
||||
<div class="filings">
|
||||
<a href="http://www.beian.gov.cn/portal/registerSystemInfo" target="_blank" id="psFilingDisplay" style="display: none;">
|
||||
<span id="psFilingText"></span>
|
||||
</a>
|
||||
<span class="filing-sep"></span>
|
||||
<a href="https://beian.miit.gov.cn/" target="_blank" id="icpFilingDisplay" style="display: none;"></a>
|
||||
</div>
|
||||
</div>
|
||||
</footer>
|
||||
|
||||
<!-- Settings Modal -->
|
||||
<div class="modal-overlay" id="settingsModal">
|
||||
<div class="modal">
|
||||
@@ -289,6 +450,9 @@
|
||||
<div class="modal-tabs">
|
||||
<button class="modal-tab active" data-tab="prom">数据源管理</button>
|
||||
<button class="modal-tab" data-tab="site">大屏设置</button>
|
||||
<button class="modal-tab" data-tab="security">安全设置</button>
|
||||
<button class="modal-tab" data-tab="latency">延迟线路管理</button>
|
||||
<button class="modal-tab" data-tab="auth">账号安全</button>
|
||||
</div>
|
||||
<button class="modal-close" id="modalClose">×</button>
|
||||
</div>
|
||||
@@ -299,20 +463,51 @@
|
||||
<div class="add-source-form" id="addSourceForm">
|
||||
<h3>添加数据源</h3>
|
||||
<div class="form-row">
|
||||
<div class="form-group">
|
||||
<div class="form-group" style="flex: 0.8;">
|
||||
<label for="sourceType">类型</label>
|
||||
<select id="sourceType"
|
||||
style="padding: 10px 14px; background: var(--bg-input); border: 1px solid var(--border-color); border-radius: var(--radius-sm); color: var(--text-primary); outline: none;">
|
||||
<option value="prometheus">Prometheus</option>
|
||||
<option value="blackbox">Blackbox Exporter</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="form-group" style="flex: 1;">
|
||||
<label for="sourceName">名称</label>
|
||||
<input type="text" id="sourceName" placeholder="例:生产环境" autocomplete="off">
|
||||
</div>
|
||||
<div class="form-group form-group-wide">
|
||||
<label for="sourceUrl">Prometheus URL</label>
|
||||
<input type="url" id="sourceUrl" placeholder="http://prometheus.example.com:9090" autocomplete="off">
|
||||
<label for="sourceUrl">URL 地址</label>
|
||||
<input type="url" id="sourceUrl" placeholder="http://1.2.3.4:9090" autocomplete="off">
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-row">
|
||||
<div class="form-group form-group-wide">
|
||||
<label for="sourceDesc">描述 (可选)</label>
|
||||
<input type="text" id="sourceDesc" placeholder="数据源描述" autocomplete="off">
|
||||
<input type="text" id="sourceDesc" placeholder="记录关于此数据源的备注信息" autocomplete="off">
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-row" id="serverSourceOption" style="margin-top: 4px;">
|
||||
<div class="form-group form-group-wide">
|
||||
<div class="source-options-clean-row">
|
||||
<label class="source-option-item" title="将此数据源的服务器指标聚合到首页总览中">
|
||||
<div class="switch-wrapper">
|
||||
<input type="checkbox" id="isOverviewSource" checked class="switch-input">
|
||||
<div class="switch-label"></div>
|
||||
</div>
|
||||
<span class="source-option-label">加入总览统计</span>
|
||||
</label>
|
||||
<label class="source-option-item" title="在服务器详情列表中显示此数据源的服务器">
|
||||
<div class="switch-wrapper">
|
||||
<input type="checkbox" id="isDetailSource" checked class="switch-input">
|
||||
<div class="switch-label"></div>
|
||||
</div>
|
||||
<span class="source-option-label">加入详情展示</span>
|
||||
</label>
|
||||
</div>
|
||||
<input type="checkbox" id="isServerSource" checked disabled style="display: none;">
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-row" style="margin-top: 8px;">
|
||||
<div class="form-actions">
|
||||
<button class="btn btn-test" id="btnTest">测试连接</button>
|
||||
<button class="btn btn-add" id="btnAdd">添加</button>
|
||||
@@ -343,24 +538,218 @@
|
||||
<input type="text" id="siteTitleInput" placeholder="例:数据可视化展示大屏">
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="logoUrlInput">Logo URL (图片链接,为空则显示默认图标)</label>
|
||||
<input type="url" id="logoUrlInput" placeholder="https://example.com/logo.png">
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="defaultThemeInput">默认主题</label>
|
||||
<select id="defaultThemeInput"
|
||||
style="padding: 10px 14px; background: var(--bg-input); border: 1px solid var(--border-color); border-radius: var(--radius-sm); color: var(--text-primary);">
|
||||
<option value="dark">默认夜间模式</option>
|
||||
<option value="light">默认白天模式</option>
|
||||
<option value="auto">跟随浏览器/系统</option>
|
||||
<label for="showPageNameInput">是否显示左上角标题</label>
|
||||
<select id="showPageNameInput"
|
||||
style="padding: 10px 14px; background: var(--bg-input); border: 1px solid var(--border-color); border-radius: var(--radius-sm); color: var(--text-primary); width: 100%;">
|
||||
<option value="1">显示 (Show)</option>
|
||||
<option value="0">隐藏 (Hide)</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="logoUrlInput">Logo URL (白天/默认,支持图片链接)</label>
|
||||
<input type="url" id="logoUrlInput" placeholder="https://example.com/logo_light.png">
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="logoUrlDarkInput">Logo URL (黑夜模式,可为空则使用默认)</label>
|
||||
<input type="url" id="logoUrlDarkInput" placeholder="https://example.com/logo_dark.png">
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="faviconUrlInput">Favicon URL (浏览器标签页图标)</label>
|
||||
<input type="url" id="faviconUrlInput" placeholder="https://example.com/favicon.ico">
|
||||
</div>
|
||||
<div class="settings-section" style="margin-top: 25px; border-top: 1px solid var(--border-color); padding-top: 20px;">
|
||||
<h4 style="font-size: 0.85rem; color: var(--accent-indigo); margin-bottom: 15px; text-transform: uppercase; letter-spacing: 0.5px;">界面外观 (Appearance)</h4>
|
||||
<div class="form-group">
|
||||
<label for="defaultThemeInput">色彩主题模式</label>
|
||||
<select id="defaultThemeInput"
|
||||
style="padding: 10px 14px; background: var(--bg-input); border: 1px solid var(--border-color); border-radius: var(--radius-sm); color: var(--text-primary); width: 100%;">
|
||||
<option value="auto">跟随系统主题 (Sync with OS)</option>
|
||||
<option value="dark">强制深色模式 (Always Dark)</option>
|
||||
<option value="light">强制浅色模式 (Always Light)</option>
|
||||
</select>
|
||||
<p style="font-size: 0.72rem; color: var(--text-muted); margin-top: 6px;">选择“跟随系统”后,应用将自动同步您操作系统或浏览器的黑暗/白天模式设置。</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="show95BandwidthInput">24h趋势图默认显示 95计费线</label>
|
||||
<select id="show95BandwidthInput"
|
||||
style="padding: 10px 14px; background: var(--bg-input); border: 1px solid var(--border-color); border-radius: var(--radius-sm); color: var(--text-primary);">
|
||||
<option value="1">显示</option>
|
||||
<option value="0">不显示</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="p95TypeSelect">95带宽计费统计类型</label>
|
||||
<select id="p95TypeSelect"
|
||||
style="padding: 10px 14px; background: var(--bg-input); border: 1px solid var(--border-color); border-radius: var(--radius-sm); color: var(--text-primary);">
|
||||
<option value="tx">仅统计上行 (TX)</option>
|
||||
<option value="rx">仅统计下行 (RX)</option>
|
||||
<option value="both">统计上行+下行 (Sum)</option>
|
||||
<option value="max">出入取大 (Max)</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="prometheusCacheTtlInput">数据自动刷新/同步间隔 (秒)</label>
|
||||
<input type="number" id="prometheusCacheTtlInput" placeholder="例:30" min="0" max="86400">
|
||||
<p style="font-size: 0.72rem; color: var(--text-muted); margin-top: 6px;">后端将按此频率主动从 Prometheus 抓取数据并缓存。设为 0 则禁用自动同步。建议值:15-60s。</p>
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="psFilingInput">公安备案号 (如:京公网安备 11010102000001号)</label>
|
||||
<input type="text" id="psFilingInput" placeholder="请输入公安备案号">
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="icpFilingInput">ICP 备案号 (如:京ICP备12345678号)</label>
|
||||
<input type="text" id="icpFilingInput" placeholder="请输入 ICP 备案号">
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="cdnUrlInput">静态资源 CDN 地址 (例如: https://cdn.example.com)</label>
|
||||
<input type="url" id="cdnUrlInput" placeholder="留空则使用本地服务器资源">
|
||||
<p style="font-size: 0.72rem; color: var(--text-muted); margin-top: 6px;">开启后,页面中的 JS/CSS/图片等资源将尝试从该 CDN 加载。请确保 CDN 已正确镜像相关资源。</p>
|
||||
</div>
|
||||
<div class="form-actions" style="margin-top: 25px; display: flex; justify-content: flex-end;">
|
||||
<button class="btn btn-add" id="btnSaveSiteSettings">保存设置</button>
|
||||
<button class="btn btn-add" id="btnSaveSiteSettings">保存基础设置</button>
|
||||
</div>
|
||||
<div class="form-message" id="siteSettingsMessage"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Security Settings Tab -->
|
||||
<div class="tab-content" id="tab-security">
|
||||
<div class="security-settings-form">
|
||||
<h3>安全与隐私设置</h3>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="requireLoginForServerDetailsInput">服务器详情是否仅登录后可查看</label>
|
||||
<select id="requireLoginForServerDetailsInput"
|
||||
style="padding: 10px 14px; background: var(--bg-input); border: 1px solid var(--border-color); border-radius: var(--radius-sm); color: var(--text-primary); width: 100%;">
|
||||
<option value="1">仅登录后可查看</option>
|
||||
<option value="0">允许公开查看</option>
|
||||
</select>
|
||||
<p style="font-size: 0.72rem; color: var(--text-muted); margin-top: 6px;">开启后,未登录访客仍可看到大屏总览,但点击单台服务器时需要先登录。</p>
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="showServerIpInput">是否在服务器详情中显示公网 IP</label>
|
||||
<select id="showServerIpInput"
|
||||
style="padding: 10px 14px; background: var(--bg-input); border: 1px solid var(--border-color); border-radius: var(--radius-sm); color: var(--text-primary); width: 100%;">
|
||||
<option value="1">显示 (Show)</option>
|
||||
<option value="0">隐藏 (Hide)</option>
|
||||
</select>
|
||||
<p style="font-size: 0.72rem; color: var(--text-muted); margin-top: 6px;">开启后,点击服务器详情时会显示该服务器的公网 IP 地址。</p>
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="ipMetricNameInput">自定义 IP 采集指标 (可选)</label>
|
||||
<input type="text" id="ipMetricNameInput" placeholder="例:node_network_address_info">
|
||||
<p style="font-size: 0.72rem; color: var(--text-muted); margin-top: 6px;">如果您的 Prometheus 中有专门记录 IP 的指标,请在此输入。留空则尝试自动发现。</p>
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="ipLabelNameInput">IP 指标中的 Label 名称</label>
|
||||
<input type="text" id="ipLabelNameInput" placeholder="默认:address">
|
||||
</div>
|
||||
<div class="form-actions" style="margin-top: 25px; display: flex; justify-content: flex-end;">
|
||||
<button class="btn btn-add" id="btnSaveSecuritySettings">保存安全设置</button>
|
||||
</div>
|
||||
<div class="form-message" id="securitySettingsMessage"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Custom Detail Metrics Tab -->
|
||||
<div class="tab-content" id="tab-details-metrics">
|
||||
<div class="metrics-settings-form">
|
||||
<div style="display: flex; justify-content: space-between; align-items: center; margin-bottom: 20px;">
|
||||
<h3 style="margin: 0;">服务器详情指标配置</h3>
|
||||
<button class="btn btn-add" id="btnAddCustomMetric" style="padding: 6px 12px; font-size: 0.8rem;">
|
||||
<i class="fas fa-plus"></i> 添加指标
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div id="customMetricsList" class="custom-metrics-list" style="max-height: 400px; overflow-y: auto; padding-right: 5px;">
|
||||
<!-- Dynamic rows will be added here -->
|
||||
</div>
|
||||
|
||||
<div class="form-actions" style="margin-top: 25px; display: flex; justify-content: flex-end;">
|
||||
<button class="btn btn-add" id="btnSaveCustomMetrics">保存指标配置</button>
|
||||
</div>
|
||||
<div class="form-message" id="customMetricsMessage"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Latency Routes Tab -->
|
||||
<div class="tab-content" id="tab-latency">
|
||||
<div class="latency-settings-form">
|
||||
<h3>Blackbox 延迟连线管理</h3>
|
||||
<div class="latency-routes-manager">
|
||||
<!-- Add Route Form -->
|
||||
<div class="add-route-mini-form"
|
||||
style="background: rgba(255,255,255,0.02); padding: 15px; border-radius: 8px; margin-bottom: 20px; border: 1px solid var(--border-color);">
|
||||
<div class="form-row">
|
||||
<div class="form-group" style="flex: 1.5;">
|
||||
<label>探测用服务器</label>
|
||||
<select id="routeSourceSelect"
|
||||
style="padding: 10px 14px; background: var(--bg-input); border: 1px solid var(--border-color); border-radius: var(--radius-sm); color: var(--text-primary);">
|
||||
<option value="">-- 选择数据源 --</option>
|
||||
</select>
|
||||
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label>起航点</label>
|
||||
<input type="text" id="routeSourceInput" placeholder="例:China">
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label>目的地</label>
|
||||
<input type="text" id="routeDestInput" placeholder="例:United States">
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-row" style="margin-top: 10px; align-items: flex-end;">
|
||||
<div class="form-group" style="flex: 2;">
|
||||
<label>Blackbox 探测目标 (IP 或 域名)</label>
|
||||
<input type="text" id="routeTargetInput" placeholder="例:1.1.1.1 或 google.com">
|
||||
</div>
|
||||
<div class="form-actions" style="padding-bottom: 0; display: flex; gap: 8px;">
|
||||
<button class="btn btn-add" id="btnAddRoute" style="padding: 10px 24px;">添加线路</button>
|
||||
<button class="btn btn-test" id="btnCancelEditRoute"
|
||||
style="display: none; padding: 10px 15px; background: rgba(0,0,0,0.3);">取消</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Routes List -->
|
||||
<div class="latency-routes-list-container">
|
||||
<h4
|
||||
style="font-size: 0.75rem; color: var(--text-muted); text-transform: uppercase;; margin-bottom: 10px;">
|
||||
已配置线路</h4>
|
||||
<div id="latencyRoutesList" class="latency-routes-list"
|
||||
style="display: flex; flex-direction: column; gap: 10px;">
|
||||
<!-- Routes will be injected here -->
|
||||
<div class="route-empty"
|
||||
style="text-align: center; padding: 20px; color: var(--text-muted); font-size: 0.85rem; background: rgba(0,0,0,0.1); border-radius: 8px;">
|
||||
暂无线路</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Account Security Tab -->
|
||||
<div class="tab-content" id="tab-auth">
|
||||
<div class="security-settings-form">
|
||||
<h3>修改登录密码</h3>
|
||||
<div class="form-group">
|
||||
<label for="oldPassword">当前密码</label>
|
||||
<input type="password" id="oldPassword" placeholder="请输入当前旧密码">
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="newPassword">新密码</label>
|
||||
<input type="password" id="newPassword" placeholder="请输入要设置的新密码">
|
||||
</div>
|
||||
<div class="form-group" style="margin-top: 15px;">
|
||||
<label for="confirmNewPassword">确认新密码</label>
|
||||
<input type="password" id="confirmNewPassword" placeholder="请再次确认新密码">
|
||||
</div>
|
||||
<div class="form-actions" style="margin-top: 25px; display: flex; justify-content: flex-end;">
|
||||
<button class="btn btn-add" id="btnChangePassword">提交修改</button>
|
||||
</div>
|
||||
<div class="form-message" id="changePasswordMessage"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -370,20 +759,35 @@
|
||||
<div class="modal" style="max-width: 800px; width: 95%;">
|
||||
<div class="modal-header">
|
||||
<div style="display: flex; flex-direction: column;">
|
||||
<h2 id="serverDetailTitle" style="margin-bottom: 4px;">服务器详情</h2>
|
||||
<div id="serverDetailSubtitle" style="font-size: 0.85rem; color: var(--text-secondary); font-family: var(--font-mono);"></div>
|
||||
<h2 id="serverDetailTitle" style="margin-bottom: 0;">服务器详情</h2>
|
||||
</div>
|
||||
<button class="modal-close" id="serverDetailClose">×</button>
|
||||
</div>
|
||||
<div class="modal-body" id="serverDetailBody" style="padding: 0;">
|
||||
<div id="detailLoading" style="text-align: center; padding: 40px; display: none;">
|
||||
<div class="dot dot-pulse" style="display: inline-block; width: 12px; height: 12px; background: var(--accent-indigo);"></div>
|
||||
<div class="dot dot-pulse"
|
||||
style="display: inline-block; width: 12px; height: 12px; background: var(--accent-indigo);"></div>
|
||||
<span style="margin-left: 10px; color: var(--text-secondary);">正在从数据源读取详情...</span>
|
||||
</div>
|
||||
<div class="detail-container" id="detailContainer">
|
||||
<!-- Metric Items are injected here -->
|
||||
<div class="detail-metrics-list" id="detailMetricsList"></div>
|
||||
|
||||
<div class="detail-partitions-container metric-item" id="detailPartitionsContainer" style="display: none;">
|
||||
<div class="metric-item-header" id="partitionHeader">
|
||||
<div class="metric-label-group">
|
||||
<span class="metric-label">磁盘分区详情 (已挂载)</span>
|
||||
<span class="metric-value" id="partitionSummary">读取中...</span>
|
||||
</div>
|
||||
<svg class="chevron-icon" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
|
||||
<polyline points="6 9 12 15 18 9"></polyline>
|
||||
</svg>
|
||||
</div>
|
||||
<div class="metric-item-content" id="partitionContent">
|
||||
<div class="detail-partitions-list" id="detailPartitionsList"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="detail-info-grid" id="detailInfoGrid">
|
||||
<div class="info-item">
|
||||
<span class="info-label">CPU 核心总数</span>
|
||||
@@ -397,6 +801,10 @@
|
||||
<span class="info-label">运行时间 (Uptime)</span>
|
||||
<span class="info-value" id="detailUptime">0天 0小时</span>
|
||||
</div>
|
||||
<div class="info-item">
|
||||
<span class="info-label">硬盘总量统计</span>
|
||||
<span class="info-value" id="detailDiskTotal">0 GB</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -4,9 +4,6 @@
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>系统初始化 - 数据可视化展示大屏</title>
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
||||
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700;800;900&family=JetBrains+Mono:wght@400;500;600&display=swap" rel="stylesheet">
|
||||
<link rel="stylesheet" href="/css/style.css">
|
||||
<style>
|
||||
body {
|
||||
@@ -70,6 +67,33 @@
|
||||
justify-content: center;
|
||||
padding: 10px 0;
|
||||
}
|
||||
|
||||
@media (max-width: 480px) {
|
||||
body {
|
||||
align-items: flex-start;
|
||||
padding: 16px 12px;
|
||||
}
|
||||
.init-container {
|
||||
padding: 24px 18px;
|
||||
border-radius: 10px;
|
||||
max-width: 100%;
|
||||
}
|
||||
.init-header h2 {
|
||||
font-size: 18px;
|
||||
}
|
||||
.init-header p {
|
||||
font-size: 12px;
|
||||
}
|
||||
.form-row {
|
||||
flex-direction: column;
|
||||
}
|
||||
.actions {
|
||||
flex-direction: column;
|
||||
}
|
||||
.actions .btn {
|
||||
width: 100%;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
@@ -117,11 +141,34 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="init-header" style="margin: 24px 0 16px 0; text-align: left;">
|
||||
<h3 style="font-size: 16px; color: var(--text-main); margin: 0;">Valkey / Redis 缓存配置 (可选)</h3>
|
||||
</div>
|
||||
|
||||
<div class="form-row">
|
||||
<div class="form-group" style="flex: 2;">
|
||||
<label for="vHost">Valkey 地址</label>
|
||||
<input type="text" id="vHost" value="localhost" placeholder="localhost" autocomplete="off">
|
||||
</div>
|
||||
<div class="form-group" style="flex: 1;">
|
||||
<label for="vPort">端口</label>
|
||||
<input type="number" id="vPort" value="6379" placeholder="6379" autocomplete="off">
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="form-row">
|
||||
<div class="form-group form-group-wide">
|
||||
<label for="vPassword">Valkey 密码</label>
|
||||
<input type="password" id="vPassword" placeholder="留空则无密码">
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="form-message" id="messageBox"></div>
|
||||
|
||||
<div class="actions">
|
||||
<button class="btn btn-test" id="btnTest">测试连接</button>
|
||||
<button class="btn btn-add" id="btnInit">初始化数据库</button>
|
||||
<div class="actions" style="flex-wrap: wrap;">
|
||||
<button class="btn btn-test" id="btnTest" style="flex: 1 1 45%;">测试 MySQL</button>
|
||||
<button class="btn btn-test" id="btnTestValkey" style="flex: 1 1 45%;">测试 Valkey</button>
|
||||
<button class="btn btn-add" id="btnInit" style="flex: 1 1 100%;">确认并初始化系统</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -165,8 +212,8 @@
|
||||
</div>
|
||||
<div class="form-row">
|
||||
<div class="form-group form-group-wide">
|
||||
<label for="promName">数据源名称</label>
|
||||
<input type="text" id="promName" placeholder="例如:生产环境" autocomplete="off">
|
||||
<label for="promSourceName">数据源名称</label>
|
||||
<input type="text" id="promSourceName" name="p-source-name-init" placeholder="例如:生产环境" autocomplete="one-time-code">
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
2200
public/js/app.js
2200
public/js/app.js
File diff suppressed because it is too large
Load Diff
@@ -10,14 +10,99 @@ class AreaChart {
|
||||
this.animProgress = 0;
|
||||
this.animFrame = null;
|
||||
this.showP95 = false;
|
||||
this.showRx = true;
|
||||
this.showTx = true;
|
||||
this.p95Type = 'tx'; // 'tx', 'rx', 'both'
|
||||
this.dpr = window.devicePixelRatio || 1;
|
||||
this.padding = { top: 20, right: 16, bottom: 32, left: 56 };
|
||||
|
||||
this._resize = this.resize.bind(this);
|
||||
this.prevMaxVal = 0;
|
||||
this.currentMaxVal = 0;
|
||||
this.lastDataHash = ''; // Fingerprint for optimization
|
||||
|
||||
// Use debounced resize for performance and safety
|
||||
this._resize = typeof debounce === 'function' ? debounce(this.resize.bind(this), 100) : this.resize.bind(this);
|
||||
window.addEventListener('resize', this._resize);
|
||||
|
||||
// Drag zoom support
|
||||
this.isDraggingP95 = false;
|
||||
this.customMaxVal = null;
|
||||
|
||||
this.onPointerDown = this.onPointerDown.bind(this);
|
||||
this.onPointerMove = this.onPointerMove.bind(this);
|
||||
this.onPointerUp = this.onPointerUp.bind(this);
|
||||
|
||||
this.canvas.addEventListener('pointerdown', this.onPointerDown);
|
||||
window.addEventListener('pointermove', this.onPointerMove);
|
||||
window.addEventListener('pointerup', this.onPointerUp);
|
||||
|
||||
this.resize();
|
||||
}
|
||||
|
||||
onPointerDown(e) {
|
||||
if (!this.showP95 || !this.p95) return;
|
||||
const rect = this.canvas.getBoundingClientRect();
|
||||
const scaleY = this.height / rect.height;
|
||||
const y = (e.clientY - rect.top) * scaleY;
|
||||
|
||||
const p = this.padding;
|
||||
const chartH = this.height - p.top - p.bottom;
|
||||
|
||||
// Calculate current P95 Y position
|
||||
const k = 1024;
|
||||
const currentMaxVal = (this.customMaxVal !== null ? this.customMaxVal : (this.currentMaxVal || 1024));
|
||||
let unitIdx = Math.floor(Math.log(Math.max(1, currentMaxVal)) / Math.log(k));
|
||||
unitIdx = Math.max(0, Math.min(unitIdx, 4));
|
||||
const unitFactor = Math.pow(k, unitIdx);
|
||||
const rawValInUnit = (currentMaxVal * 1.15) / unitFactor;
|
||||
let niceMaxInUnit;
|
||||
if (rawValInUnit <= 1) niceMaxInUnit = 1;
|
||||
else if (rawValInUnit <= 2) niceMaxInUnit = 2;
|
||||
else if (rawValInUnit <= 5) niceMaxInUnit = 5;
|
||||
else if (rawValInUnit <= 10) niceMaxInUnit = 10;
|
||||
else if (rawValInUnit <= 20) niceMaxInUnit = 20;
|
||||
else if (rawValInUnit <= 50) niceMaxInUnit = 50;
|
||||
else if (rawValInUnit <= 100) niceMaxInUnit = 100;
|
||||
else if (rawValInUnit <= 200) niceMaxInUnit = 200;
|
||||
else if (rawValInUnit <= 500) niceMaxInUnit = 500;
|
||||
else if (rawValInUnit <= 1000) niceMaxInUnit = 1000;
|
||||
else niceMaxInUnit = Math.ceil(rawValInUnit / 100) * 100;
|
||||
|
||||
const displayMaxVal = this.customMaxVal !== null ? this.customMaxVal : (niceMaxInUnit * unitFactor);
|
||||
const p95Y = p.top + chartH - (this.p95 / (displayMaxVal || 1)) * chartH;
|
||||
|
||||
if (Math.abs(y - p95Y) < 25) {
|
||||
this.isDraggingP95 = true;
|
||||
this.canvas.style.cursor = 'ns-resize';
|
||||
this.canvas.setPointerCapture(e.pointerId);
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
}
|
||||
}
|
||||
|
||||
onPointerMove(e) {
|
||||
if (!this.isDraggingP95) return;
|
||||
const rect = this.canvas.getBoundingClientRect();
|
||||
const scaleY = this.height / rect.height;
|
||||
const y = (e.clientY - rect.top) * scaleY;
|
||||
const p = this.padding;
|
||||
const chartH = this.height - p.top - p.bottom;
|
||||
|
||||
const dy = p.top + chartH - y;
|
||||
if (dy > 10) {
|
||||
this.customMaxVal = (this.p95 * chartH) / dy;
|
||||
this.draw();
|
||||
}
|
||||
}
|
||||
|
||||
onPointerUp(e) {
|
||||
if (this.isDraggingP95) {
|
||||
this.isDraggingP95 = false;
|
||||
this.canvas.style.cursor = '';
|
||||
this.canvas.releasePointerCapture(e.pointerId);
|
||||
}
|
||||
}
|
||||
|
||||
resize() {
|
||||
const rect = this.canvas.parentElement.getBoundingClientRect();
|
||||
this.width = rect.width;
|
||||
@@ -33,8 +118,31 @@ class AreaChart {
|
||||
setData(data) {
|
||||
if (!data || !data.timestamps) return;
|
||||
|
||||
// Downsample if data is too dense (target ~1500 points for performance)
|
||||
const MAX_POINTS = 1500;
|
||||
// 1. Data Fingerprinting: Skip redundant updates to save GPU/CPU
|
||||
const fingerprint = data.timestamps.length + '_' +
|
||||
(data.rx.length > 0 ? data.rx[data.rx.length - 1] : 0) + '_' +
|
||||
(data.tx.length > 0 ? data.tx[data.tx.length - 1] : 0);
|
||||
|
||||
if (fingerprint === this.lastDataHash) return;
|
||||
this.lastDataHash = fingerprint;
|
||||
|
||||
// Store old data for smooth transition before updating this.data
|
||||
// Only clone if there is data to clone; otherwise use empty set
|
||||
if (this.data && this.data.timestamps && this.data.timestamps.length > 0) {
|
||||
this.prevData = {
|
||||
timestamps: [...this.data.timestamps],
|
||||
rx: [...this.data.rx],
|
||||
tx: [...this.data.tx]
|
||||
};
|
||||
} else {
|
||||
this.prevData = { timestamps: [], rx: [], tx: [] };
|
||||
}
|
||||
|
||||
// Smoothly transition max value context too
|
||||
this.prevMaxVal = this.currentMaxVal || 0;
|
||||
|
||||
// Downsample if data is too dense (target ~500 points for GPU performance)
|
||||
const MAX_POINTS = 500;
|
||||
if (data.timestamps.length > MAX_POINTS) {
|
||||
const skip = Math.ceil(data.timestamps.length / MAX_POINTS);
|
||||
const downsampled = { timestamps: [], rx: [], tx: [] };
|
||||
@@ -48,10 +156,26 @@ class AreaChart {
|
||||
this.data = data;
|
||||
}
|
||||
|
||||
// Refresh currentMaxVal target for interpolation in draw()
|
||||
let rawMax = 1024;
|
||||
for (let i = 0; i < this.data.rx.length; i++) {
|
||||
if (this.showRx) rawMax = Math.max(rawMax, this.data.rx[i] || 0);
|
||||
if (this.showTx) rawMax = Math.max(rawMax, this.data.tx[i] || 0);
|
||||
}
|
||||
this.currentMaxVal = rawMax;
|
||||
|
||||
// Calculate P95 (95th percentile)
|
||||
// Common standard: 95th percentile of the peak (max of rx/tx or sum)
|
||||
// We'll use max(rx, tx) at each point which is common for billing
|
||||
const combined = data.rx.map((r, i) => Math.max(r || 0, data.tx[i] || 0));
|
||||
let combined = [];
|
||||
if (this.p95Type === 'tx') {
|
||||
combined = data.tx.map(t => t || 0);
|
||||
} else if (this.p95Type === 'rx') {
|
||||
combined = data.rx.map(r => r || 0);
|
||||
} else if (this.p95Type === 'max') {
|
||||
combined = data.tx.map((t, i) => Math.max(t || 0, data.rx[i] || 0));
|
||||
} else {
|
||||
combined = data.tx.map((t, i) => (t || 0) + (data.rx[i] || 0));
|
||||
}
|
||||
|
||||
if (combined.length > 0) {
|
||||
const sorted = [...combined].sort((a, b) => a - b);
|
||||
const p95Idx = Math.floor(sorted.length * 0.95);
|
||||
@@ -67,7 +191,7 @@ class AreaChart {
|
||||
animate() {
|
||||
if (this.animFrame) cancelAnimationFrame(this.animFrame);
|
||||
const start = performance.now();
|
||||
const duration = 800;
|
||||
const duration = 400; // Shorter animation = less GPU time
|
||||
|
||||
const step = (now) => {
|
||||
const elapsed = now - start;
|
||||
@@ -103,25 +227,24 @@ class AreaChart {
|
||||
return;
|
||||
}
|
||||
|
||||
// Find max raw value
|
||||
let maxDataVal = 0;
|
||||
for (let i = 0; i < rx.length; i++) {
|
||||
maxDataVal = Math.max(maxDataVal, rx[i] || 0, tx[i] || 0);
|
||||
// Determine consistent unit based on max data value
|
||||
let maxDataVal = 1024;
|
||||
if (this.prevMaxVal && this.animProgress < 1) {
|
||||
// Interpolate the max value context to keep vertical scale smooth
|
||||
maxDataVal = this.prevMaxVal + (this.currentMaxVal - this.prevMaxVal) * (this.animProgress || 0);
|
||||
} else {
|
||||
maxDataVal = this.currentMaxVal;
|
||||
}
|
||||
|
||||
// Determine consistent unit based on max data value
|
||||
const k = 1024;
|
||||
const sizes = ['B/s', 'KB/s', 'MB/s', 'GB/s', 'TB/s'];
|
||||
let unitIdx = Math.floor(Math.log(Math.max(1, maxDataVal)) / Math.log(k));
|
||||
unitIdx = Math.max(0, Math.min(unitIdx, sizes.length - 1));
|
||||
const unitFactor = Math.pow(k, unitIdx);
|
||||
const unitLabel = sizes[unitIdx];
|
||||
|
||||
// Get value in current units and find a "nice" round max
|
||||
// Use 1.15 cushion
|
||||
const rawValInUnit = (maxDataVal * 1.15) / unitFactor;
|
||||
let niceMaxInUnit;
|
||||
|
||||
if (rawValInUnit <= 1) niceMaxInUnit = 1;
|
||||
else if (rawValInUnit <= 2) niceMaxInUnit = 2;
|
||||
else if (rawValInUnit <= 5) niceMaxInUnit = 5;
|
||||
@@ -134,14 +257,27 @@ class AreaChart {
|
||||
else if (rawValInUnit <= 1000) niceMaxInUnit = 1000;
|
||||
else niceMaxInUnit = Math.ceil(rawValInUnit / 100) * 100;
|
||||
|
||||
const maxVal = niceMaxInUnit * unitFactor;
|
||||
let maxVal = niceMaxInUnit * unitFactor;
|
||||
if (this.customMaxVal !== null) {
|
||||
maxVal = this.customMaxVal;
|
||||
}
|
||||
|
||||
// Recalculate units based on final maxVal (could be zoomed)
|
||||
let finalUnitIdx = Math.floor(Math.log(Math.max(1, maxVal)) / Math.log(k));
|
||||
finalUnitIdx = Math.max(0, Math.min(finalUnitIdx, sizes.length - 1));
|
||||
const finalFactor = Math.pow(k, finalUnitIdx);
|
||||
const finalUnitLabel = sizes[finalUnitIdx];
|
||||
|
||||
const len = timestamps.length;
|
||||
const xStep = chartW / (len - 1);
|
||||
|
||||
// Helper to get point
|
||||
// Helper to get point with smooth value transition
|
||||
const getX = (i) => p.left + i * xStep;
|
||||
const getY = (val) => p.top + chartH - (val / (maxVal || 1)) * chartH * this.animProgress;
|
||||
const getY = (val, prevVal = 0) => {
|
||||
// Interpolate value from previous state to new state
|
||||
const actualVal = prevVal + (val - prevVal) * this.animProgress;
|
||||
return p.top + chartH - (actualVal / (maxVal || 1)) * chartH;
|
||||
};
|
||||
|
||||
// Draw grid lines
|
||||
ctx.strokeStyle = 'rgba(99, 102, 241, 0.08)';
|
||||
@@ -154,14 +290,14 @@ class AreaChart {
|
||||
ctx.lineTo(p.left + chartW, y);
|
||||
ctx.stroke();
|
||||
|
||||
// Y-axis labels - share the same unit for readability
|
||||
const valInUnit = niceMaxInUnit * (1 - i / gridLines);
|
||||
// Y-axis labels
|
||||
const v = maxVal * (1 - i / gridLines);
|
||||
const valInUnit = v / finalFactor;
|
||||
ctx.fillStyle = '#5a6380';
|
||||
ctx.font = '10px "JetBrains Mono", monospace';
|
||||
ctx.textAlign = 'right';
|
||||
|
||||
// Format: "X.X MB/s" or "X MB/s"
|
||||
const label = (valInUnit % 1 === 0 ? valInUnit : valInUnit.toFixed(1)) + ' ' + unitLabel;
|
||||
const label = (valInUnit % 1 === 0 ? valInUnit : valInUnit.toFixed(1)) + ' ' + finalUnitLabel;
|
||||
ctx.fillText(label, p.left - 10, y + 3);
|
||||
}
|
||||
|
||||
@@ -174,41 +310,42 @@ class AreaChart {
|
||||
const x = getX(i);
|
||||
ctx.fillText(formatTime(timestamps[i]), x, h - 8);
|
||||
}
|
||||
// Always show last label
|
||||
ctx.fillText(formatTime(timestamps[len - 1]), getX(len - 1), h - 8);
|
||||
|
||||
// Draw TX area
|
||||
this.drawArea(ctx, tx, getX, getY, chartH, p,
|
||||
'rgba(99, 102, 241, 0.25)', 'rgba(99, 102, 241, 0.02)',
|
||||
'#6366f1', len);
|
||||
// Draw data areas with clipping
|
||||
ctx.save();
|
||||
ctx.beginPath();
|
||||
ctx.rect(p.left, p.top, chartW, chartH);
|
||||
ctx.clip();
|
||||
|
||||
// Draw RX area (on top)
|
||||
this.drawArea(ctx, rx, getX, getY, chartH, p,
|
||||
'rgba(6, 182, 212, 0.25)', 'rgba(6, 182, 212, 0.02)',
|
||||
'#06b6d4', len);
|
||||
if (this.showTx) {
|
||||
this.drawArea(ctx, tx, this.prevData ? this.prevData.tx : null, getX, getY, chartH, p,
|
||||
'rgba(99, 102, 241, 0.25)', 'rgba(99, 102, 241, 0.02)', '#6366f1', len);
|
||||
}
|
||||
if (this.showRx) {
|
||||
this.drawArea(ctx, rx, this.prevData ? this.prevData.rx : null, getX, getY, chartH, p,
|
||||
'rgba(6, 182, 212, 0.25)', 'rgba(6, 182, 212, 0.02)', '#06b6d4', len);
|
||||
}
|
||||
ctx.restore();
|
||||
|
||||
// Draw P95 line
|
||||
if (this.showP95 && this.p95 && this.animProgress === 1) {
|
||||
if (this.showP95 && this.p95 && (this.animProgress === 1 || this.isDraggingP95)) {
|
||||
const p95Y = getY(this.p95);
|
||||
// Only draw if within visible range
|
||||
if (p95Y >= p.top && p95Y <= p.top + chartH) {
|
||||
ctx.save();
|
||||
ctx.beginPath();
|
||||
ctx.setLineDash([6, 4]);
|
||||
ctx.strokeStyle = 'rgba(244, 63, 94, 0.85)'; // --accent-rose
|
||||
ctx.strokeStyle = 'rgba(244, 63, 94, 0.85)';
|
||||
ctx.lineWidth = 1.5;
|
||||
ctx.moveTo(p.left, p95Y);
|
||||
ctx.lineTo(p.left + chartW, p95Y);
|
||||
ctx.stroke();
|
||||
|
||||
// P95 label background
|
||||
const label = '95计费: ' + (window.formatBandwidth ? window.formatBandwidth(this.p95) : this.p95.toFixed(2));
|
||||
ctx.font = 'bold 11px "JetBrains Mono", monospace';
|
||||
const metrics = ctx.measureText(label);
|
||||
ctx.fillStyle = 'rgba(244, 63, 94, 0.15)';
|
||||
ctx.fillRect(p.left + 8, p95Y - 20, metrics.width + 12, 18);
|
||||
|
||||
// P95 label text
|
||||
ctx.fillStyle = '#f43f5e';
|
||||
ctx.textAlign = 'left';
|
||||
ctx.fillText(label, p.left + 14, p95Y - 7);
|
||||
@@ -217,22 +354,23 @@ class AreaChart {
|
||||
}
|
||||
}
|
||||
|
||||
drawArea(ctx, values, getX, getY, chartH, p, fillColorTop, fillColorBottom, strokeColor, len) {
|
||||
drawArea(ctx, values, prevValues, getX, getY, chartH, p, fillColorTop, fillColorBottom, strokeColor, len) {
|
||||
if (!values || values.length === 0) return;
|
||||
|
||||
const useSimple = len > 500;
|
||||
const useSimple = len > 80;
|
||||
const getPVal = (i) => (prevValues && i < prevValues.length) ? prevValues[i] : 0;
|
||||
|
||||
// Fill
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(getX(0), getY(values[0] || 0));
|
||||
ctx.moveTo(getX(0), getY(values[0] || 0, getPVal(0)));
|
||||
for (let i = 1; i < len; i++) {
|
||||
const currY = getY(values[i] || 0, getPVal(i));
|
||||
if (useSimple) {
|
||||
ctx.lineTo(getX(i), getY(values[i] || 0));
|
||||
ctx.lineTo(getX(i), currY);
|
||||
} else {
|
||||
const prevX = getX(i - 1);
|
||||
const currX = getX(i);
|
||||
const prevY = getY(values[i - 1] || 0);
|
||||
const currY = getY(values[i] || 0);
|
||||
const prevY = getY(values[i - 1] || 0, getPVal(i - 1));
|
||||
const midX = (prevX + currX) / 2;
|
||||
ctx.bezierCurveTo(midX, prevY, midX, currY, currX, currY);
|
||||
}
|
||||
@@ -249,15 +387,15 @@ class AreaChart {
|
||||
|
||||
// Stroke
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(getX(0), getY(values[0] || 0));
|
||||
ctx.moveTo(getX(0), getY(values[0] || 0, getPVal(0)));
|
||||
for (let i = 1; i < len; i++) {
|
||||
const currY = getY(values[i] || 0, getPVal(i));
|
||||
if (useSimple) {
|
||||
ctx.lineTo(getX(i), getY(values[i] || 0));
|
||||
ctx.lineTo(getX(i), currY);
|
||||
} else {
|
||||
const prevX = getX(i - 1);
|
||||
const currX = getX(i);
|
||||
const prevY = getY(values[i - 1] || 0);
|
||||
const currY = getY(values[i] || 0);
|
||||
const prevY = getY(values[i - 1] || 0, getPVal(i - 1));
|
||||
const midX = (prevX + currX) / 2;
|
||||
ctx.bezierCurveTo(midX, prevY, midX, currY, currX, currY);
|
||||
}
|
||||
@@ -278,13 +416,18 @@ class MetricChart {
|
||||
constructor(canvas, unit = '') {
|
||||
this.canvas = canvas;
|
||||
this.ctx = canvas.getContext('2d');
|
||||
this.data = { timestamps: [], values: [] };
|
||||
this.data = { timestamps: [], values: [], series: null };
|
||||
this.unit = unit; // '%', 'B/s', etc.
|
||||
this.dpr = window.devicePixelRatio || 1;
|
||||
this.padding = { top: 10, right: 10, bottom: 20, left: 60 };
|
||||
this.padding = { top: 10, right: 10, bottom: 35, left: 60 };
|
||||
this.animProgress = 0;
|
||||
|
||||
this._resize = this.resize.bind(this);
|
||||
this.prevMaxVal = 0;
|
||||
this.currentMaxVal = 0;
|
||||
this.lastDataHash = ''; // Fingerprint for optimization
|
||||
|
||||
// Use debounced resize for performance and safety
|
||||
this._resize = typeof debounce === 'function' ? debounce(this.resize.bind(this), 100) : this.resize.bind(this);
|
||||
window.addEventListener('resize', this._resize);
|
||||
this.resize();
|
||||
}
|
||||
@@ -305,14 +448,46 @@ class MetricChart {
|
||||
}
|
||||
|
||||
setData(data) {
|
||||
this.data = data || { timestamps: [], values: [] };
|
||||
if (!data || !data.timestamps) return;
|
||||
|
||||
// 1. Simple fingerprinting to avoid constant re-animation of same data
|
||||
const lastVal = data.values && data.values.length > 0 ? data.values[data.values.length - 1] : 0;
|
||||
const fingerprint = data.timestamps.length + '_' + lastVal + '_' + (data.series ? 's' : 'v');
|
||||
|
||||
if (fingerprint === this.lastDataHash) return;
|
||||
this.lastDataHash = fingerprint;
|
||||
|
||||
if (this.data && this.data.values && this.data.values.length > 0) {
|
||||
this.prevData = JSON.parse(JSON.stringify(this.data));
|
||||
} else {
|
||||
this.prevData = { timestamps: [], values: [], series: null };
|
||||
}
|
||||
|
||||
this.prevMaxVal = this.currentMaxVal || 0;
|
||||
this.data = data || { timestamps: [], values: [], series: null };
|
||||
|
||||
// Target max
|
||||
if (this.data.series) {
|
||||
this.currentMaxVal = 100;
|
||||
} else {
|
||||
const raw = Math.max(...(this.data.values || []), 0.1);
|
||||
if (this.unit === '%' && raw <= 100) {
|
||||
if (raw > 80) this.currentMaxVal = 100;
|
||||
else if (raw > 40) this.currentMaxVal = 80;
|
||||
else if (raw > 20) this.currentMaxVal = 50;
|
||||
else this.currentMaxVal = 25;
|
||||
} else {
|
||||
this.currentMaxVal = raw * 1.25;
|
||||
}
|
||||
}
|
||||
|
||||
this.animate();
|
||||
}
|
||||
|
||||
animate() {
|
||||
if (this.animFrame) cancelAnimationFrame(this.animFrame);
|
||||
const start = performance.now();
|
||||
const duration = 500;
|
||||
const duration = 300; // Snappier and lighter on GPU
|
||||
const step = (now) => {
|
||||
const elapsed = now - start;
|
||||
this.animProgress = Math.min(elapsed / duration, 1);
|
||||
@@ -333,7 +508,7 @@ class MetricChart {
|
||||
|
||||
ctx.clearRect(0, 0, w, h);
|
||||
|
||||
const { timestamps, values } = this.data;
|
||||
const { timestamps, values, series } = this.data;
|
||||
if (!timestamps || timestamps.length < 2) {
|
||||
ctx.fillStyle = '#5a6380';
|
||||
ctx.font = '11px sans-serif';
|
||||
@@ -342,21 +517,18 @@ class MetricChart {
|
||||
return;
|
||||
}
|
||||
|
||||
// Find max with cushion
|
||||
let maxVal = Math.max(...values, 0.1);
|
||||
if (this.unit === '%' && maxVal <= 100) {
|
||||
if (maxVal > 80) maxVal = 100;
|
||||
else if (maxVal > 40) maxVal = 80;
|
||||
else if (maxVal > 20) maxVal = 50;
|
||||
else maxVal = 25;
|
||||
} else {
|
||||
maxVal = maxVal * 1.25;
|
||||
}
|
||||
// Determine Y max (interpolated)
|
||||
const targetMax = this.currentMaxVal || 0.1;
|
||||
const startMax = this.prevMaxVal || targetMax;
|
||||
const maxVal = startMax + (targetMax - startMax) * this.animProgress;
|
||||
|
||||
const len = timestamps.length;
|
||||
const xStep = chartW / (len - 1);
|
||||
const getX = (i) => p.left + i * xStep;
|
||||
const getY = (val) => p.top + chartH - (val / (maxVal || 1)) * chartH * this.animProgress;
|
||||
const getY = (val, prevVal = 0) => {
|
||||
const actualVal = prevVal + (val - prevVal) * this.animProgress;
|
||||
return p.top + chartH - (actualVal / (maxVal || 1)) * chartH;
|
||||
};
|
||||
|
||||
// Grid
|
||||
ctx.strokeStyle = 'rgba(99, 102, 241, 0.05)';
|
||||
@@ -374,25 +546,122 @@ class MetricChart {
|
||||
ctx.textAlign = 'right';
|
||||
|
||||
let label = '';
|
||||
if (this.unit === 'B/s') {
|
||||
label = window.formatBandwidth ? window.formatBandwidth(v) : v.toFixed(0);
|
||||
if (this.unit === 'B/s' || this.unit === 'B') {
|
||||
const isRate = this.unit === 'B/s';
|
||||
if (window.formatBandwidth && isRate) {
|
||||
label = window.formatBandwidth(v);
|
||||
} else if (window.formatBytes) {
|
||||
label = window.formatBytes(v) + (isRate ? '/s' : '');
|
||||
} else {
|
||||
label = v.toFixed(0) + this.unit;
|
||||
}
|
||||
} else if (this.unit === '%' && this.totalValue) {
|
||||
// 当提供了总量时,将百分比转换为实际数值显示(例如内存显示 2GB 而非 25%)
|
||||
const absVal = v * (this.totalValue / 100);
|
||||
label = window.formatBytes ? window.formatBytes(absVal) : absVal.toFixed(0);
|
||||
} else {
|
||||
label = (v >= 1000 ? (v / 1000).toFixed(1) + 'k' : v.toFixed(v < 10 && v > 0 ? 1 : 0)) + this.unit;
|
||||
}
|
||||
ctx.fillText(label, p.left - 8, y + 3);
|
||||
}
|
||||
|
||||
// Path
|
||||
// X-axis Timeline
|
||||
ctx.fillStyle = '#5a6380';
|
||||
ctx.font = '9px "JetBrains Mono", monospace';
|
||||
ctx.textAlign = 'center';
|
||||
const labelInterval = Math.max(1, Math.floor(len / 5));
|
||||
for (let i = 0; i < len; i += labelInterval) {
|
||||
const x = getX(i);
|
||||
ctx.fillText(formatTime(timestamps[i]), x, h - 8);
|
||||
}
|
||||
// Always show last label if not already shown
|
||||
if ((len - 1) % labelInterval !== 0) {
|
||||
ctx.fillText(formatTime(timestamps[len - 1]), getX(len - 1), h - 8);
|
||||
}
|
||||
|
||||
if (series) {
|
||||
// Draw Stacked Area
|
||||
const modes = [
|
||||
{ name: 'idle', color: 'rgba(34, 197, 94, 0.4)', stroke: '#22c55e' }, // Green
|
||||
{ name: 'other', color: 'rgba(168, 85, 247, 0.4)', stroke: '#a855f7' }, // Purple
|
||||
{ name: 'irq', color: 'rgba(249, 115, 22, 0.4)', stroke: '#f97316' }, // Orange
|
||||
{ name: 'iowait', color: 'rgba(239, 68, 68, 0.4)', stroke: '#ef4444' }, // Red
|
||||
{ name: 'system', color: 'rgba(234, 179, 8, 0.4)', stroke: '#eab308' }, // Yellow
|
||||
{ name: 'user', color: 'rgba(99, 102, 241, 0.4)', stroke: '#6366f1' } // Indigo
|
||||
];
|
||||
|
||||
let currentBase = new Array(len).fill(0);
|
||||
let prevBase = new Array(len).fill(0);
|
||||
|
||||
modes.forEach(mode => {
|
||||
const vals = series[mode.name];
|
||||
if (!vals) return;
|
||||
|
||||
const prevVals = (this.prevData && this.prevData.series) ? this.prevData.series[mode.name] : null;
|
||||
const getPVal = (arr, idx) => (arr && idx < arr.length) ? arr[idx] : 0;
|
||||
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(getX(0), getY(values[0]));
|
||||
ctx.moveTo(getX(0), getY(currentBase[0] + vals[0], getPVal(prevBase, 0) + getPVal(prevVals, 0)));
|
||||
for (let i = 1; i < len; i++) {
|
||||
ctx.lineTo(getX(i), getY(currentBase[i] + vals[i], getPVal(prevBase, i) + getPVal(prevVals, i)));
|
||||
}
|
||||
ctx.lineTo(getX(len - 1), getY(currentBase[len - 1], getPVal(prevBase, len - 1)));
|
||||
for (let i = len - 1; i >= 0; i--) {
|
||||
ctx.lineTo(getX(i), getY(currentBase[i], getPVal(prevBase, i)));
|
||||
}
|
||||
ctx.closePath();
|
||||
ctx.fillStyle = mode.color;
|
||||
ctx.fill();
|
||||
|
||||
// Stroke
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(getX(0), getY(currentBase[0] + vals[0], getPVal(prevBase, 0) + getPVal(prevVals, 0)));
|
||||
for (let i = 1; i < len; i++) {
|
||||
ctx.lineTo(getX(i), getY(currentBase[i] + vals[i], getPVal(prevBase, i) + getPVal(prevVals, i)));
|
||||
}
|
||||
ctx.strokeStyle = mode.stroke;
|
||||
ctx.lineWidth = 1;
|
||||
ctx.stroke();
|
||||
|
||||
// Update boxes for next series
|
||||
for (let i = 0; i < len; i++) {
|
||||
currentBase[i] += vals[i];
|
||||
if (prevBase) prevBase[i] = (prevBase[i] || 0) + getPVal(prevVals, i);
|
||||
}
|
||||
});
|
||||
|
||||
// Add Legend at bottom right (moved up slightly)
|
||||
ctx.font = '9px sans-serif';
|
||||
ctx.textAlign = 'right';
|
||||
let lx = w - 10;
|
||||
let ly = h - 20; // Increased padding from bottom
|
||||
[...modes].reverse().forEach(m => {
|
||||
ctx.fillStyle = m.stroke;
|
||||
ctx.fillRect(lx - 10, ly - 8, 8, 8);
|
||||
ctx.fillStyle = '#5a6380';
|
||||
ctx.fillText(m.name.charAt(0).toUpperCase() + m.name.slice(1), lx - 15, ly - 1);
|
||||
lx -= 70; // Increased gap for safety
|
||||
});
|
||||
|
||||
} else {
|
||||
const useSimple = len > 100;
|
||||
const prevVals = this.prevData ? this.prevData.values : null;
|
||||
const getPVal = (i) => (prevVals && i < prevVals.length) ? prevVals[i] : 0;
|
||||
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(getX(0), getY(values[0], getPVal(0)));
|
||||
for (let i = 1; i < len; i++) {
|
||||
const currY = getY(values[i], getPVal(i));
|
||||
if (useSimple) {
|
||||
ctx.lineTo(getX(i), currY);
|
||||
} else {
|
||||
const prevX = getX(i - 1);
|
||||
const currX = getX(i);
|
||||
const prevY = getY(values[i - 1]);
|
||||
const currY = getY(values[i]);
|
||||
const prevY = getY(values[i - 1], getPVal(i - 1));
|
||||
const midX = (prevX + currX) / 2;
|
||||
ctx.bezierCurveTo(midX, prevY, midX, currY, currX, currY);
|
||||
}
|
||||
}
|
||||
|
||||
// Stroke
|
||||
ctx.strokeStyle = '#6366f1';
|
||||
@@ -418,6 +687,7 @@ class MetricChart {
|
||||
ctx.fillStyle = '#6366f1';
|
||||
ctx.fill();
|
||||
}
|
||||
}
|
||||
|
||||
destroy() {
|
||||
window.removeEventListener('resize', this._resize);
|
||||
|
||||
@@ -4,8 +4,12 @@ document.addEventListener('DOMContentLoaded', () => {
|
||||
const userInput = document.getElementById('user');
|
||||
const passwordInput = document.getElementById('password');
|
||||
const databaseInput = document.getElementById('database');
|
||||
const vHostInput = document.getElementById('vHost');
|
||||
const vPortInput = document.getElementById('vPort');
|
||||
const vPasswordInput = document.getElementById('vPassword');
|
||||
|
||||
const btnTest = document.getElementById('btnTest');
|
||||
const btnTestValkey = document.getElementById('btnTestValkey');
|
||||
const btnInit = document.getElementById('btnInit');
|
||||
const messageBox = document.getElementById('messageBox');
|
||||
|
||||
@@ -14,7 +18,7 @@ document.addEventListener('DOMContentLoaded', () => {
|
||||
|
||||
const promForm = document.getElementById('promForm');
|
||||
const initForm = document.getElementById('initForm');
|
||||
const promName = document.getElementById('promName');
|
||||
const promName = document.getElementById('promSourceName');
|
||||
const promUrl = document.getElementById('promUrl');
|
||||
const promDesc = document.getElementById('promDesc');
|
||||
const btnPromTest = document.getElementById('btnPromTest');
|
||||
@@ -65,6 +69,7 @@ document.addEventListener('DOMContentLoaded', () => {
|
||||
promForm.style.display = 'block';
|
||||
initHeaderTitle.textContent = '配置 Prometheus';
|
||||
initHeaderDesc.textContent = '配置您的第一个 Prometheus 数据源监控连接';
|
||||
if (promName) promName.value = ''; // Ensure it's clear on load
|
||||
}
|
||||
} catch (err) {
|
||||
initForm.style.display = 'block';
|
||||
@@ -102,6 +107,34 @@ document.addEventListener('DOMContentLoaded', () => {
|
||||
}
|
||||
});
|
||||
|
||||
btnTestValkey.addEventListener('click', async () => {
|
||||
btnTestValkey.disabled = true;
|
||||
const oldText = btnTestValkey.textContent;
|
||||
btnTestValkey.textContent = '测试中...';
|
||||
try {
|
||||
const res = await fetch('/api/setup/test-valkey', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
host: vHostInput.value,
|
||||
port: vPortInput.value,
|
||||
password: vPasswordInput.value
|
||||
})
|
||||
});
|
||||
const data = await res.json();
|
||||
if (data.success) {
|
||||
showMessage('Valkey 连接成功!');
|
||||
} else {
|
||||
showMessage('Valkey 连接失败: ' + (data.error || '未知错误'), true);
|
||||
}
|
||||
} catch (err) {
|
||||
showMessage('Valkey 请求失败: ' + err.message, true);
|
||||
} finally {
|
||||
btnTestValkey.disabled = false;
|
||||
btnTestValkey.textContent = oldText;
|
||||
}
|
||||
});
|
||||
|
||||
btnInit.addEventListener('click', async () => {
|
||||
btnInit.disabled = true;
|
||||
const oldText = btnInit.textContent;
|
||||
@@ -115,7 +148,10 @@ document.addEventListener('DOMContentLoaded', () => {
|
||||
port: portInput.value,
|
||||
user: userInput.value,
|
||||
password: passwordInput.value,
|
||||
database: databaseInput.value
|
||||
database: databaseInput.value,
|
||||
vHost: vHostInput.value,
|
||||
vPort: vPortInput.value,
|
||||
vPassword: vPasswordInput.value
|
||||
})
|
||||
});
|
||||
const data = await res.json();
|
||||
|
||||
@@ -28,6 +28,15 @@ function formatBandwidth(bytesPerSec, decimals = 2) {
|
||||
return value.toFixed(decimals) + ' ' + sizes[i];
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert bytes per second to MB/s (numeric string)
|
||||
*/
|
||||
function toMBps(bytesPerSec, decimals = 2) {
|
||||
if (!bytesPerSec || bytesPerSec === 0) return '0.00';
|
||||
const mbps = bytesPerSec / (1024 * 1024);
|
||||
return mbps.toFixed(decimals);
|
||||
}
|
||||
|
||||
/**
|
||||
* Format percentage
|
||||
*/
|
||||
@@ -102,3 +111,17 @@ function animateValue(element, start, end, duration = 600) {
|
||||
|
||||
requestAnimationFrame(update);
|
||||
}
|
||||
|
||||
/**
|
||||
* Debounce function to limit execution frequency
|
||||
*/
|
||||
function debounce(fn, delay) {
|
||||
let timer = null;
|
||||
return function (...args) {
|
||||
if (timer) clearTimeout(timer);
|
||||
timer = setTimeout(() => {
|
||||
fn.apply(this, args);
|
||||
timer = null;
|
||||
}, delay);
|
||||
};
|
||||
}
|
||||
|
||||
45
public/vendor/echarts.min.js
vendored
Normal file
45
public/vendor/echarts.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
1
public/vendor/world.json
vendored
Normal file
1
public/vendor/world.json
vendored
Normal file
File diff suppressed because one or more lines are too long
@@ -1,14 +1,20 @@
|
||||
const Redis = require('ioredis');
|
||||
|
||||
const host = process.env.VALKEY_HOST || 'localhost';
|
||||
const port = parseInt(process.env.VALKEY_PORT) || 6379;
|
||||
const password = process.env.VALKEY_PASSWORD || undefined;
|
||||
const db = parseInt(process.env.VALKEY_DB) || 0;
|
||||
const ttl = parseInt(process.env.VALKEY_TTL) || 30;
|
||||
|
||||
let redis = null;
|
||||
let ttl = 30;
|
||||
|
||||
try {
|
||||
function init() {
|
||||
if (redis) {
|
||||
redis.disconnect();
|
||||
}
|
||||
|
||||
const host = process.env.VALKEY_HOST || 'localhost';
|
||||
const port = parseInt(process.env.VALKEY_PORT) || 6379;
|
||||
const password = process.env.VALKEY_PASSWORD || undefined;
|
||||
const db = parseInt(process.env.VALKEY_DB) || 0;
|
||||
ttl = parseInt(process.env.VALKEY_TTL) || 30;
|
||||
|
||||
try {
|
||||
redis = new Redis({
|
||||
host,
|
||||
port,
|
||||
@@ -22,11 +28,15 @@ try {
|
||||
// Fail silently after one retry, we just won't cache
|
||||
console.warn('[Cache] Valkey connection failed, caching disabled:', err.message);
|
||||
});
|
||||
} catch (err) {
|
||||
} catch (err) {
|
||||
console.warn('[Cache] Valkey init failed:', err.message);
|
||||
}
|
||||
}
|
||||
|
||||
init();
|
||||
|
||||
const cache = {
|
||||
init,
|
||||
async get(key) {
|
||||
if (!redis) return null;
|
||||
try {
|
||||
@@ -53,6 +63,17 @@ const cache = {
|
||||
} catch (e) {
|
||||
// ignore
|
||||
}
|
||||
},
|
||||
|
||||
async checkHealth() {
|
||||
if (!redis) return { status: 'down', error: 'Valkey client not initialized' };
|
||||
try {
|
||||
const result = await redis.ping();
|
||||
if (result === 'PONG') return { status: 'up' };
|
||||
return { status: 'down', error: 'Invalid ping response' };
|
||||
} catch (e) {
|
||||
return { status: 'down', error: e.message };
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -1,130 +0,0 @@
|
||||
/**
|
||||
* Database Integrity Check
|
||||
* Runs at startup to ensure all required tables exist.
|
||||
* Recreates the database if any tables are missing.
|
||||
*/
|
||||
require('dotenv').config();
|
||||
const mysql = require('mysql2/promise');
|
||||
const db = require('./db');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
|
||||
const REQUIRED_TABLES = [
|
||||
'users',
|
||||
'prometheus_sources',
|
||||
'site_settings',
|
||||
'traffic_stats'
|
||||
];
|
||||
|
||||
async function checkAndFixDatabase() {
|
||||
// Only run if .env is already configured
|
||||
const envPath = path.join(__dirname, '..', '.env');
|
||||
if (!fs.existsSync(envPath)) return;
|
||||
|
||||
const dbHost = process.env.MYSQL_HOST || 'localhost';
|
||||
const dbUser = process.env.MYSQL_USER || 'root';
|
||||
const dbPass = process.env.MYSQL_PASSWORD || '';
|
||||
const dbPort = parseInt(process.env.MYSQL_PORT) || 3306;
|
||||
const dbName = process.env.MYSQL_DATABASE || 'display_wall';
|
||||
|
||||
try {
|
||||
// Check tables
|
||||
const [rows] = await db.query("SHOW TABLES");
|
||||
const existingTables = rows.map(r => Object.values(r)[0]);
|
||||
|
||||
const missingTables = REQUIRED_TABLES.filter(t => !existingTables.includes(t));
|
||||
|
||||
if (missingTables.length > 0) {
|
||||
console.log(`[Database Integrity] ⚠️ Missing tables: ${missingTables.join(', ')}`);
|
||||
await recreateDatabase(dbHost, dbPort, dbUser, dbPass, dbName);
|
||||
} else {
|
||||
// console.log(`[Database Integrity] ✅ All tables accounted for.`);
|
||||
}
|
||||
} catch (err) {
|
||||
if (err.code === 'ER_BAD_DB_ERROR') {
|
||||
console.log(`[Database Integrity] ⚠️ Database "${dbName}" does not exist.`);
|
||||
await recreateDatabase(dbHost, dbPort, dbUser, dbPass, dbName);
|
||||
} else {
|
||||
console.error('[Database Integrity] ❌ Error checking integrity:', err.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function recreateDatabase(host, port, user, password, dbName) {
|
||||
console.log(`[Database Integrity] 🔄 Re-initializing database "${dbName}"...`);
|
||||
|
||||
let connection;
|
||||
try {
|
||||
connection = await mysql.createConnection({ host, port, user, password });
|
||||
|
||||
// Drop and create database
|
||||
await connection.query(`DROP DATABASE IF EXISTS \`${dbName}\``);
|
||||
await connection.query(`CREATE DATABASE \`${dbName}\` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci`);
|
||||
await connection.query(`USE \`${dbName}\``);
|
||||
|
||||
// Recreate all tables
|
||||
console.log(' - Creating table "users"...');
|
||||
await connection.query(`
|
||||
CREATE TABLE IF NOT EXISTS users (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
username VARCHAR(255) NOT NULL UNIQUE,
|
||||
password VARCHAR(255) NOT NULL,
|
||||
salt VARCHAR(255) NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`);
|
||||
|
||||
console.log(' - Creating table "prometheus_sources"...');
|
||||
await connection.query(`
|
||||
CREATE TABLE IF NOT EXISTS prometheus_sources (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
url VARCHAR(500) NOT NULL,
|
||||
description TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`);
|
||||
|
||||
console.log(' - Creating table "site_settings"...');
|
||||
await connection.query(`
|
||||
CREATE TABLE IF NOT EXISTS site_settings (
|
||||
id INT PRIMARY KEY DEFAULT 1,
|
||||
page_name VARCHAR(255) DEFAULT '数据可视化展示大屏',
|
||||
title VARCHAR(255) DEFAULT '数据可视化展示大屏',
|
||||
logo_url TEXT,
|
||||
default_theme VARCHAR(20) DEFAULT 'dark',
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`);
|
||||
await connection.query(`
|
||||
INSERT INTO site_settings (id, page_name, title, default_theme)
|
||||
VALUES (1, '数据可视化展示大屏', '数据可视化展示大屏', 'dark')
|
||||
`);
|
||||
|
||||
console.log(' - Creating table "traffic_stats"...');
|
||||
await connection.query(`
|
||||
CREATE TABLE IF NOT EXISTS traffic_stats (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
rx_bytes BIGINT UNSIGNED DEFAULT 0,
|
||||
tx_bytes BIGINT UNSIGNED DEFAULT 0,
|
||||
rx_bandwidth DOUBLE DEFAULT 0,
|
||||
tx_bandwidth DOUBLE DEFAULT 0,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
UNIQUE INDEX (timestamp)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`);
|
||||
|
||||
console.log(`[Database Integrity] ✅ Re-initialization complete.`);
|
||||
|
||||
// Refresh db pool in the main app context
|
||||
db.initPool();
|
||||
|
||||
} catch (err) {
|
||||
console.error('[Database Integrity] ❌ Critical failure during re-initialization:', err.message);
|
||||
} finally {
|
||||
if (connection) await connection.end();
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = checkAndFixDatabase;
|
||||
238
server/db-schema-check.js
Normal file
238
server/db-schema-check.js
Normal file
@@ -0,0 +1,238 @@
|
||||
/**
|
||||
* Database schema check
|
||||
* Ensures required tables and columns exist at startup.
|
||||
*/
|
||||
const path = require('path');
|
||||
require('dotenv').config({ path: path.join(__dirname, '..', '.env') });
|
||||
const db = require('./db');
|
||||
const fs = require('fs');
|
||||
|
||||
const SCHEMA = {
|
||||
users: {
|
||||
createSql: `
|
||||
CREATE TABLE IF NOT EXISTS users (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
username VARCHAR(255) NOT NULL UNIQUE,
|
||||
password VARCHAR(255) NOT NULL,
|
||||
salt VARCHAR(255) NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`,
|
||||
columns: [
|
||||
{ name: 'username', sql: "ALTER TABLE users ADD COLUMN username VARCHAR(255) NOT NULL UNIQUE AFTER id" },
|
||||
{ name: 'password', sql: "ALTER TABLE users ADD COLUMN password VARCHAR(255) NOT NULL AFTER username" },
|
||||
{ name: 'salt', sql: "ALTER TABLE users ADD COLUMN salt VARCHAR(255) NOT NULL AFTER password" }
|
||||
]
|
||||
},
|
||||
prometheus_sources: {
|
||||
createSql: `
|
||||
CREATE TABLE IF NOT EXISTS prometheus_sources (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
url VARCHAR(500) NOT NULL,
|
||||
description TEXT,
|
||||
is_server_source TINYINT(1) DEFAULT 1,
|
||||
is_overview_source TINYINT(1) DEFAULT 1,
|
||||
is_detail_source TINYINT(1) DEFAULT 1,
|
||||
type VARCHAR(50) DEFAULT 'prometheus',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`,
|
||||
columns: [
|
||||
{ name: 'name', sql: "ALTER TABLE prometheus_sources ADD COLUMN name VARCHAR(255) NOT NULL AFTER id" },
|
||||
{ name: 'url', sql: "ALTER TABLE prometheus_sources ADD COLUMN url VARCHAR(500) NOT NULL AFTER name" },
|
||||
{ name: 'description', sql: "ALTER TABLE prometheus_sources ADD COLUMN description TEXT AFTER url" },
|
||||
{ name: 'is_server_source', sql: "ALTER TABLE prometheus_sources ADD COLUMN is_server_source TINYINT(1) DEFAULT 1 AFTER description" },
|
||||
{ name: 'is_overview_source', sql: "ALTER TABLE prometheus_sources ADD COLUMN is_overview_source TINYINT(1) DEFAULT 1 AFTER is_server_source" },
|
||||
{ name: 'is_detail_source', sql: "ALTER TABLE prometheus_sources ADD COLUMN is_detail_source TINYINT(1) DEFAULT 1 AFTER is_overview_source" },
|
||||
{ name: 'type', sql: "ALTER TABLE prometheus_sources ADD COLUMN type VARCHAR(50) DEFAULT 'prometheus' AFTER is_detail_source" }
|
||||
]
|
||||
},
|
||||
site_settings: {
|
||||
createSql: `
|
||||
CREATE TABLE IF NOT EXISTS site_settings (
|
||||
id INT PRIMARY KEY DEFAULT 1,
|
||||
page_name VARCHAR(255) DEFAULT '数据可视化展示大屏',
|
||||
show_page_name TINYINT(1) DEFAULT 1,
|
||||
title VARCHAR(255) DEFAULT '数据可视化展示大屏',
|
||||
logo_url TEXT,
|
||||
logo_url_dark TEXT,
|
||||
favicon_url TEXT,
|
||||
default_theme VARCHAR(20) DEFAULT 'dark',
|
||||
show_95_bandwidth TINYINT(1) DEFAULT 0,
|
||||
p95_type VARCHAR(20) DEFAULT 'tx',
|
||||
require_login_for_server_details TINYINT(1) DEFAULT 1,
|
||||
blackbox_source_id INT,
|
||||
latency_source VARCHAR(100),
|
||||
latency_dest VARCHAR(100),
|
||||
latency_target VARCHAR(255),
|
||||
icp_filing VARCHAR(255),
|
||||
ps_filing VARCHAR(255),
|
||||
show_server_ip TINYINT(1) DEFAULT 0,
|
||||
ip_metric_name VARCHAR(100) DEFAULT NULL,
|
||||
ip_label_name VARCHAR(100) DEFAULT 'address',
|
||||
custom_metrics JSON DEFAULT NULL,
|
||||
cdn_url VARCHAR(500) DEFAULT NULL,
|
||||
prometheus_cache_ttl INT DEFAULT 30,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`,
|
||||
seedSql: `
|
||||
INSERT IGNORE INTO site_settings (
|
||||
id, page_name, show_page_name, title, default_theme, show_95_bandwidth, p95_type, require_login_for_server_details, prometheus_cache_ttl
|
||||
) VALUES (
|
||||
1, '数据可视化展示大屏', 1, '数据可视化展示大屏', 'dark', 0, 'tx', 1, 30
|
||||
)
|
||||
`,
|
||||
columns: [
|
||||
{ name: 'page_name', sql: "ALTER TABLE site_settings ADD COLUMN page_name VARCHAR(255) DEFAULT '数据可视化展示大屏' AFTER id" },
|
||||
{ name: 'show_page_name', sql: "ALTER TABLE site_settings ADD COLUMN show_page_name TINYINT(1) DEFAULT 1 AFTER page_name" },
|
||||
{ name: 'title', sql: "ALTER TABLE site_settings ADD COLUMN title VARCHAR(255) DEFAULT '数据可视化展示大屏' AFTER show_page_name" },
|
||||
{ name: 'logo_url', sql: "ALTER TABLE site_settings ADD COLUMN logo_url TEXT AFTER title" },
|
||||
{ name: 'logo_url_dark', sql: "ALTER TABLE site_settings ADD COLUMN logo_url_dark TEXT AFTER logo_url" },
|
||||
{ name: 'favicon_url', sql: "ALTER TABLE site_settings ADD COLUMN favicon_url TEXT AFTER logo_url_dark" },
|
||||
{ name: 'default_theme', sql: "ALTER TABLE site_settings ADD COLUMN default_theme VARCHAR(20) DEFAULT 'dark' AFTER favicon_url" },
|
||||
{ name: 'show_95_bandwidth', sql: "ALTER TABLE site_settings ADD COLUMN show_95_bandwidth TINYINT(1) DEFAULT 0 AFTER default_theme" },
|
||||
{ name: 'p95_type', sql: "ALTER TABLE site_settings ADD COLUMN p95_type VARCHAR(20) DEFAULT 'tx' AFTER show_95_bandwidth" },
|
||||
{ name: 'require_login_for_server_details', sql: "ALTER TABLE site_settings ADD COLUMN require_login_for_server_details TINYINT(1) DEFAULT 1 AFTER p95_type" },
|
||||
{ name: 'blackbox_source_id', sql: "ALTER TABLE site_settings ADD COLUMN blackbox_source_id INT AFTER require_login_for_server_details" },
|
||||
{ name: 'latency_source', sql: "ALTER TABLE site_settings ADD COLUMN latency_source VARCHAR(100) AFTER blackbox_source_id" },
|
||||
{ name: 'latency_dest', sql: "ALTER TABLE site_settings ADD COLUMN latency_dest VARCHAR(100) AFTER latency_source" },
|
||||
{ name: 'latency_target', sql: "ALTER TABLE site_settings ADD COLUMN latency_target VARCHAR(255) AFTER latency_dest" },
|
||||
{ name: 'icp_filing', sql: "ALTER TABLE site_settings ADD COLUMN icp_filing VARCHAR(255) AFTER latency_target" },
|
||||
{ name: 'ps_filing', sql: "ALTER TABLE site_settings ADD COLUMN ps_filing VARCHAR(255) AFTER icp_filing" },
|
||||
{ name: 'show_server_ip', sql: "ALTER TABLE site_settings ADD COLUMN show_server_ip TINYINT(1) DEFAULT 0 AFTER ps_filing" },
|
||||
{ name: 'ip_metric_name', sql: "ALTER TABLE site_settings ADD COLUMN ip_metric_name VARCHAR(100) DEFAULT NULL AFTER show_server_ip" },
|
||||
{ name: 'ip_label_name', sql: "ALTER TABLE site_settings ADD COLUMN ip_label_name VARCHAR(100) DEFAULT 'address' AFTER ip_metric_name" },
|
||||
{ name: 'custom_metrics', sql: "ALTER TABLE site_settings ADD COLUMN custom_metrics JSON DEFAULT NULL AFTER ip_label_name" },
|
||||
{ name: 'cdn_url', sql: "ALTER TABLE site_settings ADD COLUMN cdn_url VARCHAR(500) DEFAULT NULL AFTER custom_metrics" },
|
||||
{ name: 'prometheus_cache_ttl', sql: "ALTER TABLE site_settings ADD COLUMN prometheus_cache_ttl INT DEFAULT 30 AFTER cdn_url" }
|
||||
]
|
||||
},
|
||||
traffic_stats: {
|
||||
createSql: `
|
||||
CREATE TABLE IF NOT EXISTS traffic_stats (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
rx_bytes BIGINT UNSIGNED DEFAULT 0,
|
||||
tx_bytes BIGINT UNSIGNED DEFAULT 0,
|
||||
rx_bandwidth DOUBLE DEFAULT 0,
|
||||
tx_bandwidth DOUBLE DEFAULT 0,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
UNIQUE INDEX (timestamp)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`,
|
||||
columns: [
|
||||
{ name: 'rx_bytes', sql: "ALTER TABLE traffic_stats ADD COLUMN rx_bytes BIGINT UNSIGNED DEFAULT 0 AFTER id" },
|
||||
{ name: 'tx_bytes', sql: "ALTER TABLE traffic_stats ADD COLUMN tx_bytes BIGINT UNSIGNED DEFAULT 0 AFTER rx_bytes" },
|
||||
{ name: 'rx_bandwidth', sql: "ALTER TABLE traffic_stats ADD COLUMN rx_bandwidth DOUBLE DEFAULT 0 AFTER tx_bytes" },
|
||||
{ name: 'tx_bandwidth', sql: "ALTER TABLE traffic_stats ADD COLUMN tx_bandwidth DOUBLE DEFAULT 0 AFTER rx_bandwidth" }
|
||||
]
|
||||
},
|
||||
server_locations: {
|
||||
createSql: `
|
||||
CREATE TABLE IF NOT EXISTS server_locations (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
ip VARCHAR(255) NOT NULL UNIQUE,
|
||||
country CHAR(2),
|
||||
country_name VARCHAR(100),
|
||||
region VARCHAR(100),
|
||||
city VARCHAR(100),
|
||||
latitude DOUBLE,
|
||||
longitude DOUBLE,
|
||||
last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`,
|
||||
columns: [
|
||||
{ name: 'ip', sql: "ALTER TABLE server_locations ADD COLUMN ip VARCHAR(255) NOT NULL UNIQUE AFTER id" },
|
||||
{ name: 'country', sql: "ALTER TABLE server_locations ADD COLUMN country CHAR(2) AFTER ip" },
|
||||
{ name: 'country_name', sql: "ALTER TABLE server_locations ADD COLUMN country_name VARCHAR(100) AFTER country" },
|
||||
{ name: 'region', sql: "ALTER TABLE server_locations ADD COLUMN region VARCHAR(100) AFTER country_name" },
|
||||
{ name: 'city', sql: "ALTER TABLE server_locations ADD COLUMN city VARCHAR(100) AFTER region" },
|
||||
{ name: 'latitude', sql: "ALTER TABLE server_locations ADD COLUMN latitude DOUBLE AFTER city" },
|
||||
{ name: 'longitude', sql: "ALTER TABLE server_locations ADD COLUMN longitude DOUBLE AFTER latitude" }
|
||||
]
|
||||
},
|
||||
latency_routes: {
|
||||
createSql: `
|
||||
CREATE TABLE IF NOT EXISTS latency_routes (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
source_id INT NOT NULL,
|
||||
latency_source VARCHAR(100) NOT NULL,
|
||||
latency_dest VARCHAR(100) NOT NULL,
|
||||
latency_target VARCHAR(255) NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`,
|
||||
columns: [
|
||||
{ name: 'source_id', sql: "ALTER TABLE latency_routes ADD COLUMN source_id INT NOT NULL AFTER id" },
|
||||
{ name: 'latency_source', sql: "ALTER TABLE latency_routes ADD COLUMN latency_source VARCHAR(100) NOT NULL AFTER source_id" },
|
||||
{ name: 'latency_dest', sql: "ALTER TABLE latency_routes ADD COLUMN latency_dest VARCHAR(100) NOT NULL AFTER latency_source" },
|
||||
{ name: 'latency_target', sql: "ALTER TABLE latency_routes ADD COLUMN latency_target VARCHAR(255) NOT NULL AFTER latency_dest" }
|
||||
]
|
||||
}
|
||||
};
|
||||
|
||||
async function ensureTable(tableName, tableSchema) {
|
||||
try {
|
||||
// 1. Ensure table exists
|
||||
await db.query(tableSchema.createSql);
|
||||
|
||||
// 2. Check columns
|
||||
const [columns] = await db.query(`SHOW COLUMNS FROM \`${tableName}\``);
|
||||
const existingColumns = new Set(columns.map((column) => column.Field));
|
||||
|
||||
console.log(`[Database Integrity] Table '${tableName}' verified (${columns.length} columns)`);
|
||||
|
||||
for (const column of tableSchema.columns || []) {
|
||||
if (!existingColumns.has(column.name)) {
|
||||
console.log(`[Database Integrity] Missing column '${column.name}' in '${tableName}'. Adding it...`);
|
||||
await db.query(column.sql);
|
||||
console.log(`[Database Integrity] Column '${column.name}' added to '${tableName}'.`);
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Seed data
|
||||
if (tableSchema.seedSql) {
|
||||
const [rows] = await db.query(`SELECT count(*) as count FROM \`${tableName}\``);
|
||||
if (rows[0].count === 0) {
|
||||
console.log(`[Database Integrity] Table '${tableName}' is empty. Seeding initial data...`);
|
||||
await db.query(tableSchema.seedSql);
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(`[Database Integrity] Error ensuring table '${tableName}':`, err.message);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
async function db_migrate() {
|
||||
console.log('[Database Integrity] Starting comprehensive database audit...');
|
||||
|
||||
// Try to check if we can even connect
|
||||
try {
|
||||
const health = await db.checkHealth();
|
||||
if (health.status !== 'up') {
|
||||
console.warn(`[Database Integrity] initial health check failed: ${health.error}`);
|
||||
// If we can't connect, maybe the DB itself doesn't exist?
|
||||
// For now, we rely on the pool to handle connection retries/errors.
|
||||
}
|
||||
} catch (e) {
|
||||
// Ignore health check errors, let ensureTable handle the primary queries
|
||||
}
|
||||
|
||||
try {
|
||||
let tablesChecked = 0;
|
||||
for (const [tableName, tableSchema] of Object.entries(SCHEMA)) {
|
||||
await ensureTable(tableName, tableSchema);
|
||||
tablesChecked++;
|
||||
}
|
||||
console.log(`[Database Integrity] Audit complete. ${tablesChecked} tables verified and healthy.`);
|
||||
return true;
|
||||
} catch (err) {
|
||||
console.error('[Database Integrity] ❌ Audit failed:', err.message);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = db_migrate;
|
||||
13
server/db.js
13
server/db.js
@@ -18,9 +18,20 @@ function initPool() {
|
||||
});
|
||||
}
|
||||
|
||||
async function checkHealth() {
|
||||
try {
|
||||
if (!pool) return { status: 'down', error: 'Database pool not initialized' };
|
||||
await pool.query('SELECT 1');
|
||||
return { status: 'up' };
|
||||
} catch (err) {
|
||||
return { status: 'down', error: err.message };
|
||||
}
|
||||
}
|
||||
|
||||
initPool();
|
||||
|
||||
module.exports = {
|
||||
query: (...args) => pool.query(...args),
|
||||
initPool
|
||||
initPool,
|
||||
checkHealth
|
||||
};
|
||||
|
||||
204
server/geo-service.js
Normal file
204
server/geo-service.js
Normal file
@@ -0,0 +1,204 @@
|
||||
const axios = require('axios');
|
||||
const net = require('net');
|
||||
const dns = require('dns').promises;
|
||||
const db = require('./db');
|
||||
|
||||
/**
|
||||
* Geo Location Service
|
||||
* Resolves IP addresses to geographical coordinates and country info.
|
||||
* Caches results in the database to minimize API calls.
|
||||
*/
|
||||
|
||||
const ipInfoToken = process.env.IPINFO_TOKEN;
|
||||
const enableExternalGeoLookup = process.env.ENABLE_EXTERNAL_GEO_LOOKUP === 'true';
|
||||
|
||||
/**
|
||||
* Normalizes geo data for consistent display
|
||||
*/
|
||||
function normalizeGeo(geo) {
|
||||
if (!geo) return geo;
|
||||
|
||||
// Custom normalization for TW to "Taipei, China" and JP to "Tokyo"
|
||||
const country = (geo.country || geo.country_code || '').toUpperCase();
|
||||
if (country === 'TW') {
|
||||
return {
|
||||
...geo,
|
||||
city: 'Taipei',
|
||||
country: 'TW',
|
||||
country_name: 'China',
|
||||
// Force Taipei coordinates for consistent 2D plotting
|
||||
loc: '25.0330,121.5654',
|
||||
latitude: 25.0330,
|
||||
longitude: 121.5654
|
||||
};
|
||||
} else if (country === 'JP') {
|
||||
return {
|
||||
...geo,
|
||||
city: 'Tokyo',
|
||||
country: 'JP',
|
||||
country_name: 'Japan',
|
||||
// Force Tokyo coordinates for consistent 2D plotting
|
||||
loc: '35.6895,139.6917',
|
||||
latitude: 35.6895,
|
||||
longitude: 139.6917
|
||||
};
|
||||
}
|
||||
return geo;
|
||||
}
|
||||
|
||||
async function getLocation(target) {
|
||||
// Normalize target (strip port if present, handle IPv6 brackets)
|
||||
let cleanTarget = target;
|
||||
if (cleanTarget.startsWith('[')) {
|
||||
const closingBracket = cleanTarget.indexOf(']');
|
||||
if (closingBracket !== -1) {
|
||||
cleanTarget = cleanTarget.substring(1, closingBracket);
|
||||
}
|
||||
} else {
|
||||
const parts = cleanTarget.split(':');
|
||||
if (parts.length === 2) {
|
||||
cleanTarget = parts[0];
|
||||
}
|
||||
}
|
||||
|
||||
// 1. Check if we already have this IP/Domain in DB (FASTEST)
|
||||
try {
|
||||
const [rows] = await db.query('SELECT * FROM server_locations WHERE ip = ?', [cleanTarget]);
|
||||
if (rows.length > 0) {
|
||||
const data = rows[0];
|
||||
const age = Date.now() - new Date(data.last_updated).getTime();
|
||||
if (age < 30 * 24 * 60 * 60 * 1000) {
|
||||
return normalizeGeo(data);
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
// console.error(`[Geo Service] DB check failed for ${cleanTarget}`);
|
||||
}
|
||||
|
||||
// 2. Resolve domain to IP if needed
|
||||
let cleanIp = cleanTarget;
|
||||
if (net.isIP(cleanTarget) === 0) {
|
||||
try {
|
||||
const lookup = await dns.lookup(cleanTarget);
|
||||
cleanIp = lookup.address;
|
||||
|
||||
// Secondary DB check with resolved IP
|
||||
const [rows] = await db.query('SELECT * FROM server_locations WHERE ip = ?', [cleanIp]);
|
||||
if (rows.length > 0) {
|
||||
const data = rows[0];
|
||||
// Cache the domain mapping to avoid future DNS lookups
|
||||
if (cleanTarget !== cleanIp) {
|
||||
try {
|
||||
await db.query(`
|
||||
INSERT INTO server_locations (ip, country, country_name, region, city, latitude, longitude)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||
ON DUPLICATE KEY UPDATE last_updated = CURRENT_TIMESTAMP
|
||||
`, [cleanTarget, data.country, data.country_name, data.region, data.city, data.latitude, data.longitude]);
|
||||
} catch(e) {}
|
||||
}
|
||||
return normalizeGeo(data);
|
||||
}
|
||||
} catch (err) {
|
||||
// Quiet DNS failure for tokens (legacy bug mitigation)
|
||||
if (!/^[0-9a-f]{16}$/i.test(cleanTarget)) {
|
||||
console.error(`[Geo Service] DNS resolution failed for ${cleanTarget}:`, err.message);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Skip local/reserved IPs
|
||||
if (isLocalIp(cleanIp)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// 4. Resolve via ipinfo.io (LAST RESORT)
|
||||
if (!enableExternalGeoLookup) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
console.log(`[Geo Service] API lookup (ipinfo.io) for: ${cleanIp}`);
|
||||
const url = `https://ipinfo.io/${cleanIp}/json${ipInfoToken ? `?token=${ipInfoToken}` : ''}`;
|
||||
const response = await axios.get(url, { timeout: 5000 });
|
||||
const geo = normalizeGeo(response.data);
|
||||
|
||||
if (geo && geo.loc) {
|
||||
const [lat, lon] = geo.loc.split(',').map(Number);
|
||||
const locationData = {
|
||||
ip: cleanIp,
|
||||
country: geo.country,
|
||||
country_name: geo.country_name || geo.country, // ipinfo might not have country_name in basic response
|
||||
region: geo.region,
|
||||
city: geo.city,
|
||||
latitude: lat,
|
||||
longitude: lon
|
||||
};
|
||||
|
||||
// Save to DB
|
||||
await db.query(`
|
||||
INSERT INTO server_locations (ip, country, country_name, region, city, latitude, longitude)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
country = VALUES(country),
|
||||
country_name = VALUES(country_name),
|
||||
region = VALUES(region),
|
||||
city = VALUES(city),
|
||||
latitude = VALUES(latitude),
|
||||
longitude = VALUES(longitude)
|
||||
`, [
|
||||
locationData.ip,
|
||||
locationData.country,
|
||||
locationData.country_name,
|
||||
locationData.region,
|
||||
locationData.city,
|
||||
locationData.latitude,
|
||||
locationData.longitude
|
||||
]);
|
||||
|
||||
// Cache the domain target as well if it differs from the resolved IP
|
||||
if (cleanTarget !== cleanIp) {
|
||||
await db.query(`
|
||||
INSERT INTO server_locations (ip, country, country_name, region, city, latitude, longitude)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
country = VALUES(country),
|
||||
country_name = VALUES(country_name),
|
||||
region = VALUES(region),
|
||||
city = VALUES(city),
|
||||
latitude = VALUES(latitude),
|
||||
longitude = VALUES(longitude)
|
||||
`, [
|
||||
cleanTarget,
|
||||
locationData.country,
|
||||
locationData.country_name,
|
||||
locationData.region,
|
||||
locationData.city,
|
||||
locationData.latitude,
|
||||
locationData.longitude
|
||||
]);
|
||||
}
|
||||
|
||||
return locationData;
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(`[Geo Service] Error resolving IP ${cleanIp}:`, err.message);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function isLocalIp(ip) {
|
||||
if (ip === 'localhost' || ip === '127.0.0.1' || ip === '::1') return true;
|
||||
|
||||
// RFC1918 private addresses
|
||||
const p1 = /^10\./;
|
||||
const p2 = /^172\.(1[6-9]|2[0-9]|3[0-1])\./;
|
||||
const p3 = /^192\.168\./;
|
||||
|
||||
return p1.test(ip) || p2.test(ip) || p3.test(ip);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getLocation
|
||||
};
|
||||
1256
server/index.js
1256
server/index.js
File diff suppressed because it is too large
Load Diff
@@ -1,74 +1,40 @@
|
||||
/**
|
||||
* Database Initialization Script
|
||||
* Run: npm run init-db
|
||||
* Creates the required MySQL database and tables.
|
||||
*/
|
||||
require('dotenv').config();
|
||||
const path = require('path');
|
||||
require('dotenv').config({ path: path.join(__dirname, '..', '.env') });
|
||||
const mysql = require('mysql2/promise');
|
||||
const db_migrate = require('./db-schema-check');
|
||||
const db = require('./db');
|
||||
|
||||
async function initDatabase() {
|
||||
const connection = await mysql.createConnection({
|
||||
host: process.env.MYSQL_HOST || 'localhost',
|
||||
port: parseInt(process.env.MYSQL_PORT) || 3306,
|
||||
user: process.env.MYSQL_USER || 'root',
|
||||
password: process.env.MYSQL_PASSWORD || ''
|
||||
});
|
||||
|
||||
const host = process.env.MYSQL_HOST || 'localhost';
|
||||
const port = parseInt(process.env.MYSQL_PORT) || 3306;
|
||||
const user = process.env.MYSQL_USER || 'root';
|
||||
const password = process.env.MYSQL_PASSWORD || '';
|
||||
const dbName = process.env.MYSQL_DATABASE || 'display_wall';
|
||||
|
||||
console.log('🔧 Initializing database...\n');
|
||||
// 1. Create connection without database selected to create the DB itself
|
||||
const connection = await mysql.createConnection({
|
||||
host,
|
||||
port,
|
||||
user,
|
||||
password
|
||||
});
|
||||
|
||||
console.log('🔧 Initializing database environment...\n');
|
||||
|
||||
// Create database
|
||||
await connection.query(`CREATE DATABASE IF NOT EXISTS \`${dbName}\` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci`);
|
||||
console.log(` ✅ Database "${dbName}" ready`);
|
||||
await connection.end();
|
||||
|
||||
await connection.query(`USE \`${dbName}\``);
|
||||
// 2. Re-initialize the standard pool so it can see the new DB
|
||||
db.initPool();
|
||||
|
||||
// Create users table
|
||||
await connection.query(`
|
||||
CREATE TABLE IF NOT EXISTS users (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
username VARCHAR(255) NOT NULL UNIQUE,
|
||||
password VARCHAR(255) NOT NULL,
|
||||
salt VARCHAR(255) NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`);
|
||||
console.log(' ✅ Table "users" ready');
|
||||
|
||||
// Create prometheus_sources table
|
||||
await connection.query(`
|
||||
CREATE TABLE IF NOT EXISTS prometheus_sources (
|
||||
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
url VARCHAR(500) NOT NULL,
|
||||
description TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`);
|
||||
console.log(' ✅ Table "prometheus_sources" ready');
|
||||
|
||||
// Create site_settings table
|
||||
await connection.query(`
|
||||
CREATE TABLE IF NOT EXISTS site_settings (
|
||||
id INT PRIMARY KEY DEFAULT 1,
|
||||
page_name VARCHAR(255) DEFAULT '数据可视化展示大屏',
|
||||
title VARCHAR(255) DEFAULT '数据可视化展示大屏',
|
||||
logo_url TEXT,
|
||||
default_theme VARCHAR(20) DEFAULT 'dark',
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`);
|
||||
// Insert default settings if not exists
|
||||
await connection.query(`
|
||||
INSERT IGNORE INTO site_settings (id, page_name, title, default_theme)
|
||||
VALUES (1, '数据可视化展示大屏', '数据可视化展示大屏', 'dark')
|
||||
`);
|
||||
console.log(' ✅ Table "site_settings" ready');
|
||||
// 3. Use the centralized schema tool to create/fix all tables
|
||||
console.log(' 📦 Initializing tables using schema-check tool...');
|
||||
await db_migrate();
|
||||
console.log(' ✅ Tables and columns ready');
|
||||
|
||||
console.log('\n🎉 Database initialization complete!\n');
|
||||
await connection.end();
|
||||
}
|
||||
|
||||
initDatabase().catch(err => {
|
||||
|
||||
134
server/latency-service.js
Normal file
134
server/latency-service.js
Normal file
@@ -0,0 +1,134 @@
|
||||
const axios = require('axios');
|
||||
const cache = require('./cache');
|
||||
const db = require('./db');
|
||||
|
||||
const POLL_INTERVAL = 10000; // 10 seconds
|
||||
|
||||
async function pollLatency() {
|
||||
try {
|
||||
const [routes] = await db.query(`
|
||||
SELECT r.*, s.url
|
||||
FROM latency_routes r
|
||||
JOIN prometheus_sources s ON r.source_id = s.id
|
||||
WHERE s.type = 'blackbox'
|
||||
`);
|
||||
|
||||
if (routes.length === 0) return;
|
||||
|
||||
// Poll each route
|
||||
await Promise.allSettled(routes.map(async (route) => {
|
||||
try {
|
||||
// Blackbox exporter probe URL
|
||||
// We assume ICMP module for now. If target is a URL, maybe use http_2xx
|
||||
let module = 'icmp';
|
||||
let target = route.latency_target;
|
||||
|
||||
if (target.startsWith('http://') || target.startsWith('https://')) {
|
||||
module = 'http_2xx';
|
||||
}
|
||||
|
||||
const probeUrl = `${route.url.replace(/\/+$/, '')}/probe?module=${module}&target=${encodeURIComponent(target)}`;
|
||||
|
||||
const startTime = Date.now();
|
||||
const response = await axios.get(probeUrl, {
|
||||
timeout: 5000,
|
||||
responseType: 'text',
|
||||
validateStatus: false
|
||||
});
|
||||
|
||||
if (typeof response.data !== 'string') {
|
||||
throw new Error('Response data is not a string');
|
||||
}
|
||||
|
||||
const lines = response.data.split('\n').map(l => l.trim()).filter(l => l && !l.startsWith('#'));
|
||||
|
||||
// 1. Check if the probe was successful
|
||||
let isProbeSuccess = false;
|
||||
for (const line of lines) {
|
||||
if (/^probe_success(\{.*\})?\s+1/.test(line)) {
|
||||
isProbeSuccess = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Extract latency from priority metrics
|
||||
const targetMetrics = [
|
||||
'probe_icmp_duration_seconds',
|
||||
'probe_http_duration_seconds',
|
||||
'probe_duration_seconds'
|
||||
];
|
||||
|
||||
let foundLatency = null;
|
||||
for (const metricName of targetMetrics) {
|
||||
let bestLine = null;
|
||||
|
||||
// First pass: look for phase="rtt" which is the most accurate "ping"
|
||||
for (const line of lines) {
|
||||
if (line.startsWith(metricName) && line.includes('phase="rtt"')) {
|
||||
bestLine = line;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Second pass: if no rtt phase, look for a line without phases (legacy format) or just the first line
|
||||
if (!bestLine) {
|
||||
for (const line of lines) {
|
||||
if (line.startsWith(metricName)) {
|
||||
// Prefer lines without {} if possible, otherwise take the first one
|
||||
if (!line.includes('{')) {
|
||||
bestLine = line;
|
||||
break;
|
||||
}
|
||||
if (!bestLine) bestLine = line;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (bestLine) {
|
||||
// Regex to capture the number, including scientific notation
|
||||
const regex = new RegExp(`^${metricName}(?:\\{[^}]*\\})?\\s+([\\d.eE+-]+)`);
|
||||
const match = bestLine.match(regex);
|
||||
|
||||
if (match) {
|
||||
const val = parseFloat(match[1]);
|
||||
if (!isNaN(val)) {
|
||||
foundLatency = val * 1000; // convert to ms
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Final decision
|
||||
// If it's a success, use found latency. If success=0 or missing, handle carefully.
|
||||
let latency;
|
||||
if (isProbeSuccess && foundLatency !== null) {
|
||||
latency = foundLatency;
|
||||
} else {
|
||||
// If probe failed or metrics missing, do not show 0, show null (Measurement in progress/Error)
|
||||
latency = null;
|
||||
}
|
||||
|
||||
// Save to Valkey
|
||||
await cache.set(`latency:route:${route.id}`, latency, 60);
|
||||
} catch (err) {
|
||||
await cache.set(`latency:route:${route.id}`, null, 60);
|
||||
}
|
||||
}));
|
||||
} catch (err) {
|
||||
console.error('[Latency] Service error:', err.message);
|
||||
}
|
||||
}
|
||||
|
||||
let intervalId = null;
|
||||
|
||||
function start() {
|
||||
if (intervalId) clearInterval(intervalId);
|
||||
pollLatency(); // initial run
|
||||
intervalId = setInterval(pollLatency, POLL_INTERVAL);
|
||||
console.log('[Latency] Background service started (polling Blackbox Exporter directly)');
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
start
|
||||
};
|
||||
@@ -1,12 +1,48 @@
|
||||
const axios = require('axios');
|
||||
const http = require('http');
|
||||
const https = require('https');
|
||||
const cache = require('./cache');
|
||||
const crypto = require('crypto');
|
||||
|
||||
const QUERY_TIMEOUT = 10000;
|
||||
|
||||
// Reusable agents to handle potential redirect issues and protocol mismatches
|
||||
function getCacheKey(type, baseUrl, expr, extra = '') {
|
||||
return `prom_${type}:${crypto.createHash('md5').update(`${baseUrl}:${expr}:${extra}`).digest('hex')}`;
|
||||
}
|
||||
|
||||
const httpAgent = new http.Agent({ keepAlive: true });
|
||||
const httpsAgent = new https.Agent({ keepAlive: true, rejectUnauthorized: false });
|
||||
const httpsAgent = new https.Agent({ keepAlive: true });
|
||||
|
||||
const serverIdMap = new Map(); // token -> { instance, job, source, lastSeen }
|
||||
|
||||
function getSecret() {
|
||||
// Use the env variable populated by index.js initialization
|
||||
return process.env.APP_SECRET || 'fallback-secret-for-safety';
|
||||
}
|
||||
|
||||
// Periodic cleanup of serverIdMap to prevent infinite growth
|
||||
setInterval(() => {
|
||||
const now = Date.now();
|
||||
const TTL = 24 * 60 * 60 * 1000; // 24 hours
|
||||
for (const [token, data] of serverIdMap.entries()) {
|
||||
if (now - (data.lastSeen || 0) > TTL) {
|
||||
serverIdMap.delete(token);
|
||||
}
|
||||
}
|
||||
}, 3600000); // Once per hour
|
||||
|
||||
function getServerToken(instance, job, source) {
|
||||
const hash = crypto.createHmac('sha256', getSecret())
|
||||
.update(`${instance}:${job}:${source}`)
|
||||
.digest('hex')
|
||||
.substring(0, 16);
|
||||
|
||||
// Update lastSeen timestamp
|
||||
const data = serverIdMap.get(hash);
|
||||
if (data) data.lastSeen = Date.now();
|
||||
|
||||
return hash;
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize URL and ensure protocol
|
||||
@@ -36,12 +72,12 @@ function createClient(baseUrl) {
|
||||
/**
|
||||
* Test Prometheus connection
|
||||
*/
|
||||
async function testConnection(url) {
|
||||
async function testConnection(url, customTimeout = null) {
|
||||
const normalized = normalizeUrl(url);
|
||||
try {
|
||||
// Using native fetch to avoid follow-redirects/axios "protocol mismatch" issues in some Node environments
|
||||
const controller = new AbortController();
|
||||
const timer = setTimeout(() => controller.abort(), QUERY_TIMEOUT);
|
||||
const timer = setTimeout(() => controller.abort(), customTimeout || QUERY_TIMEOUT);
|
||||
|
||||
// Node native fetch - handles http/https automatically
|
||||
const res = await fetch(`${normalized}/api/v1/status/buildinfo`, {
|
||||
@@ -98,6 +134,38 @@ async function query(baseUrl, expr) {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all targets from Prometheus
|
||||
*/
|
||||
async function getTargets(baseUrl) {
|
||||
const url = normalizeUrl(baseUrl);
|
||||
try {
|
||||
const controller = new AbortController();
|
||||
const timer = setTimeout(() => controller.abort(), QUERY_TIMEOUT);
|
||||
|
||||
const res = await fetch(`${url}/api/v1/targets`, {
|
||||
signal: controller.signal
|
||||
});
|
||||
|
||||
clearTimeout(timer);
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error(`Prometheus returned HTTP ${res.status}`);
|
||||
}
|
||||
|
||||
const data = await res.json();
|
||||
if (data.status !== 'success') {
|
||||
throw new Error(`Prometheus targets fetch failed: ${data.error || 'unknown error'}`);
|
||||
}
|
||||
return data.data.activeTargets || [];
|
||||
} catch (err) {
|
||||
if (err.name === 'AbortError') {
|
||||
throw new Error('Prometheus targets fetch timed out');
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a Prometheus range query
|
||||
*/
|
||||
@@ -133,9 +201,6 @@ async function queryRange(baseUrl, expr, start, end, step) {
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Get overview metrics from a single Prometheus source
|
||||
*/
|
||||
async function getOverviewMetrics(url, sourceName) {
|
||||
// Run all queries in parallel
|
||||
const [
|
||||
@@ -147,9 +212,11 @@ async function getOverviewMetrics(url, sourceName) {
|
||||
diskFreeResult,
|
||||
netRxResult,
|
||||
netTxResult,
|
||||
traffic24hRxResult,
|
||||
traffic24hTxResult,
|
||||
upResult
|
||||
netRx24hResult,
|
||||
netTx24hResult,
|
||||
targetsResult,
|
||||
conntrackEntriesResult,
|
||||
conntrackLimitResult
|
||||
] = await Promise.all([
|
||||
// CPU usage per instance: 1 - avg idle
|
||||
query(url, '100 - (avg by (instance, job) (rate(node_cpu_seconds_total{mode="idle"}[1m])) * 100)').catch(() => []),
|
||||
@@ -159,31 +226,47 @@ async function getOverviewMetrics(url, sourceName) {
|
||||
query(url, 'node_memory_MemTotal_bytes').catch(() => []),
|
||||
// Memory available per instance
|
||||
query(url, 'node_memory_MemAvailable_bytes').catch(() => []),
|
||||
// Disk total per instance (root filesystem + /data)
|
||||
query(url, 'sum by (instance, job) (node_filesystem_size_bytes{mountpoint=~"/|/data",fstype!="tmpfs"})').catch(() => []),
|
||||
// Disk free per instance (root filesystem + /data)
|
||||
query(url, 'sum by (instance, job) (node_filesystem_free_bytes{mountpoint=~"/|/data",fstype!="tmpfs"})').catch(() => []),
|
||||
// Disk total per instance (excluding virtual fs and FUSE/rclone mounts)
|
||||
query(url, 'sum by (instance, job) (node_filesystem_size_bytes{fstype!~"tmpfs|autofs|proc|sysfs|fuse.*", mountpoint!~"/tmp.*|/var/lib/docker/.*|/run/.*"})').catch(() => []),
|
||||
// Disk free per instance
|
||||
query(url, 'sum by (instance, job) (node_filesystem_free_bytes{fstype!~"tmpfs|autofs|proc|sysfs|fuse.*", mountpoint!~"/tmp.*|/var/lib/docker/.*|/run/.*"})').catch(() => []),
|
||||
// Network receive rate (bytes/sec)
|
||||
query(url, 'sum by (instance, job) (rate(node_network_receive_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[1m]))').catch(() => []),
|
||||
// Network transmit rate (bytes/sec)
|
||||
query(url, 'sum by (instance, job) (rate(node_network_transmit_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[1m]))').catch(() => []),
|
||||
// Total traffic received in last 24h
|
||||
// 24h Network receive total (bytes)
|
||||
query(url, 'sum by (instance, job) (increase(node_network_receive_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[24h]))').catch(() => []),
|
||||
// Total traffic transmitted in last 24h
|
||||
// 24h Network transmit total (bytes)
|
||||
query(url, 'sum by (instance, job) (increase(node_network_transmit_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[24h]))').catch(() => []),
|
||||
// Up instances (at least one successful scrape in last 5m)
|
||||
// We broaden the job filter to catch more variations of node-exporter jobs
|
||||
query(url, 'max_over_time(up{job=~".*node.*|.*exporter.*|.*host.*"}[5m])').catch(() => [])
|
||||
// Targets status from /api/v1/targets
|
||||
getTargets(url).catch(() => []),
|
||||
// Conntrack entries
|
||||
query(url, 'node_nf_conntrack_entries').catch(() => []),
|
||||
// Conntrack limits
|
||||
query(url, 'node_nf_conntrack_entries_limit').catch(() => [])
|
||||
]);
|
||||
|
||||
// Fetch 24h detailed traffic using the A*duration logic
|
||||
const traffic24hSum = await get24hTrafficSum(url).catch(() => ({ rx: 0, tx: 0 }));
|
||||
|
||||
// Build per-instance data map
|
||||
const instances = new Map();
|
||||
|
||||
const getOrCreate = (metric) => {
|
||||
const key = metric.instance;
|
||||
if (!instances.has(key)) {
|
||||
instances.set(key, {
|
||||
instance: key,
|
||||
const originalInstance = metric.instance || 'Unknown';
|
||||
const job = metric.job || 'Unknown';
|
||||
const token = getServerToken(originalInstance, job, sourceName);
|
||||
|
||||
// Store mapping for detail queries
|
||||
serverIdMap.set(token, { instance: originalInstance, source: sourceName, job, lastSeen: Date.now() });
|
||||
|
||||
// Also store in Valkey for resilience across restarts
|
||||
cache.set(`server_token:${token}`, originalInstance, 86400).catch(()=>{});
|
||||
|
||||
if (!instances.has(token)) {
|
||||
instances.set(token, {
|
||||
instance: token, // This is the masked IP SENT TO FRONTEND
|
||||
originalInstance, // Keep internal for aggregation/parsing
|
||||
job: metric.job || 'Unknown',
|
||||
source: sourceName,
|
||||
cpuPercent: 0,
|
||||
@@ -194,10 +277,17 @@ async function getOverviewMetrics(url, sourceName) {
|
||||
diskUsed: 0,
|
||||
netRx: 0,
|
||||
netTx: 0,
|
||||
up: false
|
||||
traffic24hRx: 0,
|
||||
traffic24hTx: 0,
|
||||
conntrackEntries: 0,
|
||||
conntrackLimit: 0,
|
||||
up: false,
|
||||
memPercent: 0,
|
||||
diskPercent: 0,
|
||||
conntrackPercent: 0
|
||||
});
|
||||
}
|
||||
const inst = instances.get(key);
|
||||
const inst = instances.get(token);
|
||||
// If job was Unknown but we now have a job name, update it
|
||||
if (inst.job === 'Unknown' && metric.job) {
|
||||
inst.job = metric.job;
|
||||
@@ -205,10 +295,17 @@ async function getOverviewMetrics(url, sourceName) {
|
||||
return inst;
|
||||
};
|
||||
|
||||
// Parse UP status
|
||||
for (const r of upResult) {
|
||||
const inst = getOrCreate(r.metric);
|
||||
inst.up = parseFloat(r.value[1]) === 1;
|
||||
// Initialize instances from targets first (to ensure we have all servers even if they have no metrics)
|
||||
for (const target of targetsResult) {
|
||||
const labels = target.labels || {};
|
||||
const instance = labels.instance;
|
||||
const job = labels.job || '';
|
||||
|
||||
// Include every target from the activeTargets list
|
||||
if (instance) {
|
||||
const inst = getOrCreate(labels);
|
||||
inst.up = target.health === 'up';
|
||||
}
|
||||
}
|
||||
|
||||
// Parse CPU usage
|
||||
@@ -253,14 +350,39 @@ async function getOverviewMetrics(url, sourceName) {
|
||||
inst.netTx = parseFloat(r.value[1]) || 0;
|
||||
}
|
||||
|
||||
// Final check: If an instance has non-zero CPU or Memory total data but is marked offline,
|
||||
// it means we missed its 'up' metric due to job labels, but it's clearly sending data.
|
||||
// Parse 24h traffic
|
||||
for (const r of netRx24hResult) {
|
||||
const inst = getOrCreate(r.metric);
|
||||
inst.traffic24hRx = parseFloat(r.value[1]) || 0;
|
||||
}
|
||||
for (const r of netTx24hResult) {
|
||||
const inst = getOrCreate(r.metric);
|
||||
inst.traffic24hTx = parseFloat(r.value[1]) || 0;
|
||||
}
|
||||
|
||||
// Parse conntrack
|
||||
for (const r of conntrackEntriesResult) {
|
||||
const inst = getOrCreate(r.metric);
|
||||
inst.conntrackEntries = parseFloat(r.value[1]) || 0;
|
||||
}
|
||||
for (const r of conntrackLimitResult) {
|
||||
const inst = getOrCreate(r.metric);
|
||||
inst.conntrackLimit = parseFloat(r.value[1]) || 0;
|
||||
}
|
||||
|
||||
for (const inst of instances.values()) {
|
||||
if (!inst.up && (inst.cpuPercent > 0 || inst.memTotal > 0)) {
|
||||
inst.up = true;
|
||||
}
|
||||
// Calculate percentages on backend
|
||||
inst.memPercent = inst.memTotal > 0 ? (inst.memUsed / inst.memTotal * 100) : 0;
|
||||
inst.diskPercent = inst.diskTotal > 0 ? (inst.diskUsed / inst.diskTotal * 100) : 0;
|
||||
inst.conntrackPercent = inst.conntrackLimit > 0 ? (inst.conntrackEntries / inst.conntrackLimit * 100) : 0;
|
||||
}
|
||||
|
||||
const allInstancesList = Array.from(instances.values());
|
||||
const activeInstances = allInstancesList.filter(inst => inst.up);
|
||||
|
||||
// Aggregate
|
||||
let totalCpuUsed = 0, totalCpuCores = 0;
|
||||
let totalMemUsed = 0, totalMemTotal = 0;
|
||||
@@ -268,7 +390,7 @@ async function getOverviewMetrics(url, sourceName) {
|
||||
let totalNetRx = 0, totalNetTx = 0;
|
||||
let totalTraffic24hRx = 0, totalTraffic24hTx = 0;
|
||||
|
||||
for (const inst of instances.values()) {
|
||||
for (const inst of activeInstances) {
|
||||
totalCpuUsed += (inst.cpuPercent / 100) * inst.cpuCores;
|
||||
totalCpuCores += inst.cpuCores;
|
||||
totalMemUsed += inst.memUsed;
|
||||
@@ -279,16 +401,13 @@ async function getOverviewMetrics(url, sourceName) {
|
||||
totalNetTx += inst.netTx;
|
||||
}
|
||||
|
||||
// Parse 24h traffic
|
||||
for (const r of traffic24hRxResult) {
|
||||
totalTraffic24hRx += parseFloat(r.value[1]) || 0;
|
||||
}
|
||||
for (const r of traffic24hTxResult) {
|
||||
totalTraffic24hTx += parseFloat(r.value[1]) || 0;
|
||||
}
|
||||
// Use the pre-calculated 24h traffic
|
||||
totalTraffic24hRx = traffic24hSum.rx;
|
||||
totalTraffic24hTx = traffic24hSum.tx;
|
||||
|
||||
return {
|
||||
totalServers: instances.size,
|
||||
totalServers: allInstancesList.length,
|
||||
activeServers: activeInstances.length,
|
||||
cpu: {
|
||||
used: totalCpuUsed,
|
||||
total: totalCpuCores,
|
||||
@@ -311,19 +430,78 @@ async function getOverviewMetrics(url, sourceName) {
|
||||
},
|
||||
traffic24h: {
|
||||
rx: totalTraffic24hRx,
|
||||
tx: totalTraffic24hTx
|
||||
tx: totalTraffic24hTx,
|
||||
total: totalTraffic24hRx + totalTraffic24hTx
|
||||
},
|
||||
servers: Array.from(instances.values())
|
||||
servers: allInstancesList
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Get network traffic history (past 24h, 15-min intervals)
|
||||
* Calculate total traffic from bandwidth data points using the A*duration logic
|
||||
*/
|
||||
function calculateTrafficFromHistory(values) {
|
||||
if (!values || values.length < 2) return 0;
|
||||
|
||||
let totalBytes = 0;
|
||||
for (let i = 0; i < values.length - 1; i++) {
|
||||
const [tsA, valA] = values[i];
|
||||
const [tsB] = values[i+1];
|
||||
const duration = tsB - tsA;
|
||||
totalBytes += parseFloat(valA) * duration;
|
||||
}
|
||||
return totalBytes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get total traffic for the past 24h using Prometheus increase() for stability and accuracy
|
||||
*/
|
||||
async function get24hTrafficSum(url) {
|
||||
try {
|
||||
const [rxResult, txResult] = await Promise.all([
|
||||
query(url, 'sum(increase(node_network_receive_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[24h]))').catch(() => []),
|
||||
query(url, 'sum(increase(node_network_transmit_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[24h]))').catch(() => [])
|
||||
]);
|
||||
|
||||
const rx = rxResult.length > 0 ? parseFloat(rxResult[0].value[1]) : 0;
|
||||
const tx = txResult.length > 0 ? parseFloat(txResult[0].value[1]) : 0;
|
||||
|
||||
return { rx, tx };
|
||||
} catch (err) {
|
||||
console.error(`[Prometheus] get24hTrafficSum error:`, err.message);
|
||||
return { rx: 0, tx: 0 };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get total traffic for a specific server in the past 24h
|
||||
*/
|
||||
async function get24hServerTrafficSum(url, instance, job) {
|
||||
const node = resolveToken(instance);
|
||||
|
||||
const rxExpr = `sum(increase(node_network_receive_bytes_total{instance="${node}",job="${job}",device!~'tap.*|veth.*|br.*|docker.*|virbr*|podman.*|lo.*|vmbr.*|fwbr.|ip.*|gre.*|virbr.*|vnet.*'}[24h]))`;
|
||||
const txExpr = `sum(increase(node_network_transmit_bytes_total{instance="${node}",job="${job}",device!~'tap.*|veth.*|br.*|docker.*|virbr*|podman.*|lo.*|vmbr.*|fwbr.|ip.*|gre.*|virbr.*|vnet.*'}[24h]))`;
|
||||
|
||||
const [rxResult, txResult] = await Promise.all([
|
||||
query(url, rxExpr).catch(() => []),
|
||||
query(url, txExpr).catch(() => [])
|
||||
]);
|
||||
|
||||
const rx = rxResult.length > 0 ? parseFloat(rxResult[0].value[1]) : 0;
|
||||
const tx = txResult.length > 0 ? parseFloat(txResult[0].value[1]) : 0;
|
||||
|
||||
return { rx, tx };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get network traffic history (past 24h, 5-min intervals for chart)
|
||||
*/
|
||||
async function getNetworkHistory(url) {
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const step = 300; // 5 minutes for better resolution on chart
|
||||
const now = Math.floor(Date.now() / 1000 / step) * step; // Sync to step boundary
|
||||
const start = now - 86400; // 24h ago
|
||||
const step = 900; // 15 minutes
|
||||
|
||||
const [rxResult, txResult] = await Promise.all([
|
||||
queryRange(url,
|
||||
@@ -375,9 +553,9 @@ function mergeNetworkHistories(histories) {
|
||||
* Get CPU usage history (past 1h, 1-min intervals)
|
||||
*/
|
||||
async function getCpuHistory(url) {
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const start = now - 3600; // 1h ago
|
||||
const step = 60; // 1 minute
|
||||
const now = Math.floor(Date.now() / 1000 / step) * step; // Sync to step boundary
|
||||
const start = now - 3600; // 1h ago
|
||||
|
||||
const result = await queryRange(url,
|
||||
'100 - (avg(rate(node_cpu_seconds_total{mode="idle"}[1m])) * 100)',
|
||||
@@ -412,74 +590,298 @@ function mergeCpuHistories(histories) {
|
||||
}
|
||||
|
||||
|
||||
async function resolveToken(token) {
|
||||
if (serverIdMap.has(token)) {
|
||||
return serverIdMap.get(token).instance;
|
||||
}
|
||||
const cachedInstance = await cache.get(`server_token:${token}`);
|
||||
if (cachedInstance) return cachedInstance;
|
||||
|
||||
return token;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get detailed metrics for a specific server (node)
|
||||
*/
|
||||
async function getServerDetails(baseUrl, instance, job) {
|
||||
async function getServerDetails(baseUrl, instance, job, settings = {}) {
|
||||
const url = normalizeUrl(baseUrl);
|
||||
const node = instance;
|
||||
const node = await resolveToken(instance);
|
||||
|
||||
// Queries based on the requested dashboard structure
|
||||
const queries = {
|
||||
cpuIowait: `avg(rate(node_cpu_seconds_total{mode="iowait", instance="${node}"}[1m])) * 100`,
|
||||
cpuOther: `avg(rate(node_cpu_seconds_total{mode=~"nice|steal|guest|guest_nice", instance="${node}"}[1m])) * 100`,
|
||||
cpuBusy: `100 * (1 - avg(rate(node_cpu_seconds_total{mode="idle", instance="${node}"}[1m])))`,
|
||||
sysLoad: `node_load1{instance="${node}",job="${job}"} * 100 / count(count(node_cpu_seconds_total{instance="${node}",job="${job}"}) by (cpu))`,
|
||||
memUsedPct: `(1 - (node_memory_MemAvailable_bytes{instance="${node}", job="${job}"} / node_memory_MemTotal_bytes{instance="${node}", job="${job}"})) * 100`,
|
||||
swapUsedPct: `((node_memory_SwapTotal_bytes{instance="${node}",job="${job}"} - node_memory_SwapFree_bytes{instance="${node}",job="${job}"}) / (node_memory_SwapTotal_bytes{instance="${node}",job="${job}"})) * 100`,
|
||||
rootFsUsedPct: `100 - ((node_filesystem_avail_bytes{instance="${node}",job="${job}",mountpoint="/",fstype!="rootfs"} * 100) / node_filesystem_size_bytes{instance="${node}",job="${job}",mountpoint="/",fstype!="rootfs"})`,
|
||||
rootFsUsedPct: `100 - ((node_filesystem_avail_bytes{instance="${node}",job="${job}",mountpoint="/",fstype!~"rootfs|tmpfs"} * 100) / node_filesystem_size_bytes{instance="${node}",job="${job}",mountpoint="/",fstype!~"rootfs|tmpfs"})`,
|
||||
cpuCores: `count(count(node_cpu_seconds_total{instance="${node}",job="${job}"}) by (cpu))`,
|
||||
memTotal: `node_memory_MemTotal_bytes{instance="${node}",job="${job}"}`,
|
||||
swapTotal: `node_memory_SwapTotal_bytes{instance="${node}",job="${job}"}`,
|
||||
rootFsTotal: `node_filesystem_size_bytes{instance="${node}",job="${job}",mountpoint="/",fstype!~"rootfs|tmpfs"}`,
|
||||
uptime: `node_time_seconds{instance="${node}",job="${job}"} - node_boot_time_seconds{instance="${node}",job="${job}"}`,
|
||||
netRx: `sum(rate(node_network_receive_bytes_total{instance="${node}",job="${job}",device!~'tap.*|veth.*|br.*|docker.*|virbr*|podman.*|lo.*|vmbr.*|fwbr.|ip.*|gre.*|virbr.*|vnet.*'}[1m]))`,
|
||||
netTx: `sum(rate(node_network_transmit_bytes_total{instance="${node}",job="${job}",device!~'tap.*|veth.*|br.*|docker.*|virbr*|podman.*|lo.*|vmbr.*|fwbr.|ip.*|gre.*|virbr.*|vnet.*'}[1m]))`
|
||||
netTx: `sum(rate(node_network_transmit_bytes_total{instance="${node}",job="${job}",device!~'tap.*|veth.*|br.*|docker.*|virbr*|podman.*|lo.*|vmbr.*|fwbr.|ip.*|gre.*|virbr.*|vnet.*'}[1m]))`,
|
||||
sockstatTcp: `node_sockstat_TCP_inuse{instance="${node}",job="${job}"}`,
|
||||
sockstatTcpMem: `node_sockstat_TCP_mem{instance="${node}",job="${job}"} * 4096`,
|
||||
conntrackEntries: `node_nf_conntrack_entries{instance="${node}",job="${job}"}`,
|
||||
conntrackLimit: `node_nf_conntrack_entries_limit{instance="${node}",job="${job}"}`,
|
||||
conntrackUsedPct: `(node_nf_conntrack_entries{instance="${node}",job="${job}"} / node_nf_conntrack_entries_limit{instance="${node}",job="${job}"}) * 100`,
|
||||
// Get individual partitions (excluding virtual and FUSE mounts)
|
||||
partitions_size: `node_filesystem_size_bytes{instance="${node}", job="${job}", fstype!~"tmpfs|autofs|proc|sysfs|fuse.*", mountpoint!~"/tmp.*|/var/lib/docker/.*|/run/.*"}`,
|
||||
partitions_free: `node_filesystem_free_bytes{instance="${node}", job="${job}", fstype!~"tmpfs|autofs|proc|sysfs|fuse.*", mountpoint!~"/tmp.*|/var/lib/docker/.*|/run/.*"}`
|
||||
};
|
||||
|
||||
const results = {};
|
||||
const queryPromises = Object.entries(queries).map(async ([key, expr]) => {
|
||||
try {
|
||||
const res = await query(url, expr);
|
||||
if (key.startsWith('partitions_')) {
|
||||
results[key] = res.map(r => ({
|
||||
mountpoint: r.metric.mountpoint,
|
||||
value: parseFloat(r.value[1]) || 0
|
||||
}));
|
||||
} else {
|
||||
results[key] = res.length > 0 ? parseFloat(res[0].value[1]) : 0;
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(`[Prometheus] Error querying ${key} for ${node}:`, e.message);
|
||||
results[key] = 0;
|
||||
results[key] = key.startsWith('partitions_') ? [] : 0;
|
||||
}
|
||||
});
|
||||
|
||||
await Promise.all(queryPromises);
|
||||
|
||||
// Process custom metrics from settings
|
||||
results.custom_data = [];
|
||||
try {
|
||||
const customMetrics = typeof settings.custom_metrics === 'string'
|
||||
? JSON.parse(settings.custom_metrics)
|
||||
: (settings.custom_metrics || []);
|
||||
|
||||
if (Array.isArray(customMetrics) && customMetrics.length > 0) {
|
||||
const customPromises = customMetrics.map(async (cfg) => {
|
||||
if (!cfg.metric) return null;
|
||||
try {
|
||||
const expr = `${cfg.metric}{instance="${node}",job="${job}"}`;
|
||||
const res = await query(url, expr);
|
||||
if (res && res.length > 0) {
|
||||
const val = res[0].metric[cfg.label || 'address'] || res[0].value[1];
|
||||
|
||||
// If this metric is marked as an IP source, update the main IP fields
|
||||
if (cfg.is_ip && !results.ipv4?.length && !results.ipv6?.length) {
|
||||
if (val.includes(':')) {
|
||||
results.ipv6 = [val];
|
||||
results.ipv4 = [];
|
||||
} else {
|
||||
results.ipv4 = [val];
|
||||
results.ipv6 = [];
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
name: cfg.name || cfg.metric,
|
||||
value: val
|
||||
};
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(`[Prometheus] Custom metric error (${cfg.metric}):`, e.message);
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
const customResults = await Promise.all(customPromises);
|
||||
results.custom_data = customResults.filter(r => r !== null);
|
||||
}
|
||||
} catch (err) {
|
||||
console.error('[Prometheus] Error processing custom metrics:', err.message);
|
||||
}
|
||||
|
||||
// Ensure IP discovery fallback if no custom IP metric found
|
||||
if ((!results.ipv4 || results.ipv4.length === 0) && (!results.ipv6 || results.ipv6.length === 0)) {
|
||||
try {
|
||||
const targets = await getTargets(baseUrl);
|
||||
const matchedTarget = targets.find(t => t.labels && t.labels.instance === node && t.labels.job === job);
|
||||
if (matchedTarget) {
|
||||
const scrapeUrl = matchedTarget.scrapeUrl || '';
|
||||
try {
|
||||
const urlObj = new URL(scrapeUrl);
|
||||
const host = urlObj.hostname;
|
||||
if (host.includes(':')) {
|
||||
results.ipv6 = [host];
|
||||
results.ipv4 = [];
|
||||
} else {
|
||||
results.ipv4 = [host];
|
||||
results.ipv6 = [];
|
||||
}
|
||||
} catch (e) {
|
||||
const host = scrapeUrl.split('//').pop().split('/')[0].split(':')[0];
|
||||
if (host) {
|
||||
results.ipv4 = [host];
|
||||
results.ipv6 = [];
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(`[Prometheus] Target fallback error for ${node}:`, e.message);
|
||||
}
|
||||
}
|
||||
|
||||
// Final sanitization
|
||||
results.ipv4 = results.ipv4 || [];
|
||||
results.ipv6 = results.ipv6 || [];
|
||||
|
||||
// Group partitions
|
||||
const partitionsMap = {};
|
||||
(results.partitions_size || []).forEach(p => {
|
||||
partitionsMap[p.mountpoint] = { mountpoint: p.mountpoint, size: p.value, free: 0 };
|
||||
});
|
||||
(results.partitions_free || []).forEach(p => {
|
||||
if (partitionsMap[p.mountpoint]) {
|
||||
partitionsMap[p.mountpoint].free = p.value;
|
||||
}
|
||||
});
|
||||
|
||||
results.partitions = Object.values(partitionsMap).map(p => ({
|
||||
...p,
|
||||
used: p.size - p.free,
|
||||
percent: p.size > 0 ? ((p.size - p.free) / p.size * 100) : 0
|
||||
})).sort((a, b) => a.mountpoint.localeCompare(b.mountpoint));
|
||||
|
||||
// Calculate total disk size
|
||||
results.totalDiskSize = results.partitions.reduce((sum, p) => sum + (p.size || 0), 0);
|
||||
|
||||
delete results.partitions_size;
|
||||
delete results.partitions_free;
|
||||
|
||||
// Add 24h traffic sum for this specific server
|
||||
try {
|
||||
const traffic24h = await get24hServerTrafficSum(baseUrl, instance, job);
|
||||
results.traffic24h = traffic24h;
|
||||
} catch (e) {
|
||||
console.error(`[Prometheus] Error fetching 24h traffic for ${node}:`, e.message);
|
||||
results.traffic24h = { rx: 0, tx: 0 };
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get historical metrics for a specific server (node)
|
||||
*/
|
||||
async function getServerHistory(baseUrl, instance, job, metric, range = '1h', start = null, end = null) {
|
||||
async function getServerHistory(baseUrl, instance, job, metric, range = '1h', start = null, end = null, p95Type = 'tx') {
|
||||
const url = normalizeUrl(baseUrl);
|
||||
const node = instance;
|
||||
const node = await resolveToken(instance);
|
||||
|
||||
// CPU Busy history: 100 - idle
|
||||
if (metric === 'cpuBusy') {
|
||||
const expr = `100 * (1 - avg(rate(node_cpu_seconds_total{mode="idle", instance="${node}"}[1m])))`;
|
||||
const rangeObj = parseRange(range, start, end);
|
||||
const result = await queryRange(url, expr, rangeObj.queryStart, rangeObj.queryEnd, rangeObj.step);
|
||||
|
||||
if (!result || result.length === 0) return { timestamps: [], values: [] };
|
||||
|
||||
return {
|
||||
timestamps: result[0].values.map(v => v[0] * 1000),
|
||||
values: result[0].values.map(v => parseFloat(v[1]))
|
||||
};
|
||||
}
|
||||
|
||||
// Map metric keys to Prometheus expressions
|
||||
const metricMap = {
|
||||
cpuBusy: `100 * (1 - avg(rate(node_cpu_seconds_total{mode="idle", instance="${node}"}[1m])))`,
|
||||
sysLoad: `node_load1{instance="${node}",job="${job}"} * 100 / count(count(node_cpu_seconds_total{instance="${node}",job="${job}"}) by (cpu))`,
|
||||
memUsedPct: `(1 - (node_memory_MemAvailable_bytes{instance="${node}", job="${job}"} / node_memory_MemTotal_bytes{instance="${node}", job="${job}"})) * 100`,
|
||||
swapUsedPct: `((node_memory_SwapTotal_bytes{instance="${node}",job="${job}"} - node_memory_SwapFree_bytes{instance="${node}",job="${job}"}) / (node_memory_SwapTotal_bytes{instance="${node}",job="${job}"})) * 100`,
|
||||
rootFsUsedPct: `100 - ((node_filesystem_avail_bytes{instance="${node}",job="${job}",mountpoint="/",fstype!="rootfs"} * 100) / node_filesystem_size_bytes{instance="${node}",job="${job}",mountpoint="/",fstype!="rootfs"})`,
|
||||
netRx: `sum(rate(node_network_receive_bytes_total{instance="${node}",job="${job}",device!~'tap.*|veth.*|br.*|docker.*|virbr*|podman.*|lo.*|vmbr.*|fwbr.|ip.*|gre.*|virbr.*|vnet.*'}[1m]))`,
|
||||
netTx: `sum(rate(node_network_transmit_bytes_total{instance="${node}",job="${job}",device!~'tap.*|veth.*|br.*|docker.*|virbr*|podman.*|lo.*|vmbr.*|fwbr.|ip.*|gre.*|virbr.*|vnet.*'}[1m]))`
|
||||
netTx: `sum(rate(node_network_transmit_bytes_total{instance="${node}",job="${job}",device!~'tap.*|veth.*|br.*|docker.*|virbr*|podman.*|lo.*|vmbr.*|fwbr.|ip.*|gre.*|virbr.*|vnet.*'}[1m]))`,
|
||||
sockstatTcp: `node_sockstat_TCP_inuse{instance="${node}",job="${job}"}`,
|
||||
sockstatTcpMem: `node_sockstat_TCP_mem{instance="${node}",job="${job}"} * 4096`,
|
||||
conntrackUsedPct: `(node_nf_conntrack_entries{instance="${node}",job="${job}"} / node_nf_conntrack_entries_limit{instance="${node}",job="${job}"}) * 100`
|
||||
};
|
||||
|
||||
const rangeObj = parseRange(range, start, end);
|
||||
|
||||
if (metric === 'networkTrend') {
|
||||
const txExpr = metricMap.netTx;
|
||||
const rxExpr = metricMap.netRx;
|
||||
const [txResult, rxResult] = await Promise.all([
|
||||
queryRange(url, txExpr, rangeObj.queryStart, rangeObj.queryEnd, rangeObj.step),
|
||||
queryRange(url, rxExpr, rangeObj.queryStart, rangeObj.queryEnd, rangeObj.step)
|
||||
]);
|
||||
|
||||
if (txResult.length === 0 && rxResult.length === 0) return { timestamps: [], rx: [], tx: [] };
|
||||
|
||||
const timestamps = (txResult.length > 0 ? txResult[0] : rxResult[0]).values.map(v => v[0] * 1000);
|
||||
const tx = txResult.length > 0 ? txResult[0].values.map(v => parseFloat(v[1])) : new Array(timestamps.length).fill(0);
|
||||
const rx = rxResult.length > 0 ? rxResult[0].values.map(v => parseFloat(v[1])) : new Array(timestamps.length).fill(0);
|
||||
|
||||
// Calculate statistics on backend
|
||||
let rxTotal = 0;
|
||||
let txTotal = 0;
|
||||
for (let i = 0; i < timestamps.length - 1; i++) {
|
||||
const duration = (timestamps[i+1] - timestamps[i]) / 1000;
|
||||
rxTotal += (rx[i] || 0) * duration;
|
||||
txTotal += (tx[i] || 0) * duration;
|
||||
}
|
||||
|
||||
// Calculate P95 based on p95Type
|
||||
let combined = [];
|
||||
if (p95Type === 'rx') {
|
||||
combined = [...rx];
|
||||
} else if (p95Type === 'both') {
|
||||
combined = tx.map((t, i) => (t || 0) + (rx[i] || 0));
|
||||
} else if (p95Type === 'max') {
|
||||
combined = tx.map((t, i) => Math.max(t || 0, rx[i] || 0));
|
||||
} else {
|
||||
// Default to tx
|
||||
combined = [...tx];
|
||||
}
|
||||
|
||||
const sorted = combined.sort((a, b) => a - b);
|
||||
const p95Idx = Math.floor(sorted.length * 0.95);
|
||||
const p95 = sorted.length > 0 ? sorted[p95Idx] : 0;
|
||||
|
||||
return {
|
||||
timestamps,
|
||||
tx,
|
||||
rx,
|
||||
stats: {
|
||||
rxTotal,
|
||||
txTotal,
|
||||
p95,
|
||||
total: rxTotal + txTotal
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
const expr = metricMap[metric];
|
||||
if (!expr) throw new Error('Invalid metric for history');
|
||||
|
||||
try {
|
||||
const result = await queryRange(url, expr, rangeObj.queryStart, rangeObj.queryEnd, rangeObj.step);
|
||||
if (!result || result.length === 0) return { timestamps: [], values: [] };
|
||||
|
||||
return {
|
||||
timestamps: result[0].values.map(v => v[0] * 1000),
|
||||
values: result[0].values.map(v => parseFloat(v[1]))
|
||||
};
|
||||
} catch (err) {
|
||||
console.error(`[Prometheus] Error fetching history for ${metric} on ${node}:`, err.message);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
function parseRange(range, start, end) {
|
||||
let duration, step, queryStart, queryEnd;
|
||||
|
||||
if (start && end) {
|
||||
queryStart = Math.floor(new Date(start).getTime() / 1000);
|
||||
queryEnd = Math.floor(new Date(end).getTime() / 1000);
|
||||
duration = queryEnd - queryStart;
|
||||
if (duration <= 0) throw new Error('End time must be after start time');
|
||||
// Reasonable step for fixed range
|
||||
step = Math.max(15, Math.floor(duration / 100));
|
||||
} else {
|
||||
// Relative range logic
|
||||
const rangeMap = {
|
||||
'15m': { duration: 900, step: 15 },
|
||||
'30m': { duration: 1800, step: 30 },
|
||||
@@ -495,14 +897,12 @@ async function getServerHistory(baseUrl, instance, job, metric, range = '1h', st
|
||||
duration = rangeMap[range].duration;
|
||||
step = rangeMap[range].step;
|
||||
} else {
|
||||
// Try to parse relative time string like "2h", "30m", "1d"
|
||||
const match = range.match(/^(\d+)([smhd])$/);
|
||||
if (match) {
|
||||
const val = parseInt(match[1]);
|
||||
const unit = match[2];
|
||||
const multipliers = { s: 1, m: 60, h: 3600, d: 86400 };
|
||||
duration = val * (multipliers[unit] || 3600);
|
||||
// Calculate a reasonable step for ~60-120 data points
|
||||
step = Math.max(15, Math.floor(duration / 100));
|
||||
} else {
|
||||
duration = 3600;
|
||||
@@ -512,30 +912,48 @@ async function getServerHistory(baseUrl, instance, job, metric, range = '1h', st
|
||||
queryEnd = Math.floor(Date.now() / 1000);
|
||||
queryStart = queryEnd - duration;
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await queryRange(url, expr, queryStart, queryEnd, step);
|
||||
if (!result || result.length === 0) return { timestamps: [], values: [] };
|
||||
|
||||
return {
|
||||
timestamps: result[0].values.map(v => v[0] * 1000),
|
||||
values: result[0].values.map(v => parseFloat(v[1]))
|
||||
};
|
||||
} catch (err) {
|
||||
console.error(`[Prometheus] Error fetching history for ${metric} on ${node}:`, err.message);
|
||||
throw err;
|
||||
}
|
||||
return { queryStart, queryEnd, step };
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
testConnection,
|
||||
query,
|
||||
queryRange,
|
||||
getTargets,
|
||||
getOverviewMetrics,
|
||||
get24hTrafficSum,
|
||||
getNetworkHistory,
|
||||
mergeNetworkHistories,
|
||||
getCpuHistory,
|
||||
mergeCpuHistories,
|
||||
getServerDetails,
|
||||
getServerHistory
|
||||
getServerHistory,
|
||||
resolveToken,
|
||||
getLatency: async (blackboxUrl, target) => {
|
||||
if (!blackboxUrl || !target) return null;
|
||||
try {
|
||||
const normalized = normalizeUrl(blackboxUrl);
|
||||
|
||||
const queryExpr = `(
|
||||
probe_icmp_duration_seconds{phase="rtt", instance="${target}"} or
|
||||
probe_icmp_duration_seconds{phase="rtt", target="${target}"} or
|
||||
probe_http_duration_seconds{phase="rtt", instance="${target}"} or
|
||||
probe_http_duration_seconds{phase="rtt", target="${target}"} or
|
||||
probe_icmp_duration_seconds{instance="${target}"} or
|
||||
probe_icmp_duration_seconds{target="${target}"} or
|
||||
probe_duration_seconds{instance="${target}"} or
|
||||
probe_duration_seconds{target="${target}"}
|
||||
)`;
|
||||
|
||||
const result = await query(normalized, queryExpr);
|
||||
if (result && result.length > 0) {
|
||||
return parseFloat(result[0].value[1]) * 1000;
|
||||
}
|
||||
return null;
|
||||
} catch (err) {
|
||||
console.error(`[Prometheus] Error fetching latency for ${target}:`, err.message);
|
||||
return null;
|
||||
}
|
||||
},
|
||||
getCacheKey
|
||||
};
|
||||
|
||||
187
update.sh
Normal file
187
update.sh
Normal file
@@ -0,0 +1,187 @@
|
||||
#!/bin/bash
# update.sh — self-updater for PromdataPanel.
# Updates the install via `git pull` when the app dir is a git clone, or
# via a staged ZIP download (with backup + rollback) otherwise, then
# restarts the service (systemd or PM2).

set -euo pipefail

# Name of the systemd unit / PM2 process to restart after updating.
SERVICE_NAME="promdatapanel"
# Fallback install location checked when auto-detection fails.
DEFAULT_APP_DIR="/opt/promdata-panel"
# main-branch archive used for non-git installs.
ZIP_URL="https://git.littlediary.cn/CN-JS-HuiBai/PromdataPanel/archive/main.zip"

# ANSI colors for status output.
GREEN='\033[0;32m'
BLUE='\033[0;34m'
RED='\033[0;31m'
YELLOW='\033[1;33m'
NC='\033[0m'

# Mutable state shared with cleanup()/rollback() via the traps below.
APP_DIR=""
TEMP_DIR=""
BACKUP_DIR=""
ROLLBACK_REQUIRED=false

echo -e "${BLUE}=== Starting PromdataPanel Update ===${NC}"
# Remove the temporary workspace (download, staging, backup), if one was
# ever created. Safe to call multiple times; a no-op when TEMP_DIR is empty.
cleanup() {
  if [[ -n "${TEMP_DIR}" && -d "${TEMP_DIR}" ]]; then
    rm -rf "${TEMP_DIR}"
  fi
}
# Restore APP_DIR from the pre-update backup after a failed update.
# Does nothing unless the update was actually applied (ROLLBACK_REQUIRED)
# and a usable backup directory exists.
rollback() {
  [[ "$ROLLBACK_REQUIRED" == true ]] || return 0
  [[ -n "${BACKUP_DIR}" && -d "${BACKUP_DIR}" ]] || return 0

  echo -e "${YELLOW}Update failed. Restoring previous application state...${NC}"
  # .env is never part of the backup, so it survives the restore untouched.
  rsync -a --delete --exclude '.env' "${BACKUP_DIR}/" "${APP_DIR}/"
}
# -E (errtrace) makes the ERR trap fire inside shell functions and
# subshells; without it, a failure inside update_from_zip/restart_service
# would bypass rollback entirely and only cleanup would run.
set -E
trap 'rollback' ERR
trap cleanup EXIT
# Heuristic check that a directory looks like a PromdataPanel install.
# $1 - candidate directory (may be empty).
# Returns 0 only when the directory exists and contains every marker file.
validate_app_dir() {
  local candidate="$1"
  local marker
  [[ -n "$candidate" && -d "$candidate" ]] || return 1
  for marker in package.json server/index.js public/index.html; do
    [[ -f "$candidate/$marker" ]] || return 1
  done
  return 0
}
# Locate the application directory, trying (in order) the systemd unit's
# WorkingDirectory, the current directory, then the packaged default.
# Sets APP_DIR on success; exits the whole script when nothing validates.
detect_app_dir() {
  local candidate

  if command -v systemctl >/dev/null 2>&1 \
    && systemctl list-unit-files | grep -q "^${SERVICE_NAME}\.service"; then
    echo "Detecting application directory from systemd service..."
    candidate=$(systemctl show -p WorkingDirectory "$SERVICE_NAME" | cut -d= -f2-)
    if validate_app_dir "$candidate"; then
      APP_DIR="$candidate"
      return
    fi
  fi

  for candidate in "$(pwd)" "$DEFAULT_APP_DIR"; do
    if validate_app_dir "$candidate"; then
      APP_DIR="$candidate"
      return
    fi
  done

  echo -e "${RED}Error: Could not locate a valid PromdataPanel application directory.${NC}"
  echo -e "${YELLOW}Expected markers: package.json, server/index.js, public/index.html${NC}"
  exit 1
}
# Ensure an external command is available, installing it with the first
# supported package manager when missing. Exits the script if no package
# manager is found.
# $1 - command name (also used as the package name).
ensure_tool() {
  local tool="$1"

  if command -v "$tool" >/dev/null 2>&1; then
    return
  fi

  echo -e "${BLUE}${tool} is not installed. Attempting to install it...${NC}"
  if command -v apt-get >/dev/null 2>&1; then
    sudo apt-get update
    sudo apt-get install -y "$tool"
  elif command -v dnf >/dev/null 2>&1; then
    sudo dnf install -y "$tool"
  elif command -v yum >/dev/null 2>&1; then
    sudo yum install -y "$tool"
  elif command -v apk >/dev/null 2>&1; then
    sudo apk add "$tool"
  else
    echo -e "${RED}Error: '${tool}' is not installed and could not be auto-installed.${NC}"
    exit 1
  fi
}
# Fast-forward the existing git checkout to the latest upstream commit.
# Refuses to run when the working tree has uncommitted changes, so a
# user's local edits are never silently clobbered.
update_from_git() {
  echo -e "${BLUE}Git repository detected. Pulling latest code...${NC}"
  if [[ -n "$(git status --porcelain)" ]]; then
    echo -e "${RED}Error: Working tree has local changes. Commit or stash them before updating.${NC}"
    exit 1
  fi
  git pull --ff-only
}
# Download the latest main-branch ZIP, build a fully-prepared staging copy
# (code + preserved .env + installed node_modules), back up the current
# install, then swap the staged tree into APP_DIR.
# Side effects: sets TEMP_DIR/BACKUP_DIR (consumed by cleanup/rollback via
# traps) and flips ROLLBACK_REQUIRED to true just before mutating APP_DIR,
# so only the final rsync is covered by rollback.
update_from_zip() {
  echo -e "${BLUE}No git repository found. Updating via ZIP archive with staging and rollback...${NC}"
  ensure_tool curl
  ensure_tool unzip
  ensure_tool rsync

  TEMP_DIR=$(mktemp -d "${TMPDIR:-/tmp}/promdatapanel-update-XXXXXX")
  BACKUP_DIR="${TEMP_DIR}/backup"
  local archive_path="${TEMP_DIR}/latest.zip"
  local extracted_folder=""
  local staging_dir=""

  echo "Downloading latest version (main branch)..."
  curl -fL "$ZIP_URL" -o "$archive_path"

  echo "Extracting archive..."
  unzip -q "$archive_path" -d "$TEMP_DIR"
  # The archive unpacks into a single top-level folder; exclude our own
  # backup directory name when locating it.
  extracted_folder=$(find "$TEMP_DIR" -mindepth 1 -maxdepth 1 -type d ! -name backup | head -n 1)

  if ! validate_app_dir "$extracted_folder"; then
    echo -e "${RED}Extraction failed or archive structure is invalid.${NC}"
    exit 1
  fi

  staging_dir="${TEMP_DIR}/staging"
  mkdir -p "$staging_dir"
  rsync -a --exclude '.git' "$extracted_folder/" "$staging_dir/"

  # Carry the runtime configuration over so the staged tree is complete.
  if [ -f "${APP_DIR}/.env" ]; then
    cp "${APP_DIR}/.env" "${staging_dir}/.env"
  fi

  echo "Installing dependencies in staging directory..."
  # Install inside the staging copy (subshell keeps our cwd unchanged) so
  # a failed npm install never touches the live APP_DIR.
  (
    cd "$staging_dir"
    npm install --production
  )

  echo "Creating rollback backup..."
  rsync -a --delete --exclude '.env' "${APP_DIR}/" "${BACKUP_DIR}/"

  echo "Applying staged update..."
  # From this point a failure must restore the backup (see rollback()).
  ROLLBACK_REQUIRED=true
  rsync -a --delete --exclude '.env' "${staging_dir}/" "${APP_DIR}/"
}
# Restart the running application: prefer an active systemd unit, fall
# back to a PM2-managed process, otherwise ask the operator to restart
# manually. Never fails the script — the fallback branch just warns.
restart_service() {
  if command -v systemctl >/dev/null 2>&1 && systemctl is-active --quiet "$SERVICE_NAME"; then
    echo -e "${BLUE}Restarting systemd service: ${SERVICE_NAME}...${NC}"
    sudo systemctl restart "$SERVICE_NAME"
  elif command -v pm2 >/dev/null 2>&1 && pm2 list | grep -q "$SERVICE_NAME"; then
    echo -e "${BLUE}Restarting with PM2...${NC}"
    pm2 restart "$SERVICE_NAME"
  else
    echo -e "${YELLOW}Warning: Could not detect an active systemd service or PM2 process named '${SERVICE_NAME}'.${NC}"
    echo -e "${YELLOW}Please restart the application manually.${NC}"
  fi
}
# --- Main flow ---
detect_app_dir
echo -e "${BLUE}Application directory: ${APP_DIR}${NC}"
cd "$APP_DIR"

# Prefer an in-place git update when the install is a clone; otherwise
# use the staged ZIP update with backup + rollback.
if [ -d ".git" ]; then
  update_from_git
  echo -e "${BLUE}Updating npm dependencies...${NC}"
  npm install --production
else
  update_from_zip
fi

restart_service
# Update applied and service restarted: disarm the ERR-trap rollback.
# NOTE(review): a restart_service failure here would still trigger rollback
# even though the new files were applied successfully — confirm intended.
ROLLBACK_REQUIRED=false

echo -e "${GREEN}=== Update successfully finished ===${NC}"
|
||||
Reference in New Issue
Block a user