// PromdataPanel/server/index.js
// Express backend: auth, first-run setup, Prometheus source CRUD, and
// metrics aggregation/recording. (636 lines, 22 KiB, JavaScript;
// last modified 2026-04-04 18:52:13 +08:00)
// Load configuration from ../.env (written by /api/setup/init) before anything reads process.env.
require('dotenv').config();
const express = require('express');
const cors = require('cors');
const path = require('path');
const db = require('./db'); // MySQL pool wrapper; exposes query() and initPool()
const prometheusService = require('./prometheus-service'); // PromQL HTTP client helpers
const app = express();
const PORT = process.env.PORT || 3000;
const HOST = process.env.HOST || '0.0.0.0';
app.use(cors());
app.use(express.json());
const fs = require('fs');
const crypto = require('crypto');
// Flipped to true by checkDb() / the setup routes once the schema is confirmed to exist.
let isDbInitialized = false;
const sessions = new Map(); // In-memory session store: sessionId -> { id, username }; lost on restart
// Middleware: reject the request with 401 unless a valid session cookie is present.
// On success, attaches the session payload as req.user and passes control on.
function requireAuth(req, res, next) {
  const sid = getCookie(req, 'session_id');
  if (!sid || !sessions.has(sid)) {
    return res.status(401).json({ error: 'Auth required' });
  }
  req.user = sessions.get(sid);
  next();
}
// Helper: read a single cookie value from the request's Cookie header.
// Returns the URI-decoded value, or undefined when the cookie is absent.
function getCookie(req, name) {
  const header = req.headers.cookie;
  if (!header) return undefined;
  // Escape regex metacharacters in the cookie name before building the pattern.
  const escaped = name.replace(/([\.$?*|{}\(\)\[\]\\\/\+^])/g, '\\$1');
  const match = header.match(new RegExp('(?:^|; )' + escaped + '=([^;]*)'));
  return match ? decodeURIComponent(match[1]) : undefined;
}
// Determine whether the backing MySQL database has been initialized and set
// the module-level `isDbInitialized` flag accordingly. Never throws.
async function checkDb() {
  try {
    // No .env file means first-run setup has never been completed.
    // (Uses the module-level `fs`; the previous shadowing re-require was redundant.)
    if (!fs.existsSync(path.join(__dirname, '..', '.env'))) {
      isDbInitialized = false;
      return;
    }
    const [rows] = await db.query("SHOW TABLES LIKE 'prometheus_sources'");
    isDbInitialized = rows.length > 0;
  } catch (err) {
    // Any connection/query failure is treated as "not initialized".
    isDbInitialized = false;
  }
}
checkDb();
// --- Auth API ---
// POST /api/auth/login — verify credentials against the users table and, on
// success, issue a 24h HttpOnly session cookie backed by the in-memory store.
app.post('/api/auth/login', async (req, res) => {
  const { username, password } = req.body;
  // Missing fields can never authenticate; this also guards pbkdf2Sync against
  // an undefined password (which previously threw and surfaced as a 500).
  if (!username || !password) {
    return res.status(401).json({ error: 'Invalid credentials' });
  }
  try {
    const [rows] = await db.query('SELECT * FROM users WHERE username = ?', [username]);
    if (rows.length === 0) return res.status(401).json({ error: 'Invalid credentials' });
    const user = rows[0];
    // PBKDF2 parameters must match those used in /api/setup/admin at creation time.
    const hash = crypto.pbkdf2Sync(password, user.salt, 1000, 64, 'sha512').toString('hex');
    // Constant-time comparison so response timing cannot leak matching hash prefixes.
    const expected = Buffer.from(String(user.password));
    const actual = Buffer.from(hash);
    const ok = expected.length === actual.length && crypto.timingSafeEqual(actual, expected);
    if (ok) {
      const sessionId = crypto.randomBytes(32).toString('hex');
      sessions.set(sessionId, { id: user.id, username: user.username });
      res.setHeader('Set-Cookie', `session_id=${sessionId}; Path=/; HttpOnly; SameSite=Strict; Max-Age=86400`);
      res.json({ success: true, username: user.username });
    } else {
      res.status(401).json({ error: 'Invalid credentials' });
    }
  } catch (err) {
    res.status(500).json({ error: 'Login failed' });
  }
});
// POST /api/auth/logout — drop the server-side session (if any) and expire the cookie.
app.post('/api/auth/logout', (req, res) => {
  const sid = getCookie(req, 'session_id');
  if (sid) {
    sessions.delete(sid);
  }
  res.setHeader('Set-Cookie', 'session_id=; Path=/; HttpOnly; Max-Age=0');
  res.json({ success: true });
});
// GET /api/auth/status — report whether the caller holds a valid session.
app.get('/api/auth/status', (req, res) => {
  const sid = getCookie(req, 'session_id');
  const session = sid ? sessions.get(sid) : undefined;
  if (session) {
    res.json({ authenticated: true, username: session.username });
  } else {
    res.json({ authenticated: false });
  }
});
// Setup API Routes
// POST /api/setup/test — verify that the supplied MySQL credentials can connect.
// Responds 200 on success, 400 with the driver's error message on failure.
app.post('/api/setup/test', async (req, res) => {
  const { host, port, user, password } = req.body;
  try {
    const mysql = require('mysql2/promise');
    const connection = await mysql.createConnection({
      host: host || 'localhost',
      port: parseInt(port, 10) || 3306, // explicit radix; NaN falls back to 3306
      user: user || 'root',
      password: password || ''
    });
    await connection.ping();
    await connection.end();
    res.json({ success: true, message: 'Connection successful' });
  } catch (err) {
    res.status(400).json({ success: false, error: err.message });
  }
});
// POST /api/setup/init — create the database and tables, persist the
// connection settings to ../.env, and re-initialize the live pool.
app.post('/api/setup/init', async (req, res) => {
  const { host, port, user, password, database } = req.body;
  try {
    const mysql = require('mysql2/promise');
    // Resolve defaults once so the connection, the .env file, and process.env
    // all agree on the same values.
    const dbHost = host || 'localhost';
    const dbPort = parseInt(port, 10) || 3306; // explicit radix
    const dbUser = user || 'root';
    const dbPassword = password || '';
    const dbName = database || 'display_wall';
    const connection = await mysql.createConnection({
      host: dbHost,
      port: dbPort,
      user: dbUser,
      password: dbPassword
    });
    // Identifiers cannot be bound with '?', so escape backticks to prevent an
    // untrusted database name from breaking out of the quoted identifier.
    const dbId = '`' + dbName.replace(/`/g, '``') + '`';
    // Create database
    await connection.query(`CREATE DATABASE IF NOT EXISTS ${dbId} CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci`);
    await connection.query(`USE ${dbId}`);
    // Create tables
    await connection.query(`
      CREATE TABLE IF NOT EXISTS prometheus_sources (
        id INT AUTO_INCREMENT PRIMARY KEY,
        name VARCHAR(255) NOT NULL,
        url VARCHAR(500) NOT NULL,
        description TEXT,
        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
    `);
    await connection.query(`
      CREATE TABLE IF NOT EXISTS users (
        id INT AUTO_INCREMENT PRIMARY KEY,
        username VARCHAR(255) NOT NULL UNIQUE,
        password VARCHAR(255) NOT NULL,
        salt VARCHAR(255) NOT NULL,
        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
    `);
    await connection.query(`
      CREATE TABLE IF NOT EXISTS traffic_stats (
        id INT AUTO_INCREMENT PRIMARY KEY,
        rx_bytes BIGINT UNSIGNED DEFAULT 0,
        tx_bytes BIGINT UNSIGNED DEFAULT 0,
        rx_bandwidth DOUBLE DEFAULT 0,
        tx_bandwidth DOUBLE DEFAULT 0,
        timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        UNIQUE INDEX (timestamp)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
    `);
    await connection.end();
    // Save to .env so the settings survive restarts (loaded by dotenv on boot).
    const envContent = `MYSQL_HOST=${dbHost}
MYSQL_PORT=${dbPort}
MYSQL_USER=${dbUser}
MYSQL_PASSWORD=${dbPassword}
MYSQL_DATABASE=${dbName}
PORT=${process.env.PORT || 3000}
HOST=${process.env.HOST || '0.0.0.0'}
REFRESH_INTERVAL=${process.env.REFRESH_INTERVAL || 5000}
`;
    fs.writeFileSync(path.join(__dirname, '..', '.env'), envContent);
    // Update process.env with the same resolved values that were written to
    // .env (previously the raw request values were used, which could disagree
    // with the file when fields were omitted).
    process.env.MYSQL_HOST = dbHost;
    process.env.MYSQL_PORT = String(dbPort);
    process.env.MYSQL_USER = dbUser;
    process.env.MYSQL_PASSWORD = dbPassword;
    process.env.MYSQL_DATABASE = dbName;
    // Re-initialize pool with the new settings
    db.initPool();
    isDbInitialized = true;
    res.json({ success: true, message: 'Initialization complete' });
  } catch (err) {
    console.error('Initialization error:', err);
    res.status(500).json({ success: false, error: err.message });
  }
});
// Setup Status Check
// GET /api/setup/status — report which setup step is pending: 'db' (no schema),
// 'admin' (no users yet), or 'prom' (fully configured, manage sources).
app.get('/api/setup/status', async (req, res) => {
  try {
    if (!isDbInitialized) {
      return res.json({ initialized: false, step: 'db' });
    }
    const [rows] = await db.query('SELECT COUNT(*) as count FROM users');
    const needsAdmin = rows[0].count === 0;
    if (needsAdmin) {
      return res.json({ initialized: true, needsAdmin: true, step: 'admin' });
    }
    res.json({ initialized: true, needsAdmin: false, step: 'prom' });
  } catch (err) {
    // A query failure while probing implies the DB is not usable yet.
    console.error('Status check error:', err);
    res.json({ initialized: false, step: 'db' });
  }
});
// Create First Admin
// POST /api/setup/admin — create the initial admin account. Refused (403)
// once any user exists, so it only works exactly once.
app.post('/api/setup/admin', async (req, res) => {
  const { username, password } = req.body;
  if (!username || !password) {
    return res.status(400).json({ error: 'Username and password are required' });
  }
  try {
    const [rows] = await db.query('SELECT COUNT(*) as count FROM users');
    if (rows[0].count > 0) {
      return res.status(403).json({ error: 'Admin already exists' });
    }
    // PBKDF2 parameters must stay in sync with the login handler's verification.
    const salt = crypto.randomBytes(16).toString('hex');
    const hash = crypto.pbkdf2Sync(password, salt, 1000, 64, 'sha512').toString('hex');
    await db.query('INSERT INTO users (username, password, salt) VALUES (?, ?, ?)', [username, hash, salt]);
    res.json({ success: true, message: 'Admin account created' });
  } catch (err) {
    console.error('Admin creation error:', err);
    res.status(500).json({ error: err.message });
  }
});
// Middleware to protect routes & enforce setup.
// Bug fix: /init.html was unconditionally whitelisted, which made the
// redirect-to-'/' branch at the bottom unreachable and left the setup page
// served even after setup completed. The page is now allowed only while the
// corresponding setup step is still pending.
app.use(async (req, res, next) => {
  const isApi = req.path.startsWith('/api/');
  // Setup APIs and static assets are always reachable.
  if (req.path.startsWith('/api/setup') || req.path.startsWith('/css/') ||
      req.path.startsWith('/js/') || req.path.startsWith('/fonts/')) {
    return next();
  }
  const isSetupPage = req.path === '/init.html';
  // Step 1: database must be configured.
  if (!isDbInitialized) {
    if (isApi) return res.status(503).json({ error: 'Database not initialized', needSetup: true });
    return isSetupPage ? next() : res.redirect('/init.html');
  }
  // Step 2: at least one admin user must exist.
  try {
    const [rows] = await db.query('SELECT COUNT(*) as count FROM users');
    if (rows[0].count === 0) {
      if (isApi) return res.status(503).json({ error: 'Admin not configured', needAdmin: true });
      return isSetupPage ? next() : res.redirect('/init.html?step=admin');
    }
  } catch (err) {
    // Users table missing despite isDbInitialized — a DB initialization issue;
    // keep serving (including the setup page) rather than failing hard.
    return next();
  }
  // Setup fully complete: the setup page is no longer accessible.
  if (isSetupPage) return res.redirect('/');
  next();
});
// Serve the SPA's static assets from ../public (registered after the
// setup-enforcement middleware so protected pages stay gated).
app.use(express.static(path.join(__dirname, '..', 'public')));
// ==================== Prometheus Source CRUD ====================
// Get all Prometheus sources
// GET /api/sources — list all configured sources, newest first, each
// annotated with live reachability ('online'/'offline') and version info.
app.get('/api/sources', async (req, res) => {
  try {
    const [rows] = await db.query('SELECT * FROM prometheus_sources ORDER BY created_at DESC');
    // Probe every source concurrently; a failed probe marks it offline
    // instead of failing the whole listing.
    const probe = async (source) => {
      try {
        const version = await prometheusService.testConnection(source.url);
        return { ...source, status: 'online', version };
      } catch (e) {
        return { ...source, status: 'offline', version: null };
      }
    };
    const annotated = await Promise.all(rows.map(probe));
    res.json(annotated);
  } catch (err) {
    console.error('Error fetching sources:', err);
    res.status(500).json({ error: 'Failed to fetch sources' });
  }
});
// Add a new Prometheus source
// POST /api/sources — requires auth; name and url are mandatory. Responds 201
// with the stored row and kicks off history preloading in the background.
app.post('/api/sources', requireAuth, async (req, res) => {
  let { name, url, description } = req.body;
  if (!name || !url) {
    return res.status(400).json({ error: 'Name and URL are required' });
  }
  // Default to http:// when no scheme was supplied.
  if (!/^https?:\/\//i.test(url)) {
    url = 'http://' + url;
  }
  try {
    const insertSql = 'INSERT INTO prometheus_sources (name, url, description) VALUES (?, ?, ?)';
    const [result] = await db.query(insertSql, [name, url, description || '']);
    const [rows] = await db.query('SELECT * FROM prometheus_sources WHERE id = ?', [result.insertId]);
    // Trigger history preloading without blocking the response.
    setImmediate(() => preloadSourceHistory(url));
    res.status(201).json(rows[0]);
  } catch (err) {
    console.error('Error adding source:', err);
    res.status(500).json({ error: 'Failed to add source' });
  }
});
// Update a Prometheus source
// PUT /api/sources/:id — requires auth. Validates that name and url are
// present (both columns are NOT NULL; previously missing fields surfaced as a
// generic 500) and returns 404 for unknown ids (previously a 200 with an
// empty body).
app.put('/api/sources/:id', requireAuth, async (req, res) => {
  let { name, url, description } = req.body;
  if (!name || !url) {
    return res.status(400).json({ error: 'Name and URL are required' });
  }
  if (!/^https?:\/\//i.test(url)) url = 'http://' + url;
  try {
    await db.query(
      'UPDATE prometheus_sources SET name = ?, url = ?, description = ? WHERE id = ?',
      [name, url, description || '', req.params.id]
    );
    const [rows] = await db.query('SELECT * FROM prometheus_sources WHERE id = ?', [req.params.id]);
    if (rows.length === 0) {
      return res.status(404).json({ error: 'Source not found' });
    }
    res.json(rows[0]);
  } catch (err) {
    console.error('Error updating source:', err);
    res.status(500).json({ error: 'Failed to update source' });
  }
});
// Delete a Prometheus source
// DELETE /api/sources/:id — requires auth; idempotent for unknown ids.
app.delete('/api/sources/:id', requireAuth, async (req, res) => {
  const { id } = req.params;
  try {
    await db.query('DELETE FROM prometheus_sources WHERE id = ?', [id]);
    res.json({ message: 'Source deleted' });
  } catch (err) {
    console.error('Error deleting source:', err);
    res.status(500).json({ error: 'Failed to delete source' });
  }
});
// Test connection to a Prometheus source
// POST /api/sources/test — probe a URL without saving it. A missing url is
// rejected up front (previously undefined was passed to testConnection).
app.post('/api/sources/test', async (req, res) => {
  let { url } = req.body;
  if (!url) {
    return res.status(400).json({ status: 'error', message: 'URL is required' });
  }
  if (!/^https?:\/\//i.test(url)) url = 'http://' + url;
  try {
    const version = await prometheusService.testConnection(url);
    res.json({ status: 'ok', version });
  } catch (err) {
    res.status(400).json({ status: 'error', message: err.message });
  }
});
// ==================== Metrics Aggregation ====================
// Get all aggregated metrics from all Prometheus sources.
// GET /api/metrics/overview — fans out to every configured source in
// parallel, sums the per-source metrics, and prefers the DB-derived 24h
// traffic delta (from traffic_stats) over the Prometheus-reported one when
// rows are available.
app.get('/api/metrics/overview', async (req, res) => {
  try {
    const [sources] = await db.query('SELECT * FROM prometheus_sources');
    if (sources.length === 0) {
      // No sources configured: respond with a zeroed payload of the same shape.
      return res.json({
        totalServers: 0,
        cpu: { used: 0, total: 0, percent: 0 },
        memory: { used: 0, total: 0, percent: 0 },
        disk: { used: 0, total: 0, percent: 0 },
        network: { total: 0, rx: 0, tx: 0 },
        traffic24h: { rx: 0, tx: 0, total: 0 },
        servers: []
      });
    }
    // A failing source yields null (and a logged error) instead of rejecting
    // the whole Promise.all.
    const allMetrics = await Promise.all(sources.map(source =>
      prometheusService.getOverviewMetrics(source.url, source.name).catch(err => {
        console.error(`Error fetching metrics from ${source.name}:`, err.message);
        return null;
      })
    ));
    const validMetrics = allMetrics.filter(m => m !== null);
    // Aggregate across all sources
    let totalServers = 0;
    let cpuUsed = 0, cpuTotal = 0;
    let memUsed = 0, memTotal = 0;
    let diskUsed = 0, diskTotal = 0;
    let netRx = 0, netTx = 0;
    let traffic24hRx = 0, traffic24hTx = 0;
    let allServers = [];
    for (const m of validMetrics) {
      totalServers += m.totalServers;
      cpuUsed += m.cpu.used;
      cpuTotal += m.cpu.total;
      memUsed += m.memory.used;
      memTotal += m.memory.total;
      diskUsed += m.disk.used;
      diskTotal += m.disk.total;
      netRx += m.network.rx;
      netTx += m.network.tx;
      traffic24hRx += m.traffic24h.rx;
      traffic24hTx += m.traffic24h.tx;
      allServers = allServers.concat(m.servers);
    }
    // --- 24h Traffic from DB ---
    // Overrides the Prometheus-derived traffic24h values when traffic_stats
    // has data; on any DB error the Prometheus values above are kept.
    try {
      // Get the oldest record within 24h and the latest overall
      const [oldest] = await db.query('SELECT rx_bytes, tx_bytes, timestamp FROM traffic_stats WHERE timestamp >= NOW() - INTERVAL 1 DAY ORDER BY timestamp ASC LIMIT 1');
      const [latest] = await db.query('SELECT rx_bytes, tx_bytes, timestamp FROM traffic_stats ORDER BY timestamp DESC LIMIT 1');
      if (oldest.length > 0 && latest.length > 0) {
        // Calculate difference. Handle counter resets (latest <= oldest -> 0),
        // though resets are unlikely for an aggregated total.
        traffic24hRx = latest[0].rx_bytes > oldest[0].rx_bytes ? (latest[0].rx_bytes - oldest[0].rx_bytes) : 0;
        traffic24hTx = latest[0].tx_bytes > oldest[0].tx_bytes ? (latest[0].tx_bytes - oldest[0].tx_bytes) : 0;
      }
    } catch (err) {
      console.error('Error calculating 24h traffic from DB:', err);
      // Fallback to what we got from Prometheus directly in each source if DB fails
    }
    // percent fields guard against division by zero when totals are 0.
    res.json({
      totalServers,
      cpu: {
        used: cpuUsed,
        total: cpuTotal,
        percent: cpuTotal > 0 ? (cpuUsed / cpuTotal * 100) : 0
      },
      memory: {
        used: memUsed,
        total: memTotal,
        percent: memTotal > 0 ? (memUsed / memTotal * 100) : 0
      },
      disk: {
        used: diskUsed,
        total: diskTotal,
        percent: diskTotal > 0 ? (diskUsed / diskTotal * 100) : 0
      },
      network: {
        total: netRx + netTx,
        rx: netRx,
        tx: netTx
      },
      traffic24h: {
        rx: traffic24hRx,
        tx: traffic24hTx,
        total: traffic24hRx + traffic24hTx
      },
      servers: allServers
    });
  } catch (err) {
    console.error('Error fetching overview metrics:', err);
    res.status(500).json({ error: 'Failed to fetch metrics' });
  }
});
// Get network traffic history from DB (past 24h)
// GET /api/metrics/network-history — returns parallel arrays of millisecond
// timestamps and rx/tx bandwidth samples recorded in traffic_stats.
app.get('/api/metrics/network-history', async (req, res) => {
  try {
    const [rows] = await db.query('SELECT rx_bandwidth, tx_bandwidth, UNIX_TIMESTAMP(timestamp) as ts FROM traffic_stats WHERE timestamp >= NOW() - INTERVAL 1 DAY ORDER BY ts ASC');
    if (rows.length === 0) {
      return res.json({ timestamps: [], rx: [], tx: [] });
    }
    const timestamps = [];
    const rx = [];
    const tx = [];
    for (const row of rows) {
      timestamps.push(row.ts * 1000); // seconds -> milliseconds for the frontend
      rx.push(row.rx_bandwidth);
      tx.push(row.tx_bandwidth);
    }
    res.json({ timestamps, rx, tx });
  } catch (err) {
    console.error('Error fetching network history from DB:', err);
    res.status(500).json({ error: 'Failed to fetch network history' });
  }
});
// Get CPU usage history for sparklines
// GET /api/metrics/cpu-history — fetch per-source CPU history in parallel
// and return the merged series; failed sources are skipped.
app.get('/api/metrics/cpu-history', async (req, res) => {
  try {
    const [sources] = await db.query('SELECT * FROM prometheus_sources');
    if (sources.length === 0) {
      return res.json({ timestamps: [], values: [] });
    }
    const fetchHistory = (source) =>
      prometheusService.getCpuHistory(source.url).catch(err => {
        console.error(`Error fetching CPU history from ${source.name}:`, err.message);
        return null;
      });
    const results = await Promise.all(sources.map(fetchHistory));
    const histories = results.filter(h => h !== null);
    if (histories.length === 0) {
      return res.json({ timestamps: [], values: [] });
    }
    res.json(prometheusService.mergeCpuHistories(histories));
  } catch (err) {
    console.error('Error fetching CPU history:', err);
    res.status(500).json({ error: 'Failed to fetch CPU history' });
  }
});
// SPA fallback
// Any unmatched GET serves the SPA shell so client-side routing works.
// Unknown /api/* paths get a JSON 404 instead (bug fix: previously API typos
// silently received index.html with a 200).
app.get('*', (req, res) => {
  if (req.path.startsWith('/api/')) {
    return res.status(404).json({ error: 'Not found' });
  }
  res.sendFile(path.join(__dirname, '..', 'public', 'index.html'));
});
// Preload history for all existing sources, but only when the traffic_stats
// table is completely empty (first boot after setup). Errors are logged, never thrown.
async function initialPreload() {
  if (!isDbInitialized) return;
  try {
    const [stats] = await db.query('SELECT COUNT(*) as count FROM traffic_stats');
    if (stats[0].count !== 0) return;
    console.log('[Initial Preloader] Database empty, preloading history for all sources...');
    const [sources] = await db.query('SELECT * FROM prometheus_sources');
    // Preload one source at a time.
    for (const source of sources) {
      await preloadSourceHistory(source.url);
    }
  } catch (err) {
    console.error('[Initial Preloader] Error:', err);
  }
}
// Preload history for a new source: fetch its traffic history range from
// Prometheus and upsert each point into traffic_stats. Because `timestamp`
// is UNIQUE, points from multiple sources landing on the same second are
// summed via ON DUPLICATE KEY UPDATE. Errors are logged, never thrown.
async function preloadSourceHistory(url) {
  if (!isDbInitialized) return;
  console.log(`[History Preloader] Starting preloading for ${url}...`);
  try {
    const history = await prometheusService.getTrafficHistoryRange(url);
    if (!history || history.length === 0) return;
    const upsertSql = `
      INSERT INTO traffic_stats (timestamp, rx_bytes, tx_bytes, rx_bandwidth, tx_bandwidth)
      VALUES (FROM_UNIXTIME(?), ?, ?, ?, ?)
      ON DUPLICATE KEY UPDATE
        rx_bytes = rx_bytes + VALUES(rx_bytes),
        tx_bytes = tx_bytes + VALUES(tx_bytes),
        rx_bandwidth = rx_bandwidth + VALUES(rx_bandwidth),
        tx_bandwidth = tx_bandwidth + VALUES(tx_bandwidth)
    `;
    for (const point of history) {
      await db.query(upsertSql, [point.ts, Math.round(point.rxBytes), Math.round(point.txBytes), point.rxBW, point.txBW]);
    }
    console.log(`[History Preloader] Successfully preloaded ${history.length} points for ${url}.`);
  } catch (err) {
    console.error(`[History Preloader] Error preloading ${url}:`, err.message);
  }
}
// Poll every configured Prometheus source for cumulative network byte
// counters and 5m-rate bandwidth, then persist the summed totals as one
// traffic_stats row. An unreachable source contributes zeros; nothing is
// written when every source reports zero byte counters.
async function recordTrafficStats() {
  if (!isDbInitialized) return;
  try {
    const [sources] = await db.query('SELECT * FROM prometheus_sources');
    if (sources.length === 0) return;
    // Extract the scalar from a PromQL instant-query result, defaulting to 0.
    const firstValue = (series) => (series.length > 0 ? parseFloat(series[0].value[1]) : 0);
    const perSource = await Promise.all(sources.map(async (source) => {
      try {
        const [rxBytesRes, txBytesRes, rxBWRes, txBWRes] = await Promise.all([
          prometheusService.query(source.url, 'sum(node_network_receive_bytes_total{device!~"lo|veth.*|docker.*|br-.*"})'),
          prometheusService.query(source.url, 'sum(node_network_transmit_bytes_total{device!~"lo|veth.*|docker.*|br-.*"})'),
          prometheusService.query(source.url, 'sum(rate(node_network_receive_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[5m]))'),
          prometheusService.query(source.url, 'sum(rate(node_network_transmit_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[5m]))')
        ]);
        return {
          rxBytes: firstValue(rxBytesRes),
          txBytes: firstValue(txBytesRes),
          rxBW: firstValue(rxBWRes),
          txBW: firstValue(txBWRes)
        };
      } catch (e) {
        // Unreachable source contributes zeros rather than failing the cycle.
        return { rxBytes: 0, txBytes: 0, rxBW: 0, txBW: 0 };
      }
    }));
    const totals = perSource.reduce(
      (acc, r) => ({
        rxBytes: acc.rxBytes + r.rxBytes,
        txBytes: acc.txBytes + r.txBytes,
        rxBW: acc.rxBW + r.rxBW,
        txBW: acc.txBW + r.txBW
      }),
      { rxBytes: 0, txBytes: 0, rxBW: 0, txBW: 0 }
    );
    if (totals.rxBytes > 0 || totals.txBytes > 0) {
      await db.query('INSERT INTO traffic_stats (rx_bytes, tx_bytes, rx_bandwidth, tx_bandwidth) VALUES (?, ?, ?, ?)', [
        Math.round(totals.rxBytes),
        Math.round(totals.txBytes),
        totals.rxBW,
        totals.txBW
      ]);
      console.log(`[Traffic Recorder] Saved stats: BW_RX=${totals.rxBW}, BW_TX=${totals.txBW}`);
    }
  } catch (err) {
    console.error('[Traffic Recorder] Error recording stats:', err);
  }
}
// Check if need to create traffic_stats table if db already initialized.
// Idempotently creates/migrates traffic_stats for databases that were
// initialized before this table (or its bandwidth columns / unique index)
// existed.
async function ensureTrafficTable() {
  if (!isDbInitialized) return;
  try {
    await db.query(`
      CREATE TABLE IF NOT EXISTS traffic_stats (
        id INT AUTO_INCREMENT PRIMARY KEY,
        rx_bytes BIGINT UNSIGNED DEFAULT 0,
        tx_bytes BIGINT UNSIGNED DEFAULT 0,
        rx_bandwidth DOUBLE DEFAULT 0,
        tx_bandwidth DOUBLE DEFAULT 0,
        timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        UNIQUE INDEX (timestamp)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
    `);
    // Best-effort migrations for pre-existing tables: each statement fails
    // harmlessly when the column/index is already present.
    try { await db.query('ALTER TABLE traffic_stats ADD COLUMN rx_bandwidth DOUBLE DEFAULT 0'); } catch (e) {}
    try { await db.query('ALTER TABLE traffic_stats ADD COLUMN tx_bandwidth DOUBLE DEFAULT 0'); } catch (e) {}
    try { await db.query('ALTER TABLE traffic_stats ADD UNIQUE INDEX (timestamp)'); } catch (e) {}
  } catch (err) {
    // Bug fix: this was previously swallowed silently, hiding real CREATE failures.
    console.error('[Traffic Stats] Failed to ensure traffic_stats table:', err.message);
  }
}
// Ensure the schema exists, then backfill traffic history once at startup.
ensureTrafficTable().then(() => {
  initialPreload();
});
// Record traffic every 5 minutes
setInterval(recordTrafficStats, 5 * 60 * 1000);
// Initial record after a short delay
setTimeout(recordTrafficStats, 10000);
// Start the HTTP server and print the reachable URLs.
app.listen(PORT, HOST, () => {
  // When bound to all interfaces, show a clickable localhost URL instead.
  const displayHost = HOST === '0.0.0.0' ? 'localhost' : HOST;
  console.log(`\n 🚀 Data Visualization Display Wall`);
  console.log(` 📊 Server running at http://${displayHost}:${PORT}`);
  console.log(` ⚙️ Configure Prometheus sources at http://${displayHost}:${PORT}/settings\n`);
});