修复数据不显示的错误
This commit is contained in:
@@ -152,7 +152,8 @@ app.post('/api/setup/init', async (req, res) => {
|
||||
tx_bytes BIGINT UNSIGNED DEFAULT 0,
|
||||
rx_bandwidth DOUBLE DEFAULT 0,
|
||||
tx_bandwidth DOUBLE DEFAULT 0,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
UNIQUE INDEX (timestamp)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`);
|
||||
|
||||
@@ -296,6 +297,10 @@ app.post('/api/sources', requireAuth, async (req, res) => {
|
||||
[name, url, description || '']
|
||||
);
|
||||
const [rows] = await db.query('SELECT * FROM prometheus_sources WHERE id = ?', [result.insertId]);
|
||||
|
||||
// Trigger history preloading in background
|
||||
setImmediate(() => preloadSourceHistory(url));
|
||||
|
||||
res.status(201).json(rows[0]);
|
||||
} catch (err) {
|
||||
console.error('Error adding source:', err);
|
||||
@@ -498,6 +503,48 @@ app.get('*', (req, res) => {
|
||||
res.sendFile(path.join(__dirname, '..', 'public', 'index.html'));
|
||||
});
|
||||
|
||||
/**
 * One-time backfill at startup: if traffic_stats is still empty, pull the
 * recent history for every configured Prometheus source.
 * Errors are logged and swallowed — preloading is best-effort.
 */
async function initialPreload() {
  if (!isDbInitialized) return;
  try {
    const [countRows] = await db.query('SELECT COUNT(*) as count FROM traffic_stats');
    if (countRows[0].count !== 0) return; // table already has data — nothing to backfill

    console.log('[Initial Preloader] Database empty, preloading history for all sources...');
    const [sources] = await db.query('SELECT * FROM prometheus_sources');
    // Sequential on purpose: avoid querying every Prometheus endpoint at once.
    for (const { url } of sources) {
      await preloadSourceHistory(url);
    }
  } catch (err) {
    console.error('[Initial Preloader] Error:', err);
  }
}
|
||||
|
||||
/**
 * Backfill traffic_stats with the fetched history of a single source.
 * Rows that collide on the unique timestamp index are accumulated (summed),
 * so multiple sources contribute to one aggregate row per interval.
 * Errors are logged and swallowed — preloading is best-effort.
 */
async function preloadSourceHistory(url) {
  if (!isDbInitialized) return;
  console.log(`[History Preloader] Starting preloading for ${url}...`);

  const upsertSql = `
      INSERT INTO traffic_stats (timestamp, rx_bytes, tx_bytes, rx_bandwidth, tx_bandwidth)
      VALUES (FROM_UNIXTIME(?), ?, ?, ?, ?)
      ON DUPLICATE KEY UPDATE
        rx_bytes = rx_bytes + VALUES(rx_bytes),
        tx_bytes = tx_bytes + VALUES(tx_bytes),
        rx_bandwidth = rx_bandwidth + VALUES(rx_bandwidth),
        tx_bandwidth = tx_bandwidth + VALUES(tx_bandwidth)
    `;

  try {
    const history = await prometheusService.getTrafficHistoryRange(url);
    if (!history || history.length === 0) return;

    // Sequential inserts keep ordering deterministic and the DB load modest.
    for (const { ts, rxBytes, txBytes, rxBW, txBW } of history) {
      await db.query(upsertSql, [ts, Math.round(rxBytes), Math.round(txBytes), rxBW, txBW]);
    }
    console.log(`[History Preloader] Successfully preloaded ${history.length} points for ${url}.`);
  } catch (err) {
    console.error(`[History Preloader] Error preloading ${url}:`, err.message);
  }
}
|
||||
|
||||
async function recordTrafficStats() {
|
||||
if (!isDbInitialized) return;
|
||||
try {
|
||||
@@ -561,16 +608,20 @@ async function ensureTrafficTable() {
|
||||
tx_bytes BIGINT UNSIGNED DEFAULT 0,
|
||||
rx_bandwidth DOUBLE DEFAULT 0,
|
||||
tx_bandwidth DOUBLE DEFAULT 0,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
UNIQUE INDEX (timestamp)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
`);
|
||||
// Add columns if missing for existing tables
|
||||
try { await db.query('ALTER TABLE traffic_stats ADD COLUMN rx_bandwidth DOUBLE DEFAULT 0'); } catch(e) {}
|
||||
try { await db.query('ALTER TABLE traffic_stats ADD COLUMN tx_bandwidth DOUBLE DEFAULT 0'); } catch(e) {}
|
||||
try { await db.query('ALTER TABLE traffic_stats ADD UNIQUE INDEX (timestamp)'); } catch(e) {}
|
||||
} catch (err) {}
|
||||
}
|
||||
|
||||
// Ensure the schema exists before backfilling history; without a rejection
// handler this chain would surface as an unhandled promise rejection.
ensureTrafficTable().then(() => {
  initialPreload();
}).catch((err) => {
  console.error('[Startup] Traffic table initialization failed:', err);
});

// Record traffic every 5 minutes
setInterval(recordTrafficStats, 5 * 60 * 1000);
|
||||
|
||||
@@ -402,6 +402,51 @@ function mergeCpuHistories(histories) {
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Get aggregated traffic history for preloading: the past 24h sampled at
 * 5-minute intervals from one Prometheus endpoint.
 *
 * @param {string} url - Base URL of the Prometheus server.
 * @returns {Promise<Array<{ts:number, rxBytes:number, txBytes:number, rxBW:number, txBW:number}>>}
 *   One record per timestamp, sorted by ascending timestamp. Fields absent
 *   from a series default to 0; a failed range query is treated as an empty
 *   series rather than rejecting the whole batch.
 */
async function getTrafficHistoryRange(url) {
  const now = Math.floor(Date.now() / 1000);
  const start = now - 86400; // 24h ago
  const step = 300; // 5 minutes

  // Physical interfaces only: exclude loopback and docker/bridge/veth virtuals.
  const queries = [
    'sum(node_network_receive_bytes_total{device!~"lo|veth.*|docker.*|br-.*"})',
    'sum(node_network_transmit_bytes_total{device!~"lo|veth.*|docker.*|br-.*"})',
    'sum(rate(node_network_receive_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[5m]))',
    'sum(rate(node_network_transmit_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[5m]))'
  ];

  const [rxBytesRes, txBytesRes, rxBWRes, txBWRes] = await Promise.all(
    queries.map((q) => queryRange(url, q, start, now, step).catch(() => []))
  );

  // Merge the four series into one record per timestamp.
  const dataMap = new Map();

  // Named `fold` rather than `process` to avoid shadowing Node's global `process`.
  const fold = (res, field) => {
    if (res.length > 0 && res[0].values) {
      for (const [ts, val] of res[0].values) {
        const entry = dataMap.get(ts) || { ts, rxBytes: 0, txBytes: 0, rxBW: 0, txBW: 0 };
        entry[field] = parseFloat(val) || 0;
        dataMap.set(ts, entry);
      }
    }
  };

  fold(rxBytesRes, 'rxBytes');
  fold(txBytesRes, 'txBytes');
  fold(rxBWRes, 'rxBW');
  fold(txBWRes, 'txBW');

  // Sort so output order is chronological, not dependent on which series a
  // timestamp first appeared in (Map iteration is insertion-ordered).
  return Array.from(dataMap.values()).sort((a, b) => a.ts - b.ts);
}
|
||||
|
||||
module.exports = {
|
||||
testConnection,
|
||||
query,
|
||||
@@ -410,5 +455,6 @@ module.exports = {
|
||||
getNetworkHistory,
|
||||
mergeNetworkHistories,
|
||||
getCpuHistory,
|
||||
mergeCpuHistories
|
||||
mergeCpuHistories,
|
||||
getTrafficHistoryRange
|
||||
};
|
||||
|
||||
Reference in New Issue
Block a user