修改请求数据库方式
This commit is contained in:
@@ -314,9 +314,6 @@ app.post('/api/sources', requireAuth, async (req, res) => {
|
||||
);
|
||||
const [rows] = await db.query('SELECT * FROM prometheus_sources WHERE id = ?', [result.insertId]);
|
||||
|
||||
// Trigger history preloading in background
|
||||
setImmediate(() => preloadSourceHistory(url));
|
||||
|
||||
res.status(201).json(rows[0]);
|
||||
} catch (err) {
|
||||
console.error('Error adding source:', err);
|
||||
@@ -560,47 +557,6 @@ app.get('*', (req, res) => {
|
||||
res.sendFile(path.join(__dirname, '..', 'public', 'index.html'));
|
||||
});
|
||||
|
||||
// Preload history for all existing sources if table is empty
async function initialPreload() {
  if (!isDbInitialized) return;
  try {
    // Only seed history when no traffic samples have been recorded yet.
    const [stats] = await db.query('SELECT COUNT(*) as count FROM traffic_stats');
    if (stats[0].count !== 0) return;

    console.log('[Initial Preloader] Database empty, preloading history for all sources...');
    const [sources] = await db.query('SELECT * FROM prometheus_sources');
    // Preload sequentially, one source at a time, to avoid hammering the
    // Prometheus endpoints and the local database at once.
    for (const { url } of sources) {
      await preloadSourceHistory(url);
    }
  } catch (err) {
    console.error('[Initial Preloader] Error:', err);
  }
}
|
||||
|
||||
// Preload history for a new source
async function preloadSourceHistory(url) {
  if (!isDbInitialized) return;
  console.log(`[History Preloader] Starting preloading for ${url}...`);
  try {
    const history = await prometheusService.getTrafficHistoryRange(url);
    if (!history || history.length === 0) return;

    // On a timestamp collision, accumulate into the existing row so that
    // points from multiple sources aggregate instead of overwriting.
    const upsertSql = `
      INSERT INTO traffic_stats (timestamp, rx_bytes, tx_bytes, rx_bandwidth, tx_bandwidth)
      VALUES (FROM_UNIXTIME(?), ?, ?, ?, ?)
      ON DUPLICATE KEY UPDATE
        rx_bytes = rx_bytes + VALUES(rx_bytes),
        tx_bytes = tx_bytes + VALUES(tx_bytes),
        rx_bandwidth = rx_bandwidth + VALUES(rx_bandwidth),
        tx_bandwidth = tx_bandwidth + VALUES(tx_bandwidth)
    `;

    // Insert sequentially to keep write pressure on the database low.
    for (const { ts, rxBytes, txBytes, rxBW, txBW } of history) {
      await db.query(upsertSql, [ts, Math.round(rxBytes), Math.round(txBytes), rxBW, txBW]);
    }
    console.log(`[History Preloader] Successfully preloaded ${history.length} points for ${url}.`);
  } catch (err) {
    console.error(`[History Preloader] Error preloading ${url}:`, err.message);
  }
}
|
||||
|
||||
async function recordTrafficStats() {
|
||||
if (!isDbInitialized) return;
|
||||
@@ -655,9 +611,7 @@ async function recordTrafficStats() {
|
||||
}
|
||||
|
||||
// Check and fix database integrity on startup, then seed history once the
// schema is known-good. Run exactly once — a second concurrent
// checkAndFixDatabase() call would race the first on schema repairs.
checkAndFixDatabase()
  .then(() => initialPreload())
  .catch((err) => console.error('[Startup] Database initialization failed:', err));

// Record traffic every 5 seconds (17,280 points/day)
setInterval(recordTrafficStats, 5 * 1000);
|
||||
|
||||
@@ -411,50 +411,6 @@ function mergeCpuHistories(histories) {
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Get aggregated traffic history for preloading: the past 24h sampled at
 * 5-second resolution (17,280 points), matching the live recording interval.
 *
 * @param {string} url - Base URL of the Prometheus server to query.
 * @returns {Promise<Array<{ts: number, rxBytes: number, txBytes: number, rxBW: number, txBW: number}>>}
 *   One entry per timestamp; a field whose query returned no data stays 0.
 */
async function getTrafficHistoryRange(url) {
  const now = Math.floor(Date.now() / 1000);
  const start = now - 86400; // 24h ago
  const step = 5; // 5 seconds (17,280 points for 24h)

  // Physical interfaces only: exclude loopback, veth pairs, docker and bridge devices.
  const queries = [
    'sum(node_network_receive_bytes_total{device!~"lo|veth.*|docker.*|br-.*"})',
    'sum(node_network_transmit_bytes_total{device!~"lo|veth.*|docker.*|br-.*"})',
    'sum(rate(node_network_receive_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[5m]))',
    'sum(rate(node_network_transmit_bytes_total{device!~"lo|veth.*|docker.*|br-.*"}[5m]))'
  ];

  // A failed query degrades to an empty series instead of failing the preload.
  const [rxBytesRes, txBytesRes, rxBWRes, txBWRes] = await Promise.all(
    queries.map((q) => queryRange(url, q, start, now, step).catch(() => []))
  );

  // Merge the four series into one record per timestamp.
  const dataMap = new Map();

  // Renamed from `process` to avoid shadowing Node's global `process`.
  const mergeSeries = (res, field) => {
    if (res.length > 0 && res[0].values) {
      for (const [ts, val] of res[0].values) {
        const entry = dataMap.get(ts) || { ts, rxBytes: 0, txBytes: 0, rxBW: 0, txBW: 0 };
        entry[field] = parseFloat(val) || 0;
        dataMap.set(ts, entry);
      }
    }
  };

  mergeSeries(rxBytesRes, 'rxBytes');
  mergeSeries(txBytesRes, 'txBytes');
  mergeSeries(rxBWRes, 'rxBW');
  mergeSeries(txBWRes, 'txBW');

  return Array.from(dataMap.values());
}
|
||||
|
||||
module.exports = {
|
||||
testConnection,
|
||||
@@ -464,6 +420,5 @@ module.exports = {
|
||||
getNetworkHistory,
|
||||
mergeNetworkHistories,
|
||||
getCpuHistory,
|
||||
mergeCpuHistories,
|
||||
getTrafficHistoryRange
|
||||
mergeCpuHistories
|
||||
};
|
||||
|
||||
Reference in New Issue
Block a user