优化数据库稳定性

This commit is contained in:
CN-JS-HuiBai
2026-04-11 18:14:04 +08:00
parent a876c854f4
commit d7ac1bedb4
3 changed files with 79 additions and 26 deletions

View File

@@ -28,6 +28,7 @@ const SESSION_TTL_SECONDS = parseInt(process.env.SESSION_TTL_SECONDS, 10) || 864
const PASSWORD_ITERATIONS = parseInt(process.env.PASSWORD_ITERATIONS, 10) || 210000;
const ALLOW_REMOTE_SETUP = process.env.ALLOW_REMOTE_SETUP === 'true';
const COOKIE_SECURE = process.env.COOKIE_SECURE === 'true';
const APP_SECRET = process.env.APP_SECRET || crypto.randomBytes(32).toString('hex');
const RATE_LIMITS = {
login: { windowMs: 15 * 60 * 1000, max: 8 },
setup: { windowMs: 10 * 60 * 1000, max: 20 }
@@ -619,6 +620,7 @@ COOKIE_SECURE=${process.env.COOKIE_SECURE || 'false'}
SESSION_TTL_SECONDS=${process.env.SESSION_TTL_SECONDS || SESSION_TTL_SECONDS}
PASSWORD_ITERATIONS=${process.env.PASSWORD_ITERATIONS || PASSWORD_ITERATIONS}
ENABLE_EXTERNAL_GEO_LOOKUP=${process.env.ENABLE_EXTERNAL_GEO_LOOKUP || 'false'}
APP_SECRET=${process.env.APP_SECRET || APP_SECRET}
`;
fs.writeFileSync(path.join(__dirname, '..', '.env'), envContent);
@@ -1384,6 +1386,8 @@ app.get('/api/metrics/latency', async (req, res) => {
const server = http.createServer(app);
const wss = new WebSocket.Server({ server });
let isBroadcastRunning = false;
let cachedLatencyRoutes = null;
let lastRoutesUpdate = 0;
function broadcast(data) {
const message = JSON.stringify(data);
@@ -1394,21 +1398,25 @@ function broadcast(data) {
});
}
// Broadcast loop
async function broadcastMetrics() {
if (isBroadcastRunning) return;
isBroadcastRunning = true;
try {
const overview = await getOverview();
// Also include latencies in the broadcast to make map lines real-time
const [routes] = await db.query(`
SELECT r.*, s.url, s.type as source_type
FROM latency_routes r
JOIN prometheus_sources s ON r.source_id = s.id
`);
// Refresh routes list every 60 seconds or if it hasn't been fetched yet
const now = Date.now();
if (!cachedLatencyRoutes || now - lastRoutesUpdate > 60000) {
const [routes] = await db.query(`
SELECT r.*, s.url, s.type as source_type
FROM latency_routes r
JOIN prometheus_sources s ON r.source_id = s.id
`);
cachedLatencyRoutes = routes;
lastRoutesUpdate = now;
}
const latencyResults = await Promise.all(routes.map(async (route) => {
const latencyResults = await Promise.all(cachedLatencyRoutes.map(async (route) => {
let latency = await cache.get(`latency:route:${route.id}`);
if (latency === null && route.source_type === 'prometheus') {
latency = await prometheusService.getLatency(route.url, route.latency_target);
@@ -1478,6 +1486,16 @@ async function start() {
}
}, 3600000); // Once per hour
// Periodic cleanup of the in-memory sessions Map so expired entries do not
// accumulate for the lifetime of the process (prevents unbounded memory growth
// when sessions expire without an explicit logout).
setInterval(() => {
const now = Date.now();
// NOTE(review): assumes every session object carries an `expiresAt` epoch-ms
// timestamp; entries missing it are never evicted here — confirm all session
// creation paths set `expiresAt`.
for (const [sessionId, session] of sessions.entries()) {
if (session.expiresAt && session.expiresAt <= now) {
// Deleting during iteration is safe: Map iterators tolerate removal
// of the current entry.
sessions.delete(sessionId);
}
}
}, 300000); // Once every 5 minutes
server.listen(PORT, HOST, () => {
console.log(`\n 🚀 Data Visualization Display Wall (WebSocket Enabled)`);
console.log(` 📊 Server running at http://${HOST === '0.0.0.0' ? 'localhost' : HOST}:${PORT}`);

View File

@@ -876,10 +876,8 @@ module.exports = {
getLatency: async (blackboxUrl, target) => {
if (!blackboxUrl || !target) return null;
try {
const normalized = blackboxUrl.trim().replace(/\/+$/, '');
const normalized = normalizeUrl(blackboxUrl);
// Construct a single optimized query searching for priority metrics and common labels
// Prioritize probe_icmp_duration_seconds OVER probe_duration_seconds
const queryExpr = `(
probe_icmp_duration_seconds{phase="rtt", instance="${target}"} or
probe_icmp_duration_seconds{phase="rtt", target="${target}"} or
@@ -891,14 +889,9 @@ module.exports = {
probe_duration_seconds{target="${target}"}
)`;
const params = new URLSearchParams({ query: queryExpr });
const res = await fetch(`${normalized}/api/v1/query?${params.toString()}`);
if (res.ok) {
const data = await res.json();
if (data.status === 'success' && data.data.result.length > 0) {
return parseFloat(data.data.result[0].value[1]) * 1000;
}
const result = await query(normalized, queryExpr);
if (result && result.length > 0) {
return parseFloat(result[0].value[1]) * 1000;
}
return null;
} catch (err) {