修复项目逻辑错误
新增数据库检查
This commit is contained in:
@@ -9,14 +9,30 @@ const crypto = require('crypto');
|
||||
// Reusable keep-alive agents so repeated Prometheus scrapes share TCP connections.
const httpAgent = new http.Agent({ keepAlive: true });
const httpsAgent = new https.Agent({ keepAlive: true });

// token -> { instance, job, source, lastSeen }
// Opaque server tokens handed to clients; lastSeen drives TTL-based eviction.
const serverIdMap = new Map();

// HMAC key for token derivation. Falls back to a random per-process key, which
// means tokens are NOT stable across restarts unless APP_SECRET is set.
const SECRET = process.env.APP_SECRET || crypto.randomBytes(32).toString('hex');
|
||||
|
||||
// Periodic cleanup of serverIdMap to prevent unbounded growth: evict any
// token whose lastSeen is older than the TTL.
const SERVER_ID_TTL_MS = 24 * 60 * 60 * 1000; // entries expire after 24 hours
const CLEANUP_INTERVAL_MS = 60 * 60 * 1000; // sweep once per hour

const serverIdCleanupTimer = setInterval(() => {
  const now = Date.now();
  for (const [token, data] of serverIdMap.entries()) {
    // Entries written before lastSeen existed default to age-since-epoch,
    // so they are evicted on the first sweep.
    if (now - (data.lastSeen || 0) > SERVER_ID_TTL_MS) {
      serverIdMap.delete(token);
    }
  }
}, CLEANUP_INTERVAL_MS);

// Housekeeping only: don't let this timer keep the Node process alive on shutdown.
serverIdCleanupTimer.unref?.();
|
||||
|
||||
/**
 * Derive a stable, opaque 16-hex-char token identifying a server by its
 * (instance, job, source) triple, and refresh the token's lastSeen stamp
 * when it is already tracked in serverIdMap.
 *
 * @param {string} instance - Prometheus instance label.
 * @param {string} job - Prometheus job label.
 * @param {string} source - Data-source name the instance came from.
 * @returns {string} Truncated HMAC-SHA256 digest (first 16 hex chars).
 */
function getServerToken(instance, job, source) {
  const token = crypto
    .createHmac('sha256', SECRET)
    .update(`${instance}:${job}:${source}`)
    .digest('hex')
    .substring(0, 16);

  // Touch the entry so the periodic TTL sweep doesn't evict tokens in use.
  const entry = serverIdMap.get(token);
  if (entry) {
    entry.lastSeen = Date.now();
  }

  return token;
}
|
||||
|
||||
@@ -222,7 +238,7 @@ async function getOverviewMetrics(url, sourceName) {
|
||||
const token = getServerToken(originalInstance, job, sourceName);
|
||||
|
||||
// Store mapping for detail queries
|
||||
serverIdMap.set(token, { instance: originalInstance, source: sourceName, job });
|
||||
serverIdMap.set(token, { instance: originalInstance, source: sourceName, job, lastSeen: Date.now() });
|
||||
|
||||
if (!instances.has(token)) {
|
||||
instances.set(token, {
|
||||
@@ -582,76 +598,85 @@ async function getServerDetails(baseUrl, instance, job, settings = {}) {
|
||||
|
||||
await Promise.all(queryPromises);
|
||||
|
||||
// Add IP information
|
||||
// Process custom metrics from settings
|
||||
results.custom_data = [];
|
||||
try {
|
||||
let foundIp = false;
|
||||
const customMetrics = typeof settings.custom_metrics === 'string'
|
||||
? JSON.parse(settings.custom_metrics)
|
||||
: (settings.custom_metrics || []);
|
||||
|
||||
// 1. Try Custom Node Exporter Metric if configured
|
||||
if (settings.ip_metric_name) {
|
||||
try {
|
||||
const expr = `${settings.ip_metric_name}{instance="${node}",job="${job}"}`;
|
||||
const res = await query(url, expr);
|
||||
if (res && res.length > 0) {
|
||||
const address = res[0].metric[settings.ip_label_name || 'address'];
|
||||
if (address) {
|
||||
if (address.includes(':')) {
|
||||
results.ipv6 = [address];
|
||||
results.ipv4 = [];
|
||||
} else {
|
||||
results.ipv4 = [address];
|
||||
results.ipv6 = [];
|
||||
if (Array.isArray(customMetrics) && customMetrics.length > 0) {
|
||||
const customPromises = customMetrics.map(async (cfg) => {
|
||||
if (!cfg.metric) return null;
|
||||
try {
|
||||
const expr = `${cfg.metric}{instance="${node}",job="${job}"}`;
|
||||
const res = await query(url, expr);
|
||||
if (res && res.length > 0) {
|
||||
const val = res[0].metric[cfg.label || 'address'] || res[0].value[1];
|
||||
|
||||
// If this metric is marked as an IP source, update the main IP fields
|
||||
if (cfg.is_ip && !results.ipv4?.length && !results.ipv6?.length) {
|
||||
if (val.includes(':')) {
|
||||
results.ipv6 = [val];
|
||||
results.ipv4 = [];
|
||||
} else {
|
||||
results.ipv4 = [val];
|
||||
results.ipv6 = [];
|
||||
}
|
||||
}
|
||||
foundIp = true;
|
||||
|
||||
return {
|
||||
name: cfg.name || cfg.metric,
|
||||
value: val
|
||||
};
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(`[Prometheus] Custom metric error (${cfg.metric}):`, e.message);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(`[Prometheus] Error querying custom IP metric ${settings.ip_metric_name}:`, e.message);
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Fallback to Prometheus Targets API
|
||||
if (!foundIp) {
|
||||
try {
|
||||
const targets = await getTargets(baseUrl);
|
||||
const matchedTarget = targets.find(t => t.labels && t.labels.instance === node && t.labels.job === job);
|
||||
if (matchedTarget) {
|
||||
const scrapeUrl = matchedTarget.scrapeUrl || '';
|
||||
try {
|
||||
const urlObj = new URL(scrapeUrl);
|
||||
const host = urlObj.hostname;
|
||||
if (host.includes(':')) {
|
||||
results.ipv6 = [host];
|
||||
results.ipv4 = [];
|
||||
} else {
|
||||
results.ipv4 = [host];
|
||||
results.ipv6 = [];
|
||||
}
|
||||
foundIp = true;
|
||||
} catch (e) {
|
||||
// Simple fallback if URL parsing fails
|
||||
const host = scrapeUrl.split('//').pop().split('/')[0].split(':')[0];
|
||||
if (host) {
|
||||
results.ipv4 = [host];
|
||||
results.ipv6 = [];
|
||||
foundIp = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(`[Prometheus] Error fetching target info for ${node}:`, e.message);
|
||||
}
|
||||
}
|
||||
|
||||
if (!foundIp) {
|
||||
results.ipv4 = [];
|
||||
results.ipv6 = [];
|
||||
return null;
|
||||
});
|
||||
|
||||
const customResults = await Promise.all(customPromises);
|
||||
results.custom_data = customResults.filter(r => r !== null);
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(`[Prometheus] Critical error resolving IPs for ${node}:`, err.message);
|
||||
results.ipv4 = results.ipv4 || [];
|
||||
results.ipv6 = results.ipv6 || [];
|
||||
console.error('[Prometheus] Error processing custom metrics:', err.message);
|
||||
}
|
||||
|
||||
// Ensure IP discovery fallback if no custom IP metric found
|
||||
if ((!results.ipv4 || results.ipv4.length === 0) && (!results.ipv6 || results.ipv6.length === 0)) {
|
||||
try {
|
||||
const targets = await getTargets(baseUrl);
|
||||
const matchedTarget = targets.find(t => t.labels && t.labels.instance === node && t.labels.job === job);
|
||||
if (matchedTarget) {
|
||||
const scrapeUrl = matchedTarget.scrapeUrl || '';
|
||||
try {
|
||||
const urlObj = new URL(scrapeUrl);
|
||||
const host = urlObj.hostname;
|
||||
if (host.includes(':')) {
|
||||
results.ipv6 = [host];
|
||||
results.ipv4 = [];
|
||||
} else {
|
||||
results.ipv4 = [host];
|
||||
results.ipv6 = [];
|
||||
}
|
||||
} catch (e) {
|
||||
const host = scrapeUrl.split('//').pop().split('/')[0].split(':')[0];
|
||||
if (host) {
|
||||
results.ipv4 = [host];
|
||||
results.ipv6 = [];
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(`[Prometheus] Target fallback error for ${node}:`, e.message);
|
||||
}
|
||||
}
|
||||
|
||||
// Final sanitization
|
||||
results.ipv4 = results.ipv4 || [];
|
||||
results.ipv6 = results.ipv6 || [];
|
||||
|
||||
// Group partitions
|
||||
const partitionsMap = {};
|
||||
(results.partitions_size || []).forEach(p => {
|
||||
|
||||
Reference in New Issue
Block a user