修复安全和请求策略问题

This commit is contained in:
CN-JS-HuiBai
2026-04-14 16:56:46 +08:00
parent 5e9dac6197
commit 44843475c8
4 changed files with 110 additions and 30 deletions

View File

@@ -248,9 +248,16 @@
function init() { function init() {
try { try {
console.log('[Init] Start...'); console.log('[Init] Start...');
// Clear existing intervals to prevent duplication on re-init
if (backgroundIntervals && backgroundIntervals.length > 0) {
backgroundIntervals.forEach(clearInterval);
}
backgroundIntervals = [];
// Resource Gauges Time // Resource Gauges Time
updateGaugesTime(); updateGaugesTime();
setInterval(updateGaugesTime, 1000); backgroundIntervals.push(setInterval(updateGaugesTime, 1000));
// Initial footer year // Initial footer year
if (dom.copyrightYear) { if (dom.copyrightYear) {

View File

@@ -85,7 +85,18 @@ async function getLocation(target) {
// Secondary DB check with resolved IP // Secondary DB check with resolved IP
const [rows] = await db.query('SELECT * FROM server_locations WHERE ip = ?', [cleanIp]); const [rows] = await db.query('SELECT * FROM server_locations WHERE ip = ?', [cleanIp]);
if (rows.length > 0) { if (rows.length > 0) {
return normalizeGeo(rows[0]); const data = rows[0];
// Cache the domain mapping to avoid future DNS lookups
if (cleanTarget !== cleanIp) {
try {
await db.query(`
INSERT INTO server_locations (ip, country, country_name, region, city, latitude, longitude)
VALUES (?, ?, ?, ?, ?, ?, ?)
ON DUPLICATE KEY UPDATE last_updated = CURRENT_TIMESTAMP
`, [cleanTarget, data.country, data.country_name, data.region, data.city, data.latitude, data.longitude]);
} catch(e) {}
}
return normalizeGeo(data);
} }
} catch (err) { } catch (err) {
// Quiet DNS failure for tokens (legacy bug mitigation) // Quiet DNS failure for tokens (legacy bug mitigation)
@@ -145,6 +156,29 @@ async function getLocation(target) {
locationData.longitude locationData.longitude
]); ]);
// Cache the domain target as well if it differs from the resolved IP
if (cleanTarget !== cleanIp) {
await db.query(`
INSERT INTO server_locations (ip, country, country_name, region, city, latitude, longitude)
VALUES (?, ?, ?, ?, ?, ?, ?)
ON DUPLICATE KEY UPDATE
country = VALUES(country),
country_name = VALUES(country_name),
region = VALUES(region),
city = VALUES(city),
latitude = VALUES(latitude),
longitude = VALUES(longitude)
`, [
cleanTarget,
locationData.country,
locationData.country_name,
locationData.region,
locationData.city,
locationData.latitude,
locationData.longitude
]);
}
return locationData; return locationData;
} }
} catch (err) { } catch (err) {

View File

@@ -29,6 +29,7 @@ const PASSWORD_ITERATIONS = parseInt(process.env.PASSWORD_ITERATIONS, 10) || 210
const ALLOW_REMOTE_SETUP = process.env.ALLOW_REMOTE_SETUP === 'true'; const ALLOW_REMOTE_SETUP = process.env.ALLOW_REMOTE_SETUP === 'true';
const COOKIE_SECURE = process.env.COOKIE_SECURE === 'true'; const COOKIE_SECURE = process.env.COOKIE_SECURE === 'true';
const APP_SECRET = process.env.APP_SECRET || crypto.randomBytes(32).toString('hex'); const APP_SECRET = process.env.APP_SECRET || crypto.randomBytes(32).toString('hex');
process.env.APP_SECRET = APP_SECRET;
const RATE_LIMITS = { const RATE_LIMITS = {
login: { windowMs: 15 * 60 * 1000, max: 8 }, login: { windowMs: 15 * 60 * 1000, max: 8 },
setup: { windowMs: 10 * 60 * 1000, max: 20 } setup: { windowMs: 10 * 60 * 1000, max: 20 }
@@ -1063,34 +1064,61 @@ async function getOverview(force = false) {
const validMetrics = allMetrics.filter(m => m !== null); const validMetrics = allMetrics.filter(m => m !== null);
// Aggregate across all sources // Use Maps to deduplicate servers across multiple Prometheus sources
let totalServers = 0; const uniqueOverviewServers = new Map();
const uniqueDetailServers = new Map();
for (const m of validMetrics) {
if (m.isOverview) {
for (const s of m.servers) {
// originalInstance is the true IP/host before token masking
const key = `${s.originalInstance}::${s.job}`;
if (!uniqueOverviewServers.has(key)) {
uniqueOverviewServers.set(key, s);
} else if (s.up && !uniqueOverviewServers.get(key).up) {
// Prefer 'up' status if duplicate
uniqueOverviewServers.set(key, s);
}
}
}
if (m.isDetail) {
for (const s of m.servers) {
const key = `${s.originalInstance}::${s.job}`;
if (!uniqueDetailServers.has(key)) {
uniqueDetailServers.set(key, s);
} else if (s.up && !uniqueDetailServers.get(key).up) {
uniqueDetailServers.set(key, s);
}
}
}
}
const allOverviewServers = Array.from(uniqueOverviewServers.values());
const allDetailServers = Array.from(uniqueDetailServers.values());
// Aggregate across unique deduplicated servers
let totalServers = allOverviewServers.length;
let activeServers = 0; let activeServers = 0;
let cpuUsed = 0, cpuTotal = 0; let cpuUsed = 0, cpuTotal = 0;
let memUsed = 0, memTotal = 0; let memUsed = 0, memTotal = 0;
let diskUsed = 0, diskTotal = 0; let diskUsed = 0, diskTotal = 0;
let netRx = 0, netTx = 0; let netRx = 0, netTx = 0;
let traffic24hRx = 0, traffic24hTx = 0; let traffic24hRx = 0, traffic24hTx = 0;
let allServers = [];
for (const m of validMetrics) { for (const inst of allOverviewServers) {
if (m.isOverview) { if (inst.up) {
totalServers += m.totalServers; activeServers++;
activeServers += (m.activeServers !== undefined ? m.activeServers : m.totalServers); cpuUsed += (inst.cpuPercent / 100) * inst.cpuCores;
cpuUsed += m.cpu.used; cpuTotal += inst.cpuCores;
cpuTotal += m.cpu.total; memUsed += inst.memUsed;
memUsed += m.memory.used; memTotal += inst.memTotal;
memTotal += m.memory.total; diskUsed += inst.diskUsed;
diskUsed += m.disk.used; diskTotal += inst.diskTotal;
diskTotal += m.disk.total; netRx += inst.netRx || 0;
netRx += m.network.rx; netTx += inst.netTx || 0;
netTx += m.network.tx; traffic24hRx += inst.traffic24hRx || 0;
traffic24hRx += m.traffic24h.rx; traffic24hTx += inst.traffic24hTx || 0;
traffic24hTx += m.traffic24h.tx;
}
if (m.isDetail) {
allServers = allServers.concat(m.servers);
} }
} }
@@ -1122,12 +1150,12 @@ async function getOverview(force = false) {
tx: traffic24hTx, tx: traffic24hTx,
total: traffic24hRx + traffic24hTx total: traffic24hRx + traffic24hTx
}, },
servers: allServers servers: allDetailServers
}; };
// --- Add Geo Information to Servers --- // --- Add Geo Information to Servers ---
const geoServers = await Promise.all(overview.servers.map(async (server) => { const geoServers = await Promise.all(overview.servers.map(async (server) => {
const realInstance = server.originalInstance || prometheusService.resolveToken(server.instance); const realInstance = server.originalInstance || await prometheusService.resolveToken(server.instance);
// Helper to get host from instance (handles IPv6 with brackets, IPv4:port, etc.) // Helper to get host from instance (handles IPv6 with brackets, IPv4:port, etc.)
let cleanIp = realInstance; let cleanIp = realInstance;
if (cleanIp.startsWith('[')) { if (cleanIp.startsWith('[')) {

View File

@@ -1,6 +1,7 @@
const axios = require('axios'); const axios = require('axios');
const http = require('http'); const http = require('http');
const https = require('https'); const https = require('https');
const cache = require('./cache'); // <-- ADD
const QUERY_TIMEOUT = 10000; const QUERY_TIMEOUT = 10000;
@@ -10,7 +11,11 @@ const httpAgent = new http.Agent({ keepAlive: true });
const httpsAgent = new https.Agent({ keepAlive: true }); const httpsAgent = new https.Agent({ keepAlive: true });
const serverIdMap = new Map(); // token -> { instance, job, source, lastSeen } const serverIdMap = new Map(); // token -> { instance, job, source, lastSeen }
const SECRET = process.env.APP_SECRET || crypto.randomBytes(32).toString('hex');
/**
 * Returns the HMAC secret used for server-token masking.
 *
 * Prefers APP_SECRET from the environment (populated by index.js during
 * initialization). SECURITY FIX: never fall back to a hardcoded constant —
 * a predictable HMAC key would let anyone forge the 16-hex-char server
 * tokens produced by getServerToken(). If the env variable is missing,
 * generate a cryptographically random secret once per process instead;
 * tokens remain stable for this process lifetime (same as before, since the
 * secret is only read lazily at token-generation time).
 *
 * @returns {string} hex-encoded HMAC secret
 */
function getSecret() {
  if (process.env.APP_SECRET) {
    return process.env.APP_SECRET;
  }
  // Lazily create and memoize a random fallback (cached on the function
  // object so repeated calls return the same secret within this process).
  if (!getSecret._fallback) {
    getSecret._fallback = crypto.randomBytes(32).toString('hex');
  }
  return getSecret._fallback;
}
// Periodic cleanup of serverIdMap to prevent infinite growth // Periodic cleanup of serverIdMap to prevent infinite growth
setInterval(() => { setInterval(() => {
@@ -24,7 +29,7 @@ setInterval(() => {
}, 3600000); // Once per hour }, 3600000); // Once per hour
function getServerToken(instance, job, source) { function getServerToken(instance, job, source) {
const hash = crypto.createHmac('sha256', SECRET) const hash = crypto.createHmac('sha256', getSecret())
.update(`${instance}:${job}:${source}`) .update(`${instance}:${job}:${source}`)
.digest('hex') .digest('hex')
.substring(0, 16); .substring(0, 16);
@@ -246,6 +251,9 @@ async function getOverviewMetrics(url, sourceName) {
// Store mapping for detail queries // Store mapping for detail queries
serverIdMap.set(token, { instance: originalInstance, source: sourceName, job, lastSeen: Date.now() }); serverIdMap.set(token, { instance: originalInstance, source: sourceName, job, lastSeen: Date.now() });
// Also store in Valkey for resilience across restarts
cache.set(`server_token:${token}`, originalInstance, 86400).catch(()=>{});
if (!instances.has(token)) { if (!instances.has(token)) {
instances.set(token, { instances.set(token, {
instance: token, // This is the masked IP SENT TO FRONTEND instance: token, // This is the masked IP SENT TO FRONTEND
@@ -559,10 +567,13 @@ function mergeCpuHistories(histories) {
} }
/**
 * Resolves a masked server token back to its original instance address.
 *
 * Lookup order: the in-memory serverIdMap (populated during overview
 * scrapes), then the Valkey cache (`server_token:*` keys, which survive
 * process restarts). If neither knows the token, it is returned unchanged
 * so callers can treat already-unmasked instances transparently.
 *
 * @param {string} token - masked token (or a raw instance address)
 * @returns {Promise<string>} the original instance, or the input itself
 */
async function resolveToken(token) {
  // Fast path: in-memory mapping from the current process.
  const entry = serverIdMap.get(token);
  if (entry) {
    return entry.instance;
  }
  // Fallback: cache entry written when the token was first generated.
  const cached = await cache.get(`server_token:${token}`);
  return cached || token;
}
@@ -571,7 +582,7 @@ function resolveToken(token) {
*/ */
async function getServerDetails(baseUrl, instance, job, settings = {}) { async function getServerDetails(baseUrl, instance, job, settings = {}) {
const url = normalizeUrl(baseUrl); const url = normalizeUrl(baseUrl);
const node = resolveToken(instance); const node = await resolveToken(instance);
// Queries based on the requested dashboard structure // Queries based on the requested dashboard structure
const queries = { const queries = {
@@ -735,7 +746,7 @@ async function getServerDetails(baseUrl, instance, job, settings = {}) {
*/ */
async function getServerHistory(baseUrl, instance, job, metric, range = '1h', start = null, end = null, p95Type = 'tx') { async function getServerHistory(baseUrl, instance, job, metric, range = '1h', start = null, end = null, p95Type = 'tx') {
const url = normalizeUrl(baseUrl); const url = normalizeUrl(baseUrl);
const node = resolveToken(instance); const node = await resolveToken(instance);
// CPU Busy history: 100 - idle // CPU Busy history: 100 - idle
if (metric === 'cpuBusy') { if (metric === 'cpuBusy') {