Enhance Hashrate Calculation Logic with Sliding Window Approach

Description:
This commit refines the hashrate calculation for the Kaspa mining pool to provide more accurate real-time metrics. Instead of averaging over a worker's entire connection lifetime, the hashrate is now computed from a sliding window of the most recent shares, which avoids the inaccuracies introduced by long-term averaging.

Changes:

Update WorkerStats Structure:
- Added a recentShares array to the WorkerStats interface to store the timestamp and difficulty of each share found by a miner.

Refine addShare Method:
- Modified the addShare function in SharesManager to record each share's timestamp and difficulty in recentShares.
- Added logic to drop shares that fall outside the sliding window (10 minutes).

Implement Sliding Window Hashrate Calculation:
- Updated the calcHashRates method to compute the hashrate using only the shares within the sliding window.
- The hashrate is the average difficulty of those shares multiplied by the share count, divided by the time elapsed since the oldest share in the window (see the worked example after this list).
- Updated the Prometheus metrics (minerHashRateGauge and poolHashRateGauge) with the new hashrate values.

Ensure Prometheus Compatibility:
- Kept the refined hashrate calculation compatible with the existing Prometheus metrics, so real-time hashrate reporting and long-term trend analysis both use the new values.
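
For a rough sense of the formula used in calcHashRates, here is a minimal worked example; the numbers are made up for illustration and are not taken from the commit:

    // Suppose the window currently holds 30 shares at an average difficulty of 64,
    // and the oldest of those shares arrived 580 seconds ago.
    const avgDifficulty = 64;
    const shareCount = 30;
    const elapsedSeconds = 580;
    const workerHashRate = (avgDifficulty * shareCount) / elapsedSeconds; // ≈ 3.31, in the pool's difficulty-based hashrate units

Because only shares from the last 10 minutes enter this calculation, the value tracks what the worker is doing now rather than its lifetime average.
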
Benefits:
- Provides more accurate and responsive hashrate measurements.
- Improves real-time monitoring by better reflecting each miner's current performance.
- Prepares the system for long-term trend analysis through Prometheus, built on accurate real-time data.
Testing:
- Ensure the changes have been tested in a development environment.
- Monitor hashrate metrics after deployment to validate the improvements; a small self-contained sanity check is sketched below.
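
As a quick way to reason about the expected change in behaviour, the following standalone TypeScript sketch compares the old lifetime-average estimate with the new sliding-window estimate for a simulated miner whose share rate halves. It is an illustration only; the share pattern, numbers, and variable names are assumptions, not code from this repository.

    // Simulated miner: one share every 10 s for 50 minutes, then one every 20 s for the last 10 minutes.
    type Share = { timestamp: number; difficulty: number };

    const windowMs = 10 * 60 * 1000;
    const difficulty = 64;
    const now = Date.now();
    const startTime = now - 60 * 60 * 1000; // connected one hour ago

    const shares: Share[] = [];
    for (let t = startTime; t < now - windowMs; t += 10_000) shares.push({ timestamp: t, difficulty });
    for (let t = now - windowMs; t < now; t += 20_000) shares.push({ timestamp: t, difficulty });

    // Old approach: difficulty * total shares / total connected time.
    const lifetimeRate = (difficulty * shares.length) / ((now - startTime) / 1000); // ≈ 5.9

    // New approach: only the shares inside the 10-minute window.
    const recent = shares.filter(s => now - s.timestamp <= windowMs);
    const avgDiff = recent.reduce((sum, s) => sum + s.difficulty, 0) / recent.length;
    const windowRate = (avgDiff * recent.length) / ((now - recent[0].timestamp) / 1000); // ≈ 3.2

    console.log({ lifetimeRate, windowRate }); // the window figure already reflects the slowdown; the lifetime figure lags

The sliding-window number drops to roughly half as soon as the faster period leaves the window, which is the responsiveness this commit is aiming for.
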
argonmining committed Aug 10, 2024
1 parent 94cfd9d commit 584dd67
Showing 1 changed file with 25 additions and 10 deletions.
src/stratum/sharesManager.ts
@@ -30,6 +30,7 @@ export interface WorkerStats {
   varDiffSharesFound: number;
   varDiffWindow: number;
   minDiff: number;
+  recentShares: { timestamp: number, difficulty: number }[];
 }

 type MinerData = {
@@ -74,7 +75,8 @@ export class SharesManager {
       varDiffStartTime: Date.now(),
       varDiffSharesFound: 0,
       varDiffWindow: 0,
-      minDiff: 1 // Set to initial difficulty
+      minDiff: 1, // Set to initial difficulty
+      recentShares: [] // Initialize recentShares array
     };
     minerData.workerStats = workerStats;
     if (DEBUG) this.monitoring.debug(`SharesManager: Created new worker stats for ${workerName}`);
@@ -111,16 +113,22 @@
         varDiffStartTime: Date.now(),
         varDiffSharesFound: 0,
         varDiffWindow: 0,
-        minDiff: currentDifficulty
+        minDiff: currentDifficulty,
+        recentShares: [] // Initialize recentShares array
       }
     });
   } else {
     minerData.workerStats.sharesFound++;
     minerData.workerStats.varDiffSharesFound++;
     minerData.workerStats.lastShare = timestamp;
     minerData.workerStats.minDiff = currentDifficulty;
+    minerData.workerStats.recentShares.push({ timestamp: Date.now(), difficulty: currentDifficulty }); // Add the share to recentShares
   }

+  // Clean up old shares outside the window
+  const windowSize = 10 * 60 * 1000; // 10 minutes window
+  minerData!.workerStats.recentShares = minerData!.workerStats.recentShares.filter(share => Date.now() - share.timestamp <= windowSize);
+
   if (this.contributions.has(nonce)) {
     metrics.updateGaugeInc(minerDuplicatedShares, [minerId, address]);
     throw Error('Duplicate share');
@@ -192,20 +200,27 @@

   calcHashRates() {
     let totalHashRate = 0;
+    const windowSize = 10 * 60 * 1000; // 10 minutes window
+
     this.miners.forEach((minerData, address) => {
-      const timeDifference = (Date.now() - minerData.workerStats.startTime) / 1000; // Convert to seconds
-      const workerStats = minerData.workerStats;
-      const workerHashRate = (workerStats.minDiff * workerStats.varDiffSharesFound) / timeDifference;
-      metrics.updateGaugeValue(minerHashRateGauge, [minerData.workerStats.workerName, address], workerHashRate);
-      totalHashRate += workerHashRate;
-      if (DEBUG) this.monitoring.debug(`SharesManager: Worker ${workerStats.workerName} stats - Time: ${timeDifference}s, Difficulty: ${workerStats.minDiff}, HashRate: ${workerHashRate}H/s, SharesFound: ${workerStats.sharesFound}, StaleShares: ${workerStats.staleShares}, InvalidShares: ${workerStats.invalidShares}`);
+      const now = Date.now();
+      const relevantShares = minerData.workerStats.recentShares.filter(share => now - share.timestamp <= windowSize);
+
+      if (relevantShares.length === 0) return;
+
+      const avgDifficulty = relevantShares.reduce((acc, share) => acc + share.difficulty, 0) / relevantShares.length;
+      const timeDifference = (now - relevantShares[0].timestamp) / 1000; // in seconds
+
+      const workerHashRate = (avgDifficulty * relevantShares.length) / timeDifference;
+      metrics.updateGaugeValue(minerHashRateGauge, [minerData.workerStats.workerName, address], workerHashRate);
+      totalHashRate += workerHashRate;
     });

     metrics.updateGaugeValue(poolHashRateGauge, ['pool', this.poolAddress], totalHashRate);
     if (DEBUG) {
-      this.monitoring.debug(`SharesManager: Total pool hash rate updated to ${totalHashRate} GH/s`);
-    }
+      this.monitoring.debug(`SharesManager: Total pool hash rate updated to ${totalHashRate} GH/s`);
+    }
   }

   getMiners() {
     return this.miners;
