Merge branch 'issues/612/application/min-stake-param' into issues/612/application/staking
bryanchriswhite authored Oct 4, 2024
2 parents 8c7eadf + 1faf4ab commit 48ba5dd
Showing 5 changed files with 139 additions and 97 deletions.
2 changes: 1 addition & 1 deletion api/poktroll/application/tx.pulsar.go

Some generated files are not rendered by default.

84 changes: 42 additions & 42 deletions pkg/crypto/protocol/relay_difficulty.go
@@ -33,64 +33,51 @@ func IsRelayVolumeApplicable(relayHash, targetHash []byte) bool {
// on the target number of relays we want the network to mine and the new EMA of
// the number of relays.
func ComputeNewDifficultyTargetHash(prevTargetHash []byte, targetNumRelays, newRelaysEma uint64) []byte {
-	// TODO_MAINNET(@red-0ne): Use a language agnostic float implementation to ensure
-	// deterministic results and avoid loss of precision. Specifically, we need to
-	// use big.Rat, delay any computation.
	// If targetNumRelays == newRelaysEma -> do not scale -> keep the same difficulty to mine relays
	// TODO_IMPROVE: Figure out if there's a range (e.g. 5%) within which it is reasonable
	// to keep the same difficulty.
	if targetNumRelays == newRelaysEma {
		return prevTargetHash
	}

	// Calculate the proportion of target relays relative to the EMA of actual volume applicable relays
-	difficultyScalingRatio := new(big.Float).Quo(
-		new(big.Float).SetUint64(targetNumRelays),
-		new(big.Float).SetUint64(newRelaysEma),
-	)
	// If difficultyScalingRatio < 1 -> scale down -> increase difficulty to mine relays
	// If difficultyScalingRatio > 1 -> scale up -> decrease difficulty to mine relays
-	if difficultyScalingRatio.Cmp(big.NewFloat(1)) == 0 {
-		return prevTargetHash
-	}
-
-	// You can't scale the base relay difficulty hash below BaseRelayDifficultyHashBz
-	isDecreasingDifficulty := difficultyScalingRatio.Cmp(big.NewFloat(1)) == 1
-	if isDecreasingDifficulty && bytes.Equal(prevTargetHash, BaseRelayDifficultyHashBz) {
-		return BaseRelayDifficultyHashBz
-	}
-
-	scaledDifficultyHash := ScaleRelayDifficultyHash(prevTargetHash, difficultyScalingRatio)
-	if len(scaledDifficultyHash) > len(BaseRelayDifficultyHashBz) {
-		return BaseRelayDifficultyHashBz
-	}
-
-	// Compute the new target hash by scaling the previous target hash based on the ratio
-	return ScaleRelayDifficultyHash(prevTargetHash, difficultyScalingRatio)
+	difficultyScalingRatio := big.NewRat(int64(targetNumRelays), int64(newRelaysEma))
+	scaledDifficultyHashBz := ScaleRelayDifficultyHash(prevTargetHash, difficultyScalingRatio)
+	// Trim all the leftmost zeros from the big endian representation of the scaled
+	// hash to ensure that it only contains the meaningful bytes.
+	scaledDifficultyHashBz = bytes.TrimLeft(scaledDifficultyHashBz, "\x00")
+
+	// TODO_IMPROVE: Make it so scaling up -> increase difficulty while scaling down -> decrease difficulty.
+	// If scaledDifficultyHash is longer than BaseRelayDifficultyHashBz, then use
+	// BaseRelayDifficultyHashBz as we should not have a bigger hash than the base.
+	if len(scaledDifficultyHashBz) > len(BaseRelayDifficultyHashBz) {
+		return BaseRelayDifficultyHashBz
+	}
+
+	// Ensure the scaled hash is padded to (at least) the same length as the provided hash.
+	if len(scaledDifficultyHashBz) < len(prevTargetHash) {
+		return padBytesToLength(scaledDifficultyHashBz, len(prevTargetHash))
+	}
+
+	return scaledDifficultyHashBz
}
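
Note: the following self-contained sketch is illustrative and not part of the commit; it mirrors the multiply-by-big.Rat-then-truncate arithmetic the new implementation performs, using only the Go standard library (the rescaleTarget name is hypothetical).

package main

import (
	"fmt"
	"math/big"
)

// rescaleTarget mirrors the new flow: interpret the target hash as a
// big-endian integer, multiply by targetNumRelays/newRelaysEma, and truncate.
func rescaleTarget(targetHash []byte, targetNumRelays, newRelaysEma int64) []byte {
	ratio := big.NewRat(targetNumRelays, newRelaysEma)
	hashRat := new(big.Rat).SetInt(new(big.Int).SetBytes(targetHash))
	scaled := new(big.Rat).Mul(hashRat, ratio)
	return new(big.Int).Div(scaled.Num(), scaled.Denom()).Bytes()
}

func main() {
	prev := []byte{0xff, 0xff} // toy 2-byte target (0xffff)
	fmt.Printf("%x\n", rescaleTarget(prev, 5, 10))  // 7fff: half the relays qualify
	fmt.Printf("%x\n", rescaleTarget(prev, 20, 10)) // 1fffe: grew past the base length, so the caller clamps it
}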

// ScaleRelayDifficultyHash scales the provided hash based on the given ratio.
// If the ratio is less than 1, the hash will be scaled down.
// DEV_NOTE: Only exposed publicly for testing purposes.
-func ScaleRelayDifficultyHash(difficultyHashBz []byte, ratio *big.Float) []byte {
-	// Convert difficultyHashBz to a big.Float to minimize precision loss.
-	// TODO_MAINNET(@red-one): Use a language agnostic float implementation or arithmetic library
-	// to ensure deterministic results across different language implementations of the
-	// protocol.
-	prevHashInt := bytesToBigInt(difficultyHashBz)
-	prevHashFloat := new(big.Float).SetInt(prevHashInt)
-
-	// Scale the current by multiplying it by the ratio.
-	// TODO(@red-0ne): Ensure that the precision lost here doesn't cause any
-	// major issues by using big.Rat.
-	scaledHashFloat := new(big.Float).Mul(prevHashFloat, ratio)
-	scaledHashInt, _ := scaledHashFloat.Int(nil)
-	// scaledHashBz := make([]byte, len(BaseRelayDifficultyHashBz))
-	// scaledHashInt.FillBytes(scaledHashBz)
-	scaledHashBz := scaledHashInt.Bytes()
-
-	// Ensure the scaled hash is padded to (at least) the same length as the provided hash.
-	if len(scaledHashBz) < len(difficultyHashBz) {
-		paddedHash := make([]byte, len(difficultyHashBz))
-		copy(paddedHash[len(paddedHash)-len(scaledHashBz):], scaledHashBz)
-		return paddedHash
-	}
-
-	return scaledHashBz
+func ScaleRelayDifficultyHash(
+	initialDifficultyHashBz []byte,
+	difficultyScalingRatio *big.Rat,
+) []byte {
+	difficultyHashInt := bytesToBigInt(initialDifficultyHashBz)
+	difficultyHashRat := new(big.Rat).SetInt(difficultyHashInt)
+
+	scaledHashRat := new(big.Rat).Mul(difficultyHashRat, difficultyScalingRatio)
+	scaledHashInt := new(big.Int).Div(scaledHashRat.Num(), scaledHashRat.Denom())
+	// Convert the scaled hash to a byte slice.
+	return scaledHashInt.Bytes()
}
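
Note: the motivation for big.Rat over big.Float here is determinism and precision; float64(0.1) is not exactly 1/10, which is why the old expected hashes in the tests below carry artifacts like 19999999999999ff…. An illustrative, standalone comparison (not part of the commit):

package main

import (
	"fmt"
	"math/big"
)

func main() {
	// hash = 2^256 - 1, i.e. the all-0xff base difficulty target.
	hashInt := new(big.Int).Sub(new(big.Int).Lsh(big.NewInt(1), 256), big.NewInt(1))

	// Exact: scale by the rational 1/10.
	scaled := new(big.Rat).Mul(new(big.Rat).SetInt(hashInt), big.NewRat(1, 10))
	exact := new(big.Int).Div(scaled.Num(), scaled.Denom())

	// Lossy: big.NewFloat(0.1) carries float64's 53-bit approximation of 0.1.
	lossy, _ := new(big.Float).Mul(new(big.Float).SetInt(hashInt), big.NewFloat(0.1)).Int(nil)

	fmt.Printf("big.Rat:   %x\n", exact) // 1999…99 all the way down
	fmt.Printf("big.Float: %x\n", lossy) // diverges after ~53 bits
}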

// GetRelayDifficultyProbability returns a fraction that determines the probability that a
@@ -127,3 +114,16 @@ func GetRelayDifficultyMultiplierToFloat32(relayDifficultyHash []byte) float32 {
func bytesToBigInt(b []byte) *big.Int {
	return new(big.Int).SetBytes(b)
}
+
+// padBytesToLength returns a zero padded representation of the given value.
+// If the value is longer than the desired length, it is returned as is.
+func padBytesToLength(valueToPad []byte, length int) []byte {
+	paddingOffset := length - len(valueToPad)
+	if paddingOffset <= 0 {
+		return valueToPad
+	}
+
+	paddedScaledDifficultyHash := make([]byte, length)
+	copy(paddedScaledDifficultyHash[paddingOffset:], valueToPad)
+	return paddedScaledDifficultyHash
+}
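
Note: illustrative usage of the new helper (assuming the same package). Big-endian zero padding never changes the integer value the bytes encode, which is why ComputeNewDifficultyTargetHash can pad freely after trimming.

// padBytesToLength([]byte{0x01, 0x00}, 4) // => []byte{0x00, 0x00, 0x01, 0x00}
// padBytesToLength([]byte{0x01, 0x00}, 1) // => []byte{0x01, 0x00} (returned as is)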
123 changes: 85 additions & 38 deletions pkg/crypto/protocol/relay_difficulty_test.go
@@ -197,15 +197,15 @@ func TestRelayDifficulty_ScaleDifficultyTargetHash(t *testing.T) {
	tests := []struct {
		desc                  string
		prevDifficultyHashHex string
-		scalingRatio          float64
+		scalingRatio          big.Rat

		expectedScaledDifficultyHashHex string // scaled but unbounded
		expectedNewDifficultyHashHex    string // uses the scaled result but bounded
	}{
		{
			desc:                  "Scale by 1 (same number of relays)",
			prevDifficultyHashHex: "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
-			scalingRatio:          1,
+			scalingRatio:          *big.NewRat(1, 1),

			// Scaled hash == expected hash
			expectedScaledDifficultyHashHex: "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
@@ -216,7 +216,7 @@ func TestRelayDifficulty_ScaleDifficultyTargetHash(t *testing.T) {
		{
			desc:                  "Scale by 0.5 (allow less relays)",
			prevDifficultyHashHex: "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
-			scalingRatio:          0.5,
+			scalingRatio:          *big.NewRat(1, 2),

			// Scaled hash == expected hash
			expectedScaledDifficultyHashHex: "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
@@ -225,7 +225,7 @@ func TestRelayDifficulty_ScaleDifficultyTargetHash(t *testing.T) {
		{
			desc:                  "Scale by 2 (allow more relays)",
			prevDifficultyHashHex: "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
-			scalingRatio:          2,
+			scalingRatio:          *big.NewRat(2, 1),

			// Scaled hash == expected hash
			expectedScaledDifficultyHashHex: "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe",
@@ -236,7 +236,7 @@ func TestRelayDifficulty_ScaleDifficultyTargetHash(t *testing.T) {
		{
			desc:                  "Scale by 0.25 (allow less relays)",
			prevDifficultyHashHex: "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
-			scalingRatio:          0.25,
+			scalingRatio:          *big.NewRat(1, 4),

			// Scaled hash == expected hash
			expectedScaledDifficultyHashHex: "3fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
@@ -245,7 +245,7 @@ func TestRelayDifficulty_ScaleDifficultyTargetHash(t *testing.T) {
		{
			desc:                  "Scale by 4 (allow more relays)",
			prevDifficultyHashHex: "3fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
-			scalingRatio:          4,
+			scalingRatio:          *big.NewRat(4, 1),

			// Scaled hash == expected hash
			expectedScaledDifficultyHashHex: "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc",
@@ -256,42 +256,61 @@ func TestRelayDifficulty_ScaleDifficultyTargetHash(t *testing.T) {
		{
			desc:                  "Scale by 0.1 (allow less relays)",
			prevDifficultyHashHex: "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
-			scalingRatio:          0.1,
+			scalingRatio:          *big.NewRat(1, 10),

-			// Scaled hash != expected hash
-			expectedScaledDifficultyHashHex: "19999999999999ffffffffffffffffffffffffffffffffffffffffffffffffff",
-			expectedNewDifficultyHashHex:    "19999999999999999fffffffffffffffffffffffffffffffffffffffffffffff",
+			// Scaled hash == expected hash
+			// Scaling down 0xff..ff by 10 leads to a non-integer result (...987.5 rounded down to ...987),
+			// making a scaling up of the result (0x19..99) by 10 not equal to the original hash.
+			expectedScaledDifficultyHashHex: "1999999999999999999999999999999999999999999999999999999999999999",
+			expectedNewDifficultyHashHex:    "1999999999999999999999999999999999999999999999999999999999999999",
		},
		{
			desc:                  "Scale by 10 (allow more relays)",
			prevDifficultyHashHex: "1999999999999999999999999999999999999999999999999999999999999999",
-			scalingRatio:          10,
+			scalingRatio:          *big.NewRat(10, 1),

			// Scaled hash == expected hash
-			expectedScaledDifficultyHashHex: "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8",
-			expectedNewDifficultyHashHex:    "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8",
+			expectedScaledDifficultyHashHex: "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffa",
+			expectedNewDifficultyHashHex:    "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffa",
		},

		// Scale down and up by 10e-12 and 10e12
		{
			desc:                  "Scale by 10e-12 (allow less relays)",
			prevDifficultyHashHex: "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
-			scalingRatio:          10e-12,
+			scalingRatio:          *big.NewRat(1, 10e12),

			// Scaled hash != expected hash
-			expectedScaledDifficultyHashHex: "000000000afebff0bcb24a7fffffffffffffffffffffffffffffffffffffffff",
-			expectedNewDifficultyHashHex:    "000000000afebff0bcb24aafefffffffffffffffffffffffffffffffffffffff",
+			expectedScaledDifficultyHashHex: "1c25c268497681c2650cb4be40d60df7311e9872477f201c409ec0",
+			expectedNewDifficultyHashHex:    "00000000001c25c268497681c2650cb4be40d60df7311e9872477f201c409ec0",
		},
+		{
+			desc:                  "Scale by 10e12 (allow more relays)",
+			prevDifficultyHashHex: "00000000001c25c268497681c2650cb4be40d60df7311e9872477f201c409ec0",
+			scalingRatio:          *big.NewRat(10e12, 1),
+
+			// Scaled hash == expected hash
+			expectedScaledDifficultyHashHex: "fffffffffffffffffffffffffffffffffffffffffffffffffffff8cd94b80000",
+			expectedNewDifficultyHashHex:    "fffffffffffffffffffffffffffffffffffffffffffffffffffff8cd94b80000",
+		},
+		{
+			desc:                  "Scale by 10e-12 (allow less relays) padding",
+			prevDifficultyHashHex: "0fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
+			scalingRatio:          *big.NewRat(1, 10e12),
+
+			// Scaled hash != expected hash: Padding
+			expectedScaledDifficultyHashHex: "01c25c268497681c2650cb4be40d60df7311e9872477f201c409ec",
+			expectedNewDifficultyHashHex:    "000000000001c25c268497681c2650cb4be40d60df7311e9872477f201c409ec",
+		},
+		{
+			desc:                  "Scale by 10e12 (allow more relays) truncating",
+			prevDifficultyHashHex: "0000000fffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
+			scalingRatio:          *big.NewRat(10e12, 1),
+
+			// Scaled hash != expected hash: Truncating
+			expectedScaledDifficultyHashHex: "9184e729fffffffffffffffffffffffffffffffffffffffffffffffff6e7b18d6000",
+			expectedNewDifficultyHashHex:    "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
+		},
+		// TODO_BETA(@red-0ne): See this comment: https://github.com/pokt-network/poktroll/pull/771#issuecomment-2364772430
+		// {
+		// 	desc:                  "Scale by 10e12 (allow more relays)",
+		// 	prevDifficultyHashHex: "000000000afebff0bcb24a7fffffffffffffffffffffffffffffffffffffffff",
+		// 	scalingRatio:          10e12,

+		// 	// Scaled hash != expected hash
+		// 	expectedScaledDifficultyHashHex: "63fffffffffffe4c079b8ffffffffffffffffffffffffffffffffff80000000000",
+		// 	expectedNewDifficultyHashHex:    "0001357c299a88ea715eae88eddcd3879fffffffffffffffffffffffffe00000",
+		// },
	}

	for _, test := range tests {
@@ -303,8 +322,7 @@ func TestRelayDifficulty_ScaleDifficultyTargetHash(t *testing.T) {
			expectedScaledHashBz, err := hex.DecodeString(test.expectedScaledDifficultyHashHex)
			require.NoError(t, err)

-			ratio := new(big.Float).SetFloat64(test.scalingRatio)
-			scaledDifficultyHash := ScaleRelayDifficultyHash(currHashBz, ratio)
+			scaledDifficultyHash := ScaleRelayDifficultyHash(currHashBz, &test.scalingRatio)

			isScaledHashAsExpected := bytes.Equal(expectedScaledHashBz, scaledDifficultyHash)
			require.True(t, isScaledHashAsExpected, "expected scaled (unbounded) difficulty hash %x, but got %x", expectedScaledHashBz, scaledDifficultyHash)
@@ -313,9 +331,8 @@ func TestRelayDifficulty_ScaleDifficultyTargetHash(t *testing.T) {
			expectedNewHashBz, err := hex.DecodeString(test.expectedNewDifficultyHashHex)
			require.NoError(t, err)

-			// Multiplying by 10e12 to avoid precision loss
-			targetNumRelays := uint64(test.scalingRatio * 10e12)
-			newRelaysEma := uint64(1 * 10e12)
+			targetNumRelays := test.scalingRatio.Num().Uint64()
+			newRelaysEma := test.scalingRatio.Denom().Uint64()

			newDifficultyHash := ComputeNewDifficultyTargetHash(currHashBz, targetNumRelays, newRelaysEma)
			isNewHashAsExpected := bytes.Equal(expectedNewHashBz, newDifficultyHash)
@@ -328,10 +345,12 @@ func TestRelayDifficulty_ScaleDifficultyTargetHash(t *testing.T) {
			if test.expectedNewDifficultyHashHex != test.expectedScaledDifficultyHashHex {
				require.NotEqual(t, test.scalingRatio, 1, "should not reach this code path if scaling ratio is 1")
				// New difficulty was padded
-				if test.scalingRatio < 1 {
-					require.Equal(t, len(scaledDifficultyHash), len(newDifficultyHash), "scaled down difficulty should have been padded")
-				} else if test.scalingRatio > 1 {
-					require.Greater(t, len(scaledDifficultyHash), len(newDifficultyHash), "scaled up difficulty should have been truncated")
+				if targetNumRelays < newRelaysEma {
+					require.Less(t, len(expectedScaledHashBz), len(newDifficultyHash))
+					require.Equal(t, len(expectedNewHashBz), len(newDifficultyHash), "scaled down difficulty should have been padded")
+				} else if targetNumRelays > newRelaysEma {
+					require.Greater(t, len(expectedScaledHashBz), len(newDifficultyHash))
+					require.Equal(t, len(expectedNewHashBz), len(newDifficultyHash), "scaled up difficulty should have been truncated")
				}
			}
		})
@@ -362,6 +381,34 @@ func TestRelayDifficulty_EnsureRelayMiningProbabilityIsProportional(t *testing.T) {
	}
}

+// This test ensures that a difficulty hash byte representation that is trimmed
+// of its meaningless zeros does not change its value.
+// See the discussion below for more details:
+// https://github.com/pokt-network/poktroll/pull/831#discussion_r1774183541
+func TestRelayDifficulty_TruncateRelayDifficultyHashToBaseSizeDoesNotChangeItsValue(t *testing.T) {
+	difficultyInt := big.NewInt(256)
+	expectedDifficultyHash := []byte{0x01, 0x00}
+	// big.Int#Bytes returns a big-endian representation, which means that any
+	// leftmost consecutive zeros are non-meaningful to the represented value.
+	difficultyHashBz := difficultyInt.Bytes()
+
+	// Ensure that big.Int does not produce non-meaningful bytes
+	require.Equal(t, expectedDifficultyHash, difficultyHashBz)
+
+	// Assuming that more non-meaningful bytes have been added to the difficulty,
+	// ensure that truncating them does not affect the value.
+	difficultyHashWithNonMeaningfulBz := append([]byte{0x00, 0x00, 0x00}, difficultyHashBz...)
+	nonTrimmedDifficultyInt := big.NewInt(0).SetBytes(difficultyHashWithNonMeaningfulBz)
+
+	trimmedDifficultyBz := bytes.TrimLeft(difficultyHashWithNonMeaningfulBz, "\x00")
+	trimmedDifficultyInt := big.NewInt(0).SetBytes(trimmedDifficultyBz)
+
+	require.Len(t, trimmedDifficultyBz, 2)
+	require.Equal(t, difficultyInt, trimmedDifficultyInt)
+	require.Equal(t, difficultyInt, nonTrimmedDifficultyInt)
+}
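
Note: the trim/pad operations above are value-preserving because a difficulty hash is just a big-endian integer, and the probability that a uniformly random 256-bit relay hash lands at or below the target is proportional to that integer. An illustrative helper (not part of the commit; the exact off-by-one convention may differ from the package's GetRelayDifficultyProbability):

// P(relayHash <= targetHash) = (targetHash + 1) / 2^256 for 32-byte hashes.
func relayProbability(targetHash []byte) *big.Rat {
	numerator := new(big.Int).Add(new(big.Int).SetBytes(targetHash), big.NewInt(1))
	denominator := new(big.Int).Lsh(big.NewInt(1), 256)
	return new(big.Rat).SetFrac(numerator, denominator)
}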

// scaleRelaysFromActualToTarget scales the number of relays (i.e. estimated offchain serviced relays)
// down to the number of expected on-chain volume applicable relays
func scaleRelaysFromActualToTarget(t *testing.T, relayDifficultyProbability *big.Rat, numRelays uint64) uint64 {
@@ -380,9 +427,9 @@ func scaleRelaysFromActualToTarget(t *testing.T, relayDifficultyProbability *big.Rat, numRelays uint64) uint64 {
func TestRelayDifficulty_EnsureRelayMiningMultiplierIsProportional(t *testing.T) {
	// Target Num Relays is the target number of volume applicable relays a session tree should have.
	const (
-		targetNumRelays   = uint64(10e2) // Target number of volume applicable relays
-		lowVolumeService  = 1e4          // Number of actual off-chain relays serviced by a RelayMiner
-		highVolumeService = 1e6          // Number of actual off-chain relays serviced by a RelayMiner
+		targetNumRelays   = uint64(10e3) // Target number of volume applicable relays
+		lowVolumeService  = 1e5          // Number of actual off-chain relays serviced by a RelayMiner
+		highVolumeService = 1e7          // Number of actual off-chain relays serviced by a RelayMiner
		allowableDelta    = 0.05         // Allow a 5% error margin between estimated probabilities and results
	)
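
Note (illustrative arithmetic, not from the commit): with these constants, both the low- and high-volume services should converge to roughly targetNumRelays volume-applicable relays; only the per-relay probability differs.

// For a well-tuned difficulty, P(applicable) = targetNumRelays / numServiced:
//   lowVolumeService:  10e3 / 1e5 = 1/10   -> ~10,000 of 100,000 relays qualify
//   highVolumeService: 10e3 / 1e7 = 1/1000 -> ~10,000 of 10,000,000 relays qualify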

2 changes: 1 addition & 1 deletion x/application/types/message_update_param.go
@@ -8,7 +8,7 @@ import (
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
)

-var _ cosmostypes.Msg = &MsgUpdateParam{}
+var _ cosmostypes.Msg = (*MsgUpdateParam)(nil)
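
Note: both spellings exist only to assert at compile time that *MsgUpdateParam satisfies cosmostypes.Msg; the typed-nil form avoids constructing a value at all and is the common convention:

var _ cosmostypes.Msg = &MsgUpdateParam{}      // old: composite literal
var _ cosmostypes.Msg = (*MsgUpdateParam)(nil) // new: typed nil pointer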

func NewMsgUpdateParam(authority string, name string, asType any) *MsgUpdateParam {
var asTypeIface isMsgUpdateParam_AsType