diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/limit_params.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/limit_params.ts index d78b9120d7..827006640c 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/limit_params.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/limit_params.ts @@ -1,3 +1,4 @@ +import { Duration, DurationSDKType } from "../../google/protobuf/duration"; import * as _m0 from "protobufjs/minimal"; import { DeepPartial } from "../../helpers"; /** LimitParams defines rate limit params on a denom. */ @@ -34,10 +35,10 @@ export interface LimitParamsSDKType { export interface Limiter { /** - * period_sec is the rolling time period for which the limit applies + * period is the rolling time period for which the limit applies * e.g. 3600 (an hour) */ - periodSec: number; + period?: Duration; /** * baseline_minimum is the minimum maximum withdrawal coin amount within the * time period. @@ -57,10 +58,10 @@ export interface Limiter { export interface LimiterSDKType { /** - * period_sec is the rolling time period for which the limit applies + * period is the rolling time period for which the limit applies * e.g. 3600 (an hour) */ - period_sec: number; + period?: DurationSDKType; /** * baseline_minimum is the minimum maximum withdrawal coin amount within the * time period. 
@@ -134,7 +135,7 @@ export const LimitParams = { function createBaseLimiter(): Limiter { return { - periodSec: 0, + period: undefined, baselineMinimum: new Uint8Array(), baselineTvlPpm: 0 }; @@ -142,8 +143,8 @@ function createBaseLimiter(): Limiter { export const Limiter = { encode(message: Limiter, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.periodSec !== 0) { - writer.uint32(16).uint32(message.periodSec); + if (message.period !== undefined) { + Duration.encode(message.period, writer.uint32(10).fork()).ldelim(); } if (message.baselineMinimum.length !== 0) { @@ -166,8 +167,8 @@ export const Limiter = { const tag = reader.uint32(); switch (tag >>> 3) { - case 2: - message.periodSec = reader.uint32(); + case 1: + message.period = Duration.decode(reader, reader.uint32()); break; case 3: @@ -189,7 +190,7 @@ export const Limiter = { fromPartial(object: DeepPartial): Limiter { const message = createBaseLimiter(); - message.periodSec = object.periodSec ?? 0; + message.period = object.period !== undefined && object.period !== null ? Duration.fromPartial(object.period) : undefined; message.baselineMinimum = object.baselineMinimum ?? new Uint8Array(); message.baselineTvlPpm = object.baselineTvlPpm ?? 0; return message; diff --git a/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/query.ts b/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/query.ts index def00e01da..d6afad42db 100644 --- a/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/query.ts +++ b/indexer/packages/v4-protos/src/codegen/dydxprotocol/ratelimit/query.ts @@ -1,4 +1,5 @@ import { LimitParams, LimitParamsSDKType } from "./limit_params"; +import { Duration, DurationSDKType } from "../../google/protobuf/duration"; import * as _m0 from "protobufjs/minimal"; import { DeepPartial } from "../../helpers"; /** ListLimitParamsRequest is a request type of the ListLimitParams RPC method. 
*/ @@ -46,13 +47,13 @@ export interface QueryCapacityByDenomRequestSDKType { /** CapacityResult is a specific rate limit for a denom. */ export interface CapacityResult { - periodSec: number; + period?: Duration; capacity: Uint8Array; } /** CapacityResult is a specific rate limit for a denom. */ export interface CapacityResultSDKType { - period_sec: number; + period?: DurationSDKType; capacity: Uint8Array; } /** @@ -206,15 +207,15 @@ export const QueryCapacityByDenomRequest = { function createBaseCapacityResult(): CapacityResult { return { - periodSec: 0, + period: undefined, capacity: new Uint8Array() }; } export const CapacityResult = { encode(message: CapacityResult, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.periodSec !== 0) { - writer.uint32(8).uint32(message.periodSec); + if (message.period !== undefined) { + Duration.encode(message.period, writer.uint32(10).fork()).ldelim(); } if (message.capacity.length !== 0) { @@ -234,7 +235,7 @@ export const CapacityResult = { switch (tag >>> 3) { case 1: - message.periodSec = reader.uint32(); + message.period = Duration.decode(reader, reader.uint32()); break; case 2: @@ -252,7 +253,7 @@ export const CapacityResult = { fromPartial(object: DeepPartial): CapacityResult { const message = createBaseCapacityResult(); - message.periodSec = object.periodSec ?? 0; + message.period = object.period !== undefined && object.period !== null ? Duration.fromPartial(object.period) : undefined; message.capacity = object.capacity ?? 
new Uint8Array(); return message; } diff --git a/proto/dydxprotocol/ratelimit/limit_params.proto b/proto/dydxprotocol/ratelimit/limit_params.proto index 5df88b3da4..d9d01ed8b9 100644 --- a/proto/dydxprotocol/ratelimit/limit_params.proto +++ b/proto/dydxprotocol/ratelimit/limit_params.proto @@ -2,6 +2,7 @@ syntax = "proto3"; package dydxprotocol.ratelimit; import "gogoproto/gogo.proto"; +import "google/protobuf/duration.proto"; option go_package = "github.com/dydxprotocol/v4-chain/protocol/x/ratelimit/types"; @@ -17,9 +18,10 @@ message LimitParams { // Limiter defines one rate-limiter on a specfic denom. message Limiter { - // period_sec is the rolling time period for which the limit applies + // period is the rolling time period for which the limit applies // e.g. 3600 (an hour) - uint32 period_sec = 2; + google.protobuf.Duration period = 1 + [ (gogoproto.nullable) = false, (gogoproto.stdduration) = true ]; // baseline_minimum is the minimum maximum withdrawal coin amount within the // time period. // e.g. 100_000_000_000 uusdc for 100k USDC; 5e22 adv4tnt for 50k DV4TNT diff --git a/proto/dydxprotocol/ratelimit/query.proto b/proto/dydxprotocol/ratelimit/query.proto index d50b9cd1ea..f357f2d860 100644 --- a/proto/dydxprotocol/ratelimit/query.proto +++ b/proto/dydxprotocol/ratelimit/query.proto @@ -4,6 +4,7 @@ package dydxprotocol.ratelimit; import "gogoproto/gogo.proto"; import "google/api/annotations.proto"; import "dydxprotocol/ratelimit/limit_params.proto"; +import "google/protobuf/duration.proto"; option go_package = "github.com/dydxprotocol/v4-chain/protocol/x/ratelimit/types"; @@ -36,7 +37,8 @@ message QueryCapacityByDenomRequest { string denom = 1; } // CapacityResult is a specific rate limit for a denom. 
message CapacityResult { - uint32 period_sec = 1; + google.protobuf.Duration period = 1 + [ (gogoproto.nullable) = false, (gogoproto.stdduration) = true ]; bytes capacity = 2 [ (gogoproto.customtype) = "github.com/dydxprotocol/v4-chain/protocol/dtypes.SerializableInt", diff --git a/protocol/app/app.go b/protocol/app/app.go index c965bf41d6..b270b05682 100644 --- a/protocol/app/app.go +++ b/protocol/app/app.go @@ -588,10 +588,22 @@ func New( transferModule := transfer.NewAppModule(app.TransferKeeper) transferIBCModule := transfer.NewIBCModule(app.TransferKeeper) + app.BlockTimeKeeper = *blocktimemodulekeeper.NewKeeper( + appCodec, + keys[blocktimemoduletypes.StoreKey], + // set the governance and delaymsg module accounts as the authority for conducting upgrades + []string{ + lib.GovModuleAddress.String(), + delaymsgmoduletypes.ModuleAddress.String(), + }, + ) + blockTimeModule := blocktimemodule.NewAppModule(appCodec, app.BlockTimeKeeper) + app.RatelimitKeeper = *ratelimitmodulekeeper.NewKeeper( appCodec, keys[ratelimitmoduletypes.StoreKey], app.BankKeeper, + app.BlockTimeKeeper, // set the governance and delaymsg module accounts as the authority for conducting upgrades []string{ lib.GovModuleAddress.String(), @@ -792,17 +804,6 @@ func New( ) assetsModule := assetsmodule.NewAppModule(appCodec, app.AssetsKeeper) - app.BlockTimeKeeper = *blocktimemodulekeeper.NewKeeper( - appCodec, - keys[blocktimemoduletypes.StoreKey], - // set the governance and delaymsg module accounts as the authority for conducting upgrades - []string{ - lib.GovModuleAddress.String(), - delaymsgmoduletypes.ModuleAddress.String(), - }, - ) - blockTimeModule := blocktimemodule.NewAppModule(appCodec, app.BlockTimeKeeper) - app.DelayMsgKeeper = *delaymsgmodulekeeper.NewKeeper( appCodec, keys[delaymsgmoduletypes.StoreKey], diff --git a/protocol/lib/metrics/constants.go b/protocol/lib/metrics/constants.go index 86932182ac..3523dabb5a 100644 --- a/protocol/lib/metrics/constants.go +++ 
b/protocol/lib/metrics/constants.go @@ -397,6 +397,11 @@ const ( ValidatorNumFills = "validator_num_fills" ValidatorNumMatchedTakerOrders = "validator_num_matched_taker_orders" ValidatorVolumeQuoteQuantums = "validator_volume_quote_quantums" + + // x/ratelimit + Capacity = "capacity" + RateLimitDenom = "rate_limit_denom" + LimiterIndex = "limiter_index" ) const ( diff --git a/protocol/x/blocktime/keeper/keeper.go b/protocol/x/blocktime/keeper/keeper.go index 6b1bd39cae..67f4160e8b 100644 --- a/protocol/x/blocktime/keeper/keeper.go +++ b/protocol/x/blocktime/keeper/keeper.go @@ -79,6 +79,12 @@ func (k Keeper) GetPreviousBlockInfo(ctx sdk.Context) types.BlockInfo { return info } +// GetTimeSinceLastBlock returns the time delta between the current block time and the last block time. +func (k Keeper) GetTimeSinceLastBlock(ctx sdk.Context) time.Duration { + prevBlockInfo := k.GetPreviousBlockInfo(ctx) + return ctx.BlockTime().Sub(prevBlockInfo.Timestamp) +} + func (k Keeper) SetPreviousBlockInfo(ctx sdk.Context, info *types.BlockInfo) { store := ctx.KVStore(k.storeKey) b := k.cdc.MustMarshal(info) diff --git a/protocol/x/blocktime/keeper/keeper_test.go b/protocol/x/blocktime/keeper/keeper_test.go index 90d66e671f..0e13dad1dc 100644 --- a/protocol/x/blocktime/keeper/keeper_test.go +++ b/protocol/x/blocktime/keeper/keeper_test.go @@ -243,3 +243,46 @@ func TestGetDowntimeInfoFor(t *testing.T) { }) } } + +func TestGetTimeSinceLastBlock(t *testing.T) { + testPrevBlockHeight := uint32(5) + tests := map[string]struct { + prevBlockTime time.Time + currBlockTime time.Time + expectedTimeSinceLastBlock time.Duration + }{ + "2 sec": { + prevBlockTime: time.Unix(100, 0).UTC(), + currBlockTime: time.Unix(102, 0).UTC(), + expectedTimeSinceLastBlock: time.Second * 2, + }, + "Realistic values": { + prevBlockTime: time.Unix(1_704_827_023, 123_000_000).UTC(), + currBlockTime: time.Unix(1_704_827_024, 518_000_000).UTC(), + expectedTimeSinceLastBlock: time.Second*1 + 
time.Nanosecond*395_000_000, + }, + } + + for name, tc := range tests { + t.Run(name, func(t *testing.T) { + tApp := testapp.NewTestAppBuilder(t).Build() + tApp.InitChain() + + ctx := tApp.AdvanceToBlock( + testPrevBlockHeight, + testapp.AdvanceToBlockOptions{ + BlockTime: tc.prevBlockTime, + }, + ) + + k := tApp.App.BlockTimeKeeper + + actual := k.GetTimeSinceLastBlock(ctx.WithBlockTime(tc.currBlockTime)) + require.Equal( + t, + tc.expectedTimeSinceLastBlock, + actual, + ) + }) + } +} diff --git a/protocol/x/blocktime/module.go b/protocol/x/blocktime/module.go index ebd2b75551..ea5894459a 100644 --- a/protocol/x/blocktime/module.go +++ b/protocol/x/blocktime/module.go @@ -2,10 +2,11 @@ package blocktime import ( "context" - "cosmossdk.io/core/appmodule" "encoding/json" "fmt" + "cosmossdk.io/core/appmodule" + "github.com/grpc-ecosystem/grpc-gateway/runtime" "github.com/spf13/cobra" diff --git a/protocol/x/ratelimit/keeper/keeper.go b/protocol/x/ratelimit/keeper/keeper.go index 7843749e98..6eca7da2ba 100644 --- a/protocol/x/ratelimit/keeper/keeper.go +++ b/protocol/x/ratelimit/keeper/keeper.go @@ -3,23 +3,29 @@ package keeper import ( "fmt" "math/big" + "time" errorsmod "cosmossdk.io/errors" "cosmossdk.io/log" "cosmossdk.io/store/prefix" storetypes "cosmossdk.io/store/types" "github.com/cosmos/cosmos-sdk/codec" + "github.com/cosmos/cosmos-sdk/telemetry" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/dydxprotocol/v4-chain/protocol/dtypes" "github.com/dydxprotocol/v4-chain/protocol/lib" + "github.com/dydxprotocol/v4-chain/protocol/lib/metrics" "github.com/dydxprotocol/v4-chain/protocol/x/ratelimit/types" + ratelimitutil "github.com/dydxprotocol/v4-chain/protocol/x/ratelimit/util" + gometrics "github.com/hashicorp/go-metrics" ) type ( Keeper struct { - cdc codec.BinaryCodec - storeKey storetypes.StoreKey - bankKeeper types.BankKeeper + cdc codec.BinaryCodec + storeKey storetypes.StoreKey + bankKeeper types.BankKeeper + blockTimeKeeper types.BlockTimeKeeper // 
TODO(CORE-824): Implement `x/ratelimit` keeper @@ -32,13 +38,15 @@ func NewKeeper( cdc codec.BinaryCodec, storeKey storetypes.StoreKey, bankKeeper types.BankKeeper, + blockTimeKeeper types.BlockTimeKeeper, authorities []string, ) *Keeper { return &Keeper{ - cdc: cdc, - storeKey: storeKey, - bankKeeper: bankKeeper, - authorities: lib.UniqueSliceToSet(authorities), + cdc: cdc, + storeKey: storeKey, + bankKeeper: bankKeeper, + blockTimeKeeper: blockTimeKeeper, + authorities: lib.UniqueSliceToSet(authorities), } } @@ -116,28 +124,6 @@ func (k Keeper) ProcessDeposit( }) } -// GetBaseline returns the current capacity baseline for the given limiter. -// `baseline` formula: -// -// baseline = max(baseline_minimum, baseline_tvl_ppm * current_tvl) -func (k Keeper) GetBaseline( - ctx sdk.Context, - denom string, - limiter types.Limiter, -) *big.Int { - // Get the current TVL. - supply := k.bankKeeper.GetSupply(ctx, denom) - currentTVL := supply.Amount.BigInt() - - return lib.BigMax( - limiter.BaselineMinimum.BigInt(), - lib.BigIntMulPpm( - currentTVL, - limiter.BaselineTvlPpm, - ), - ) -} - // SetLimitParams sets `LimitParams` for the given denom. // Also overwrites the existing `DenomCapacity` object for the denom with a default `capacity_list` of the // same length as the `limiters` list. Each `capacity` is initialized to the current baseline. @@ -161,11 +147,12 @@ func (k Keeper) SetLimitParams( return } + currentTvl := k.bankKeeper.GetSupply(ctx, limitParams.Denom) // Initialize the capacity list with the current baseline. newCapacityList := make([]dtypes.SerializableInt, len(limitParams.Limiters)) for i, limiter := range limitParams.Limiters { newCapacityList[i] = dtypes.NewIntFromBigInt( - k.GetBaseline(ctx, limitParams.Denom, limiter), + ratelimitutil.GetBaseline(currentTvl.Amount.BigInt(), limiter), ) } // Set correspondong `DenomCapacity` in state. 
@@ -216,6 +203,85 @@ func (k Keeper) SetDenomCapacity( b := k.cdc.MustMarshal(&denomCapacity) store.Set(key, b) } + + // Emit telemetry for the new capacity list. + for i, capacity := range denomCapacity.CapacityList { + telemetry.SetGaugeWithLabels( + []string{types.ModuleName, metrics.Capacity}, + metrics.GetMetricValueFromBigInt(capacity.BigInt()), + []gometrics.Label{ + metrics.GetLabelForStringValue(metrics.RateLimitDenom, denomCapacity.Denom), + metrics.GetLabelForIntValue(metrics.LimiterIndex, i), + }, + ) + } +} + +// UpdateAllCapacitiesEndBlocker is called during the EndBlocker to update the capacity for all limit params. +func (k Keeper) UpdateAllCapacitiesEndBlocker( + ctx sdk.Context, +) { + timeSinceLastBlock := k.blockTimeKeeper.GetTimeSinceLastBlock(ctx) + + if timeSinceLastBlock < 0 { + // This violates an invariant (current block time > prev block time). + // Since this is in the `EndBlocker`, we log an error instead of panicking. + k.Logger(ctx).Error( + fmt.Sprintf( + "timeSinceLastBlock (%v) <= 0; skipping UpdateAllCapacitiesEndBlocker", + timeSinceLastBlock, + ), + ) + return + } + + // Iterate through all the limit params in state. + store := prefix.NewStore(ctx.KVStore(k.storeKey), []byte(types.LimitParamsKeyPrefix)) + iterator := storetypes.KVStorePrefixIterator(store, []byte{}) + + defer iterator.Close() + + for ; iterator.Valid(); iterator.Next() { + var limitParams types.LimitParams + k.cdc.MustUnmarshal(iterator.Value(), &limitParams) + k.updateCapacityForLimitParams(ctx, limitParams, timeSinceLastBlock) + } +} + +// updateCapacityForLimitParams calculates current baseline for a denom and recovers some amount of capacity +// towards baseline. +// Assumes that the `LimitParams` exist in state. 
+func (k Keeper) updateCapacityForLimitParams( + ctx sdk.Context, + limitParams types.LimitParams, + timeSinceLastBlock time.Duration, +) { + tvl := k.bankKeeper.GetSupply(ctx, limitParams.Denom) + + capacityList := k.GetDenomCapacity(ctx, limitParams.Denom).CapacityList + + newCapacityList, err := ratelimitutil.CalculateNewCapacityList( + tvl.Amount.BigInt(), + limitParams, + capacityList, + timeSinceLastBlock, + ) + + if err != nil { + k.Logger(ctx).Error( + fmt.Sprintf( + "error calculating new capacity list for denom %v: %v. Skipping update.", + limitParams.Denom, + err, + ), + ) + return + } + + k.SetDenomCapacity(ctx, types.DenomCapacity{ + Denom: limitParams.Denom, + CapacityList: newCapacityList, + }) } // GetDenomCapacity returns `DenomCapacity` for the given denom. diff --git a/protocol/x/ratelimit/keeper/keeper_test.go b/protocol/x/ratelimit/keeper/keeper_test.go index a52be4a4fd..9b340de71d 100644 --- a/protocol/x/ratelimit/keeper/keeper_test.go +++ b/protocol/x/ratelimit/keeper/keeper_test.go @@ -1,21 +1,26 @@ package keeper_test import ( - sdkmath "cosmossdk.io/math" "math/big" "testing" + "time" + + sdkmath "cosmossdk.io/math" cometbfttypes "github.com/cometbft/cometbft/types" sdk "github.com/cosmos/cosmos-sdk/types" banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" "github.com/dydxprotocol/v4-chain/protocol/dtypes" testapp "github.com/dydxprotocol/v4-chain/protocol/testutil/app" + big_testutil "github.com/dydxprotocol/v4-chain/protocol/testutil/big" + blocktimetypes "github.com/dydxprotocol/v4-chain/protocol/x/blocktime/types" "github.com/dydxprotocol/v4-chain/protocol/x/ratelimit/types" "github.com/stretchr/testify/require" ) const ( testDenom = "ibc/xxx" + testDenom2 = "testdenom2" testAddress1 = "dydx16h7p7f4dysrgtzptxx2gtpt5d8t834g9dj830z" testAddress2 = "dydx168pjt8rkru35239fsqvz7rzgeclakp49zx3aum" testAddress3 = "dydx1fjg6zp6vv8t9wvy4lps03r5l4g7tkjw9wvmh70" @@ -70,12 +75,12 @@ func TestSetGetLimitParams_Success(t *testing.T) { denom: 
testDenom, limiters: []types.Limiter{ { - PeriodSec: 3_600, + Period: 3_600 * time.Second, BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens (assuming 6 decimals) BaselineTvlPpm: 10_000, // 1% }, { - PeriodSec: 86_400, + Period: 86_400 * time.Second, BaselineMinimum: dtypes.NewInt(1_000_000_000_000), // 1m tokens (assuming 6 decimals) BaselineTvlPpm: 100_000, // 10% }, @@ -89,12 +94,12 @@ func TestSetGetLimitParams_Success(t *testing.T) { denom: testDenom, limiters: []types.Limiter{ { - PeriodSec: 3_600, + Period: 3_600 * time.Second, BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens (assuming 6 decimals) BaselineTvlPpm: 10_000, // 1% }, { - PeriodSec: 86_400, + Period: 86_400 * time.Second, BaselineMinimum: dtypes.NewInt(1_000_000_000_000), // 1m tokens (assuming 6 decimals) BaselineTvlPpm: 100_000, // 10% }, @@ -128,12 +133,12 @@ func TestSetGetLimitParams_Success(t *testing.T) { denom: testDenom, limiters: []types.Limiter{ { - PeriodSec: 3_600, + Period: 3_600 * time.Second, BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens (assuming 6 decimals) BaselineTvlPpm: 50_000, // 5% }, { - PeriodSec: 86_400, + Period: 86_400 * time.Second, BaselineMinimum: dtypes.NewInt(1_000_000_000_000), // 1m tokens (assuming 6 decimals) BaselineTvlPpm: 200_000, // 20% }, @@ -167,12 +172,12 @@ func TestSetGetLimitParams_Success(t *testing.T) { denom: testDenom, limiters: []types.Limiter{ { - PeriodSec: 3_600, + Period: 3_600 * time.Second, BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens (assuming 6 decimals) BaselineTvlPpm: 100_000, // 10% }, { - PeriodSec: 86_400, + Period: 86_400 * time.Second, BaselineMinimum: dtypes.NewInt(1_000_000_000_000), // 1m tokens (assuming 6 decimals) BaselineTvlPpm: 1_000_000, // 100% }, @@ -268,193 +273,525 @@ func TestSetGetLimitParams_Success(t *testing.T) { } } -func TestGetBaseline(t *testing.T) { +func TestUpdateAllCapacitiesEndBlocker(t *testing.T) { tests := map[string]struct { - denom 
string - balances []banktypes.Balance - limiter types.Limiter - expectedBaseline *big.Int + balances []banktypes.Balance // For initializing the current supply + limitParamsList []types.LimitParams + prevBlockTime time.Time + blockTime time.Time + initDenomCapacityList []types.DenomCapacity + expectedDenomCapacityList []types.DenomCapacity }{ - "max(1% of TVL, 100k token), TVL = 5M token": { - denom: testDenom, + "One denom, prev capacity equals baseline": { balances: []banktypes.Balance{ { Address: testAddress1, Coins: sdk.Coins{ { Denom: testDenom, - Amount: sdkmath.NewInt(1_000_000_000_000), // 1M token + Amount: sdkmath.NewInt(25_000_000_000_000), // 25M token (assuming 6 decimals) }, }, }, + }, + limitParamsList: []types.LimitParams{ { - Address: testAddress2, - Coins: sdk.Coins{ + Denom: testDenom, + Limiters: []types.Limiter{ + // baseline = 25M * 1% = 250k tokens { - Denom: testDenom, - Amount: sdkmath.NewInt(4_000_000_000_000), // 4M token + Period: 3_600 * time.Second, + BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens (assuming 6 decimals) + BaselineTvlPpm: 10_000, // 1% }, + // baseline = 25M * 10% = 2.5M tokens + { + Period: 86_400 * time.Second, + BaselineMinimum: dtypes.NewInt(1_000_000_000_000), // 1M tokens (assuming 6 decimals) + BaselineTvlPpm: 100_000, // 10% + }, + }, + }, + }, + prevBlockTime: time.Unix(1000, 0).In(time.UTC), + blockTime: time.Unix(1001, 0).In(time.UTC), + initDenomCapacityList: []types.DenomCapacity{ + { + Denom: testDenom, + CapacityList: []dtypes.SerializableInt{ + dtypes.NewInt(250_000_000_000), // 250k tokens, which equals baseline + dtypes.NewInt(2_500_000_000_000), // 2.5M tokens, which equals baseline }, }, }, - limiter: types.Limiter{ - PeriodSec: 3_600, - BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k token - BaselineTvlPpm: 10_000, // 1% + expectedDenomCapacityList: []types.DenomCapacity{ + { + Denom: testDenom, + CapacityList: []dtypes.SerializableInt{ + dtypes.NewInt(250_000_000_000), // 
250k tokens + dtypes.NewInt(2_500_000_000_000), // 2.5M tokens + }, + }, }, - expectedBaseline: big.NewInt(100_000_000_000), // 100k token (baseline minimum) }, - "max(1% of TVL, 100k token), TVL = 15M token": { - denom: testDenom, + "One denom, prev capacity < baseline": { balances: []banktypes.Balance{ { Address: testAddress1, Coins: sdk.Coins{ { Denom: testDenom, - Amount: sdkmath.NewInt(1_000_000_000_000), // 1M token + Amount: sdkmath.NewInt(25_000_000_000_000), // 25M token (assuming 6 decimals) }, }, }, + }, + limitParamsList: []types.LimitParams{ { - Address: testAddress2, - Coins: sdk.Coins{ + Denom: testDenom, + Limiters: []types.Limiter{ + // baseline = 25M * 1% = 250k tokens { - Denom: testDenom, - Amount: sdkmath.NewInt(4_000_000_000_000), // 4M token + Period: 3_600 * time.Second, + BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens (assuming 6 decimals) + BaselineTvlPpm: 10_000, // 1% }, + // baseline = 25M * 10% = 2.5M tokens + { + Period: 86_400 * time.Second, + BaselineMinimum: dtypes.NewInt(1_000_000_000_000), // 1M tokens (assuming 6 decimals) + BaselineTvlPpm: 100_000, // 10% + }, + }, + }, + }, + prevBlockTime: time.Unix(1000, 0).In(time.UTC), + blockTime: time.Unix(1001, 0).In(time.UTC), + initDenomCapacityList: []types.DenomCapacity{ + { + Denom: testDenom, + CapacityList: []dtypes.SerializableInt{ + dtypes.NewInt(99_000_000_000), // 99k tokens, < baseline (250k) + dtypes.NewInt(990_000_000_000), // 0.99M tokens, < baseline (2.5M) }, }, + }, + expectedDenomCapacityList: []types.DenomCapacity{ + { + Denom: testDenom, + CapacityList: []dtypes.SerializableInt{ + dtypes.NewInt(99_069_444_444), // recovered by 1/3600 * 250k = 69.4444 tokens + dtypes.NewInt(990_028_935_185), // recovered by 1/86400 * 2.5M = 28.9351 tokens + }, + }, + }, + }, + "One denom, prev capacity < baseline, 18 decimals": { + balances: []banktypes.Balance{ { - Address: testAddress3, + Address: testAddress1, Coins: sdk.Coins{ { - Denom: testDenom, - Amount: 
sdkmath.NewInt(10_000_000_000_000), // 10M token + Denom: testDenom, + Amount: sdkmath.NewIntFromBigInt( + big_testutil.Int64MulPow10(25, 24), // 25M tokens (assuming 18 decimals) + ), + }, + }, + }, + }, + limitParamsList: []types.LimitParams{ + { + Denom: testDenom, + Limiters: []types.Limiter{ + // baseline = 25M * 1% = 250k tokens + { + Period: 3_600 * time.Second, + BaselineMinimum: dtypes.NewIntFromBigInt( + big_testutil.Int64MulPow10(100_000, 18), // 100k tokens(assuming 18 decimals) + ), + BaselineTvlPpm: 10_000, // 1% + }, + // baseline = 25M * 10% = 2.5M tokens + { + Period: 86_400 * time.Second, + BaselineMinimum: dtypes.NewIntFromBigInt( + big_testutil.Int64MulPow10(1_000_000, 18), // 1M tokens(assuming 18 decimals) + ), + BaselineTvlPpm: 100_000, // 10% }, }, }, }, - limiter: types.Limiter{ - PeriodSec: 3_600, - BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k token - BaselineTvlPpm: 10_000, // 1% + prevBlockTime: time.Unix(1000, 0).In(time.UTC), + blockTime: time.Unix(1001, 0).In(time.UTC), + initDenomCapacityList: []types.DenomCapacity{ + { + Denom: testDenom, + CapacityList: []dtypes.SerializableInt{ + dtypes.NewIntFromBigInt( + big_testutil.Int64MulPow10(99_000, 18), + ), // 99k tokens < baseline (250k) + dtypes.NewIntFromBigInt( + big_testutil.Int64MulPow10(990_000, 18), + ), // 0.99M tokens, < baseline (2.5M) + }, + }, + }, + expectedDenomCapacityList: []types.DenomCapacity{ + { + Denom: testDenom, + CapacityList: []dtypes.SerializableInt{ + dtypes.NewIntFromBigInt( + big_testutil.MustFirst(new(big.Int).SetString("99069444444444444444444", 10)), + ), // recovered by 1/3600 * 250k ~= 69.4444 tokens + dtypes.NewIntFromBigInt( + big_testutil.MustFirst(new(big.Int).SetString("990028935185185185185185", 10)), + ), // recovered by 1/86400 * 2.5M ~= 28.9351 tokens + }, + }, }, - expectedBaseline: big.NewInt(150_000_000_000), // 150k token (1% of 15m) }, - "max(1% of TVL, 100k token), TVL = ~15M token, rounds down": { - denom: testDenom, + "One 
denom, prev capacity = 0": { balances: []banktypes.Balance{ { Address: testAddress1, Coins: sdk.Coins{ { Denom: testDenom, - Amount: sdkmath.NewInt(1_000_000_000_000), // 1M token + Amount: sdkmath.NewInt(1_000_000_000_000), // 1M token (assuming 6 decimals) }, }, }, + }, + limitParamsList: []types.LimitParams{ { - Address: testAddress2, - Coins: sdk.Coins{ + Denom: testDenom, + Limiters: []types.Limiter{ + // baseline = baseline minimum = 100k tokens { - Denom: testDenom, - Amount: sdkmath.NewInt(4_000_123_456_777), // ~4M token + Period: 3_600 * time.Second, + BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens (assuming 6 decimals) + BaselineTvlPpm: 10_000, // 1% + }, + // baseline = baseline minimum = 1M tokens + { + Period: 86_400 * time.Second, + BaselineMinimum: dtypes.NewInt(1_000_000_000_000), // 1M tokens (assuming 6 decimals) + BaselineTvlPpm: 100_000, // 10% }, }, }, + }, + prevBlockTime: time.Unix(1000, 0).In(time.UTC), + blockTime: time.Unix(1001, 0).In(time.UTC), + initDenomCapacityList: []types.DenomCapacity{ + { + Denom: testDenom, + CapacityList: []dtypes.SerializableInt{ + dtypes.NewInt(0), // 0 Capacity + dtypes.NewInt(0), // 0 Capacity + }, + }, + }, + expectedDenomCapacityList: []types.DenomCapacity{ + { + Denom: testDenom, + CapacityList: []dtypes.SerializableInt{ + dtypes.NewInt(27_777_777), // recovered by 1/3600 * 100k ~= 27.7778 tokens + dtypes.NewInt(11_574_074), // recovered by 1/86400 * 1M ~= 11.5741 tokens + }, + }, + }, + }, + "One denom, baseline < prev capacity < 2 * baseline": { + balances: []banktypes.Balance{ { - Address: testAddress3, + Address: testAddress1, Coins: sdk.Coins{ { Denom: testDenom, - Amount: sdkmath.NewInt(10_200_000_000_000), // ~10M token + Amount: sdkmath.NewInt(20_000_000_000_000), // 20M token (assuming 6 decimals) + }, + }, + }, + }, + limitParamsList: []types.LimitParams{ + { + Denom: testDenom, + Limiters: []types.Limiter{ + // baseline = 20M * 1% = 200k tokens + { + Period: 3_600 * 
time.Second, + BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens (assuming 6 decimals) + BaselineTvlPpm: 10_000, // 1% + }, + // baseline = 20M * 10% = 2M tokens + { + Period: 86_400 * time.Second, + BaselineMinimum: dtypes.NewInt(1_000_000_000_000), // 1M tokens (assuming 6 decimals) + BaselineTvlPpm: 100_000, // 10% }, }, }, }, - limiter: types.Limiter{ - PeriodSec: 3_600, - BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k token - BaselineTvlPpm: 10_000, // 1% + prevBlockTime: time.Unix(1000, 0).In(time.UTC), + blockTime: time.Unix(1001, 0).In(time.UTC), + initDenomCapacityList: []types.DenomCapacity{ + { + Denom: testDenom, + CapacityList: []dtypes.SerializableInt{ + dtypes.NewInt(329_000_000_000), + dtypes.NewInt(3_500_000_000_000), + }, + }, + }, + expectedDenomCapacityList: []types.DenomCapacity{ + { + Denom: testDenom, + CapacityList: []dtypes.SerializableInt{ + dtypes.NewInt(328_944_444_445), // recovered by 1/3600 * 200k ~= 55.5555555556 + dtypes.NewInt(3_499_976_851_852), // recovered by 1/86400 * 2M ~= 23.1481481482 + }, + }, }, - expectedBaseline: big.NewInt(152_001_234_567), // ~152k token (1% of 15.2m) }, - "max(10% of TVL, 1 million), TVL = 20M token": { - denom: testDenom, + "One denom, prev capacity > 2 * baseline": { balances: []banktypes.Balance{ { Address: testAddress1, Coins: sdk.Coins{ { Denom: testDenom, - Amount: sdkmath.NewInt(6_000_000_000_000), // 6M token + Amount: sdkmath.NewInt(20_000_000_000_000), // 20M token (assuming 6 decimals) + }, + }, + }, + }, + limitParamsList: []types.LimitParams{ + { + Denom: testDenom, + Limiters: []types.Limiter{ + // baseline = 20M * 1% = 200k tokens + { + Period: 3_600 * time.Second, + BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens (assuming 6 decimals) + BaselineTvlPpm: 10_000, // 1% + }, + // baseline = 20M * 10% = 2M tokens + { + Period: 86_400 * time.Second, + BaselineMinimum: dtypes.NewInt(1_000_000_000_000), // 1M tokens (assuming 6 decimals) + 
BaselineTvlPpm: 100_000, // 10% }, }, }, + }, + prevBlockTime: time.Unix(1000, 0).In(time.UTC), + blockTime: time.Unix(1001, 0).In(time.UTC), + initDenomCapacityList: []types.DenomCapacity{ { - Address: testAddress2, + Denom: testDenom, + CapacityList: []dtypes.SerializableInt{ + dtypes.NewInt(529_000_000_000), // 529k tokens > 2 * baseline (200k) + dtypes.NewInt(4_500_000_000_000), // 4.5M tokens > 2 * baseline (2) + }, + }, + }, + expectedDenomCapacityList: []types.DenomCapacity{ + { + Denom: testDenom, + CapacityList: []dtypes.SerializableInt{ + dtypes.NewInt(528_908_611_112), // recovered by 1/3600 * (529k - 200k) ~= 91.389 + dtypes.NewInt(4_499_971_064_815), // recovered by 1/86400 * (4.5M - 2M) ~= 28.935 + }, + }, + }, + }, + "Two denoms, mix of values from above cases": { + balances: []banktypes.Balance{ + { + Address: testAddress1, Coins: sdk.Coins{ { Denom: testDenom, - Amount: sdkmath.NewInt(4_000_000_000_000), // 4M token + Amount: sdkmath.NewInt(20_000_000_000_000), // 20M token (assuming 6 decimals) + }, + { + Denom: testDenom2, + Amount: sdkmath.NewInt(25_000_000_000_000), // 20M token (assuming 6 decimals) }, }, }, + }, + limitParamsList: []types.LimitParams{ { - Address: testAddress3, - Coins: sdk.Coins{ + Denom: testDenom, + Limiters: []types.Limiter{ + // baseline = 20M * 1% = 200k tokens { - Denom: testDenom, - Amount: sdkmath.NewInt(10_000_000_000_000), // 10M token + Period: 3_600 * time.Second, + BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens (assuming 6 decimals) + BaselineTvlPpm: 10_000, // 1% + }, + // baseline = 20M * 10% = 2M tokens + { + Period: 86_400 * time.Second, + BaselineMinimum: dtypes.NewInt(1_000_000_000_000), // 1M tokens (assuming 6 decimals) + BaselineTvlPpm: 100_000, // 10% + }, + }, + }, + { + Denom: testDenom2, + Limiters: []types.Limiter{ + // baseline = 25M * 1% = 250k tokens + { + Period: 3_600 * time.Second, + BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens (assuming 6 decimals) + 
BaselineTvlPpm: 10_000, // 1% + }, + // baseline = 25M * 10% = 2.5M tokens + { + Period: 86_400 * time.Second, + BaselineMinimum: dtypes.NewInt(1_000_000_000_000), // 1M tokens (assuming 6 decimals) + BaselineTvlPpm: 100_000, // 10% }, }, }, }, - limiter: types.Limiter{ - PeriodSec: 3_600, - BaselineMinimum: dtypes.NewInt(100_000_000_000), // 1m token - BaselineTvlPpm: 100_000, // 10% + prevBlockTime: time.Unix(1000, 0).In(time.UTC), + blockTime: time.Unix(1001, 0).In(time.UTC), + initDenomCapacityList: []types.DenomCapacity{ + { + Denom: testDenom, + CapacityList: []dtypes.SerializableInt{ + dtypes.NewInt(529_000_000_000), // 529k tokens > 2 * baseline (200k) + dtypes.NewInt(4_500_000_000_000), // 4.5M tokens > 2 * baseline (2M) + }, + }, + { + Denom: testDenom2, + CapacityList: []dtypes.SerializableInt{ + dtypes.NewInt(99_000_000_000), // 99k tokens, < baseline (250k) + dtypes.NewInt(990_000_000_000), // 0.99M tokens, < baseline (2.5M) + }, + }, + }, + expectedDenomCapacityList: []types.DenomCapacity{ + { + Denom: testDenom, + CapacityList: []dtypes.SerializableInt{ + dtypes.NewInt(528_908_611_112), // recovered by 1/3600 * (529k - 200k) ~= 91.389 + dtypes.NewInt(4_499_971_064_815), // recovered by 1/86400 * (4.5M - 2M) ~= 28.935 + }, + }, + { + Denom: testDenom2, + CapacityList: []dtypes.SerializableInt{ + dtypes.NewInt(99_069_444_444), // recovered by 1/3600 * 250k = 69.4444 tokens + dtypes.NewInt(990_028_935_185), // recovered by 1/86400 * 2.5M = 28.9351 tokens + }, + }, }, - expectedBaseline: big.NewInt(2_000_000_000_000), // 2m token (10% of 20m) }, - "max(10% of TVL, 1 million), TVL = 8M token": { - denom: testDenom, + "(Error) one denom, current block time = prev block time, no changes applied": { balances: []banktypes.Balance{ { Address: testAddress1, Coins: sdk.Coins{ { Denom: testDenom, - Amount: sdkmath.NewInt(2_000_000_000_000), // 2M token + Amount: sdkmath.NewInt(25_000_000_000_000), // 25M token (assuming 6 decimals) }, }, }, + }, + limitParamsList: 
[]types.LimitParams{ { - Address: testAddress2, - Coins: sdk.Coins{ + Denom: testDenom, + Limiters: []types.Limiter{ + // baseline = 25M * 1% = 250k tokens { - Denom: testDenom, - Amount: sdkmath.NewInt(4_000_000_000_000), // 4M token + Period: 3_600 * time.Second, + BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens (assuming 6 decimals) + BaselineTvlPpm: 10_000, // 1% }, + // baseline = 25M * 10% = 2.5M tokens + { + Period: 86_400 * time.Second, + BaselineMinimum: dtypes.NewInt(1_000_000_000_000), // 1M tokens (assuming 6 decimals) + BaselineTvlPpm: 100_000, // 10% + }, + }, + }, + }, + prevBlockTime: time.Unix(1000, 0).In(time.UTC), + blockTime: time.Unix(1000, 0).In(time.UTC), // same as prev block time (should not happen in practice) + initDenomCapacityList: []types.DenomCapacity{ + { + Denom: testDenom, + CapacityList: []dtypes.SerializableInt{ + dtypes.NewInt(99_000_000_000), // 99k tokens, < baseline (250k) + dtypes.NewInt(990_000_000_000), // 0.99M tokens, < baseline (2.5M) }, }, + }, + expectedDenomCapacityList: []types.DenomCapacity{ { - Address: testAddress3, + Denom: testDenom, + CapacityList: []dtypes.SerializableInt{ + dtypes.NewInt(99_000_000_000), // 99k tokens (unchanged) + dtypes.NewInt(990_000_000_000), // 0.99M tokens (unchanged) + }, + }, + }, + }, + "(Error) one denom, len(limiters) != len(capacityList)": { + balances: []banktypes.Balance{ + { + Address: testAddress1, Coins: sdk.Coins{ { Denom: testDenom, - Amount: sdkmath.NewInt(2_000_000_000_000), // 2M token + Amount: sdkmath.NewInt(25_000_000_000_000), // 25M token (assuming 6 decimals) }, }, }, }, - limiter: types.Limiter{ - PeriodSec: 3_600, - BaselineMinimum: dtypes.NewInt(1_000_000_000_000), // 1m token - BaselineTvlPpm: 100_000, // 10% + limitParamsList: []types.LimitParams{ + { + Denom: testDenom, + Limiters: []types.Limiter{ + // baseline = 25M * 1% = 250k tokens + { + Period: 3_600 * time.Second, + BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens 
(assuming 6 decimals) + BaselineTvlPpm: 10_000, // 1% + }, + }, + }, + }, + prevBlockTime: time.Unix(1000, 0).In(time.UTC), + blockTime: time.Unix(1001, 0).In(time.UTC), + initDenomCapacityList: []types.DenomCapacity{ + { + Denom: testDenom, + CapacityList: []dtypes.SerializableInt{ + dtypes.NewInt(99_000_000_000), // 99k tokens, < baseline (250k) + dtypes.NewInt(990_000_000_000), // 0.99M tokens, < baseline (2.5M) + }, + }, + }, + expectedDenomCapacityList: []types.DenomCapacity{ + { + Denom: testDenom, + CapacityList: []dtypes.SerializableInt{ + dtypes.NewInt(99_000_000_000), // 99k tokens (unchanged) + dtypes.NewInt(990_000_000_000), // 0.99M tokens (unchanged) + }, + }, + }, }, - expectedBaseline: big.NewInt(1_000_000_000_000), // 1m token (baseline minimum) }, } @@ -474,11 +811,38 @@ func TestGetBaseline(t *testing.T) { }).Build() ctx := tApp.InitChain() + + // Set previous block time + tApp.App.BlockTimeKeeper.SetPreviousBlockInfo(ctx, &blocktimetypes.BlockInfo{ + Timestamp: tc.prevBlockTime, + }) + k := tApp.App.RatelimitKeeper - gotBaseline := k.GetBaseline(ctx, tc.denom, tc.limiter) + // Initialize limit params + for _, limitParams := range tc.limitParamsList { + k.SetLimitParams(ctx, limitParams) + } - require.Equal(t, tc.expectedBaseline, gotBaseline, "retrieved baseline does not match the expected value") + // Initialize denom capacity + for _, denomCapacity := range tc.initDenomCapacityList { + k.SetDenomCapacity(ctx, denomCapacity) + } + + // Run the function being tested + k.UpdateAllCapacitiesEndBlocker(ctx.WithBlockTime(tc.blockTime)) + + // Check results + for _, expectedDenomCapacity := range tc.expectedDenomCapacityList { + gotDenomCapacity := k.GetDenomCapacity(ctx, expectedDenomCapacity.Denom) + require.Equal(t, + expectedDenomCapacity, + gotDenomCapacity, + "expected denom capacity: %+v, got: %+v", + expectedDenomCapacity, + gotDenomCapacity, + ) + } }) } } diff --git 
a/protocol/x/ratelimit/module.go b/protocol/x/ratelimit/module.go index 9b5cfbdaa3..526067e4e7 100644 --- a/protocol/x/ratelimit/module.go +++ b/protocol/x/ratelimit/module.go @@ -2,10 +2,11 @@ package ratelimit import ( "context" - "cosmossdk.io/core/appmodule" "encoding/json" "fmt" + "cosmossdk.io/core/appmodule" + "github.com/grpc-ecosystem/grpc-gateway/runtime" "github.com/spf13/cobra" @@ -151,7 +152,8 @@ func (AppModule) ConsensusVersion() uint64 { return 1 } // EndBlock contains the logic that is automatically triggered at the end of each block func (am AppModule) EndBlock(ctx context.Context) error { - // TODO(CORE-824): Implement `EndBlocker` logic. + sdkCtx := sdk.UnwrapSDKContext(ctx) + am.keeper.UpdateAllCapacitiesEndBlocker(sdkCtx) return nil } diff --git a/protocol/x/ratelimit/types/errors.go b/protocol/x/ratelimit/types/errors.go index 8c02881fc4..acb4bbae1c 100644 --- a/protocol/x/ratelimit/types/errors.go +++ b/protocol/x/ratelimit/types/errors.go @@ -16,4 +16,9 @@ var ( 1002, "withdrawal amount would exceed rate-limit capacity", ) + ErrMismatchedCapacityLimitersLength = errorsmod.Register( + ModuleName, + 1003, + "capacity list length does not match number of limiters", + ) ) diff --git a/protocol/x/ratelimit/types/expected_keepers.go b/protocol/x/ratelimit/types/expected_keepers.go index 1b230d7bde..0c09cad3e3 100644 --- a/protocol/x/ratelimit/types/expected_keepers.go +++ b/protocol/x/ratelimit/types/expected_keepers.go @@ -2,6 +2,8 @@ package types import ( "context" + "time" + sdk "github.com/cosmos/cosmos-sdk/types" ) @@ -9,3 +11,7 @@ import ( type BankKeeper interface { GetSupply(ctx context.Context, denom string) sdk.Coin } + +type BlockTimeKeeper interface { + GetTimeSinceLastBlock(ctx sdk.Context) time.Duration +} diff --git a/protocol/x/ratelimit/types/limit_params.pb.go b/protocol/x/ratelimit/types/limit_params.pb.go index 4dfe8900d6..af90525253 100644 --- a/protocol/x/ratelimit/types/limit_params.pb.go +++ 
b/protocol/x/ratelimit/types/limit_params.pb.go @@ -7,16 +7,20 @@ import ( fmt "fmt" _ "github.com/cosmos/gogoproto/gogoproto" proto "github.com/cosmos/gogoproto/proto" + _ "github.com/cosmos/gogoproto/types" + github_com_cosmos_gogoproto_types "github.com/cosmos/gogoproto/types" github_com_dydxprotocol_v4_chain_protocol_dtypes "github.com/dydxprotocol/v4-chain/protocol/dtypes" io "io" math "math" math_bits "math/bits" + time "time" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf +var _ = time.Kitchen // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. @@ -83,9 +87,9 @@ func (m *LimitParams) GetLimiters() []Limiter { // Limiter defines one rate-limiter on a specfic denom. type Limiter struct { - // period_sec is the rolling time period for which the limit applies + // period is the rolling time period for which the limit applies // e.g. 3600 (an hour) - PeriodSec uint32 `protobuf:"varint,2,opt,name=period_sec,json=periodSec,proto3" json:"period_sec,omitempty"` + Period time.Duration `protobuf:"bytes,1,opt,name=period,proto3,stdduration" json:"period"` // baseline_minimum is the minimum maximum withdrawal coin amount within the // time period. // e.g. 
100_000_000_000 uusdc for 100k USDC; 5e22 adv4tnt for 50k DV4TNT @@ -129,9 +133,9 @@ func (m *Limiter) XXX_DiscardUnknown() { var xxx_messageInfo_Limiter proto.InternalMessageInfo -func (m *Limiter) GetPeriodSec() uint32 { +func (m *Limiter) GetPeriod() time.Duration { if m != nil { - return m.PeriodSec + return m.Period } return 0 } @@ -153,28 +157,30 @@ func init() { } var fileDescriptor_b795558e1de1468a = []byte{ - // 329 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x51, 0x4f, 0x4b, 0xc3, 0x30, - 0x1c, 0x6d, 0xb6, 0xf9, 0x67, 0x99, 0xff, 0x28, 0x43, 0x8a, 0x60, 0x57, 0x76, 0xaa, 0x07, 0x5b, - 0x50, 0x6f, 0x5e, 0x74, 0x27, 0x85, 0x09, 0xa3, 0xd3, 0x8b, 0x97, 0x92, 0xb6, 0x71, 0x0b, 0x24, - 0x4d, 0x48, 0xb2, 0xb1, 0xf9, 0x29, 0xfc, 0x56, 0xee, 0xb8, 0xa3, 0x78, 0x18, 0xb2, 0x7d, 0x11, - 0x59, 0xea, 0xc6, 0x04, 0x0f, 0x5e, 0x42, 0xf2, 0xde, 0xcb, 0x7b, 0x8f, 0xdf, 0x0f, 0x9e, 0x65, - 0xe3, 0x6c, 0x24, 0x24, 0xd7, 0x3c, 0xe5, 0x34, 0x94, 0x48, 0x63, 0x4a, 0x18, 0xd1, 0xa1, 0x39, - 0x63, 0x81, 0x24, 0x62, 0x2a, 0x30, 0xbc, 0x7d, 0xbc, 0x29, 0x0d, 0xd6, 0xd2, 0x93, 0x7a, 0x8f, - 0xf7, 0xb8, 0xc1, 0xc3, 0xe5, 0xad, 0x50, 0x37, 0x5f, 0x60, 0xad, 0xbd, 0xa4, 0x3b, 0xc6, 0xc2, - 0xae, 0xc3, 0xad, 0x0c, 0xe7, 0x9c, 0x39, 0xc0, 0x03, 0x7e, 0x35, 0x2a, 0x1e, 0xf6, 0x2d, 0xdc, - 0x35, 0x1e, 0x58, 0x2a, 0xa7, 0xe4, 0x95, 0xfd, 0xda, 0x45, 0x23, 0xf8, 0x3b, 0x25, 0x68, 0x17, - 0xba, 0x56, 0x65, 0x32, 0x6b, 0x58, 0xd1, 0xfa, 0x5b, 0xf3, 0x1d, 0xc0, 0x9d, 0x1f, 0xce, 0x3e, - 0x85, 0x50, 0x60, 0x49, 0x78, 0x16, 0x2b, 0x9c, 0x3a, 0x25, 0x0f, 0xf8, 0xfb, 0x51, 0xb5, 0x40, - 0xba, 0x38, 0xb5, 0x15, 0x3c, 0x4a, 0x90, 0xc2, 0x94, 0xe4, 0x38, 0x66, 0x24, 0x27, 0x6c, 0xc0, - 0x9c, 0xb2, 0x07, 0xfc, 0xbd, 0xd6, 0xdd, 0xd2, 0xf4, 0x73, 0xd6, 0xb8, 0xe9, 0x11, 0xdd, 0x1f, - 0x24, 0x41, 0xca, 0x59, 0xf8, 0x6b, 0x30, 0xc3, 0xab, 0xf3, 0xb4, 0x8f, 0x48, 0x1e, 0xae, 0x91, - 0x4c, 0x8f, 0x05, 0x56, 0x41, 0x17, 0x4b, 0x82, 0x28, 0x79, 
0x45, 0x09, 0xc5, 0xf7, 0xb9, 0x8e, - 0x0e, 0x57, 0x09, 0x0f, 0x45, 0x80, 0xed, 0x6f, 0x84, 0xea, 0x21, 0x8d, 0x85, 0x60, 0x4e, 0xc5, - 0x34, 0x3b, 0x58, 0xe1, 0x8f, 0x43, 0xda, 0x11, 0xac, 0xf5, 0x34, 0x99, 0xbb, 0x60, 0x3a, 0x77, - 0xc1, 0xd7, 0xdc, 0x05, 0x6f, 0x0b, 0xd7, 0x9a, 0x2e, 0x5c, 0xeb, 0x63, 0xe1, 0x5a, 0xcf, 0xd7, - 0xff, 0xaf, 0x35, 0xda, 0xd8, 0xa1, 0x69, 0x98, 0x6c, 0x1b, 0xee, 0xf2, 0x3b, 0x00, 0x00, 0xff, - 0xff, 0xd1, 0xaf, 0x8a, 0x2d, 0xea, 0x01, 0x00, 0x00, + // 365 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0x3f, 0x4f, 0xfa, 0x40, + 0x1c, 0xc6, 0x7b, 0x3f, 0xf8, 0x21, 0x16, 0xff, 0xa5, 0x21, 0xa6, 0x32, 0xb4, 0x0d, 0x53, 0x1d, + 0xbc, 0x26, 0xe8, 0xc6, 0xa2, 0x8d, 0x83, 0x26, 0x98, 0x90, 0xaa, 0x8b, 0x0b, 0xb9, 0xd2, 0xa3, + 0x5c, 0x72, 0xd7, 0x6b, 0xda, 0x2b, 0x01, 0x5f, 0x85, 0xa3, 0x2f, 0x89, 0x91, 0xd1, 0x38, 0xa0, + 0x81, 0xf8, 0x3e, 0x4c, 0xaf, 0x40, 0x30, 0x71, 0x70, 0x69, 0xfa, 0xfd, 0x3e, 0xcf, 0x73, 0xcf, + 0x27, 0x97, 0x53, 0x4f, 0x83, 0x49, 0x30, 0x8e, 0x13, 0x2e, 0x78, 0x9f, 0x53, 0x27, 0x41, 0x02, + 0x53, 0xc2, 0x88, 0x70, 0xe4, 0xb7, 0x17, 0xa3, 0x04, 0xb1, 0x14, 0x4a, 0x5d, 0x3b, 0xde, 0xb6, + 0xc2, 0x8d, 0xb5, 0x51, 0x0f, 0x79, 0xc8, 0xe5, 0xde, 0xc9, 0xff, 0x0a, 0x77, 0xc3, 0x08, 0x39, + 0x0f, 0x29, 0x76, 0xe4, 0xe4, 0x67, 0x03, 0x27, 0xc8, 0x12, 0x24, 0x08, 0x8f, 0x0a, 0xbd, 0x39, + 0x50, 0x6b, 0x9d, 0x3c, 0xde, 0x95, 0x15, 0x5a, 0x5d, 0xfd, 0x1f, 0xe0, 0x88, 0x33, 0x1d, 0x58, + 0xc0, 0xde, 0xf5, 0x8a, 0x41, 0xbb, 0x52, 0xab, 0xb2, 0x03, 0x27, 0xa9, 0xfe, 0xcf, 0x2a, 0xd9, + 0xb5, 0x96, 0x09, 0x7f, 0xa7, 0x80, 0x9d, 0xc2, 0xe7, 0x96, 0xa7, 0x73, 0x53, 0xf1, 0x36, 0xb1, + 0xe6, 0x17, 0x50, 0x77, 0x56, 0x9a, 0xd6, 0x56, 0x2b, 0x31, 0x4e, 0x08, 0x0f, 0x64, 0x4b, 0xad, + 0x75, 0x02, 0x0b, 0x48, 0xb8, 0x86, 0x84, 0xd7, 0x2b, 0x48, 0xb7, 0x9a, 0x1f, 0xf3, 0xfa, 0x61, + 0x02, 0x6f, 0x15, 0xd1, 0x52, 0xf5, 0xc8, 0x47, 0x29, 0xa6, 0x24, 0xc2, 0x3d, 0x46, 0x22, 
0xc2, + 0x32, 0xa6, 0x97, 0x2c, 0x60, 0xef, 0xb9, 0x37, 0xb9, 0xf7, 0x7d, 0x6e, 0x5e, 0x86, 0x44, 0x0c, + 0x33, 0x1f, 0xf6, 0x39, 0x73, 0x7e, 0x5c, 0xeb, 0xe8, 0xe2, 0xac, 0x3f, 0x44, 0x24, 0x72, 0x36, + 0x9b, 0x40, 0x4c, 0x62, 0x9c, 0xc2, 0x7b, 0x9c, 0x10, 0x44, 0xc9, 0x33, 0xf2, 0x29, 0xbe, 0x8d, + 0x84, 0x77, 0xb8, 0x6e, 0xb8, 0x2b, 0x0a, 0x34, 0x7b, 0xab, 0x54, 0x8c, 0x68, 0x2f, 0x8e, 0x99, + 0x5e, 0xb6, 0x80, 0xbd, 0xef, 0x1d, 0xac, 0xf7, 0x0f, 0x23, 0xda, 0x8d, 0x99, 0xfb, 0x38, 0x5d, + 0x18, 0x60, 0xb6, 0x30, 0xc0, 0xe7, 0xc2, 0x00, 0x2f, 0x4b, 0x43, 0x99, 0x2d, 0x0d, 0xe5, 0x6d, + 0x69, 0x28, 0x4f, 0xed, 0xbf, 0x63, 0x8d, 0xb7, 0x5e, 0x80, 0x24, 0xf4, 0x2b, 0x52, 0x3b, 0xff, + 0x0e, 0x00, 0x00, 0xff, 0xff, 0x0f, 0x4d, 0x51, 0x66, 0x28, 0x02, 0x00, 0x00, } func (m *LimitParams) Marshal() (dAtA []byte, err error) { @@ -256,11 +262,14 @@ func (m *Limiter) MarshalToSizedBuffer(dAtA []byte) (int, error) { } i-- dAtA[i] = 0x1a - if m.PeriodSec != 0 { - i = encodeVarintLimitParams(dAtA, i, uint64(m.PeriodSec)) - i-- - dAtA[i] = 0x10 + n1, err1 := github_com_cosmos_gogoproto_types.StdDurationMarshalTo(m.Period, dAtA[i-github_com_cosmos_gogoproto_types.SizeOfStdDuration(m.Period):]) + if err1 != nil { + return 0, err1 } + i -= n1 + i = encodeVarintLimitParams(dAtA, i, uint64(n1)) + i-- + dAtA[i] = 0xa return len(dAtA) - i, nil } @@ -300,9 +309,8 @@ func (m *Limiter) Size() (n int) { } var l int _ = l - if m.PeriodSec != 0 { - n += 1 + sovLimitParams(uint64(m.PeriodSec)) - } + l = github_com_cosmos_gogoproto_types.SizeOfStdDuration(m.Period) + n += 1 + l + sovLimitParams(uint64(l)) l = m.BaselineMinimum.Size() n += 1 + l + sovLimitParams(uint64(l)) if m.BaselineTvlPpm != 0 { @@ -462,11 +470,11 @@ func (m *Limiter) Unmarshal(dAtA []byte) error { return fmt.Errorf("proto: Limiter: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { - case 2: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field PeriodSec", wireType) + case 1: + 
if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Period", wireType) } - m.PeriodSec = 0 + var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowLimitParams @@ -476,11 +484,25 @@ func (m *Limiter) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - m.PeriodSec |= uint32(b&0x7F) << shift + msglen |= int(b&0x7F) << shift if b < 0x80 { break } } + if msglen < 0 { + return ErrInvalidLengthLimitParams + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthLimitParams + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := github_com_cosmos_gogoproto_types.StdDurationUnmarshal(&m.Period, dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field BaselineMinimum", wireType) diff --git a/protocol/x/ratelimit/types/query.pb.go b/protocol/x/ratelimit/types/query.pb.go index 4a8a939b04..70984cc21c 100644 --- a/protocol/x/ratelimit/types/query.pb.go +++ b/protocol/x/ratelimit/types/query.pb.go @@ -9,6 +9,8 @@ import ( _ "github.com/cosmos/gogoproto/gogoproto" grpc1 "github.com/cosmos/gogoproto/grpc" proto "github.com/cosmos/gogoproto/proto" + _ "github.com/cosmos/gogoproto/types" + github_com_cosmos_gogoproto_types "github.com/cosmos/gogoproto/types" github_com_dydxprotocol_v4_chain_protocol_dtypes "github.com/dydxprotocol/v4-chain/protocol/dtypes" _ "google.golang.org/genproto/googleapis/api/annotations" grpc "google.golang.org/grpc" @@ -17,12 +19,14 @@ import ( io "io" math "math" math_bits "math/bits" + time "time" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf +var _ = time.Kitchen // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. 
@@ -160,8 +164,8 @@ func (m *QueryCapacityByDenomRequest) GetDenom() string { // CapacityResult is a specific rate limit for a denom. type CapacityResult struct { - PeriodSec uint32 `protobuf:"varint,1,opt,name=period_sec,json=periodSec,proto3" json:"period_sec,omitempty"` - Capacity github_com_dydxprotocol_v4_chain_protocol_dtypes.SerializableInt `protobuf:"bytes,2,opt,name=capacity,proto3,customtype=github.com/dydxprotocol/v4-chain/protocol/dtypes.SerializableInt" json:"capacity"` + Period time.Duration `protobuf:"bytes,1,opt,name=period,proto3,stdduration" json:"period"` + Capacity github_com_dydxprotocol_v4_chain_protocol_dtypes.SerializableInt `protobuf:"bytes,2,opt,name=capacity,proto3,customtype=github.com/dydxprotocol/v4-chain/protocol/dtypes.SerializableInt" json:"capacity"` } func (m *CapacityResult) Reset() { *m = CapacityResult{} } @@ -197,9 +201,9 @@ func (m *CapacityResult) XXX_DiscardUnknown() { var xxx_messageInfo_CapacityResult proto.InternalMessageInfo -func (m *CapacityResult) GetPeriodSec() uint32 { +func (m *CapacityResult) GetPeriod() time.Duration { if m != nil { - return m.PeriodSec + return m.Period } return 0 } @@ -263,37 +267,39 @@ func init() { } var fileDescriptor_f2e2dd1cb27aa65a = []byte{ - // 475 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x52, 0x4d, 0x6b, 0xd4, 0x40, - 0x18, 0xde, 0x59, 0xa9, 0xda, 0xf1, 0x63, 0x71, 0x28, 0x75, 0x59, 0x6b, 0xba, 0x44, 0x90, 0x15, - 0x74, 0x06, 0xba, 0x7b, 0xf3, 0x52, 0x56, 0x0f, 0x0a, 0x05, 0x35, 0xc5, 0x8b, 0x97, 0x38, 0x49, - 0x86, 0x74, 0x64, 0x92, 0x49, 0x33, 0x13, 0x69, 0x3c, 0xfa, 0x0b, 0x04, 0xf1, 0x3f, 0x78, 0xf6, - 0x57, 0xf4, 0x58, 0xf0, 0x22, 0x1e, 0x8a, 0xec, 0x0a, 0xfe, 0x0d, 0xc9, 0x24, 0xbb, 0x66, 0xd7, - 0x4d, 0x69, 0x2f, 0x21, 0xf3, 0x3e, 0xcf, 0xfb, 0xf5, 0xbc, 0x0f, 0xb4, 0x83, 0x3c, 0x38, 0x4a, - 0x52, 0xa9, 0xa5, 0x2f, 0x05, 0x49, 0xa9, 0x66, 0x82, 0x47, 0x5c, 0x93, 0xc3, 0x8c, 0xa5, 0x39, - 0x36, 0x00, 0xda, 0xac, 
0x73, 0xf0, 0x9c, 0xd3, 0xdb, 0x08, 0x65, 0x28, 0x4d, 0x9c, 0x14, 0x7f, - 0x25, 0xbb, 0xb7, 0x15, 0x4a, 0x19, 0x0a, 0x46, 0x68, 0xc2, 0x09, 0x8d, 0x63, 0xa9, 0xa9, 0xe6, - 0x32, 0x56, 0x15, 0xfa, 0xa0, 0xa1, 0x9f, 0xf9, 0xba, 0x09, 0x4d, 0x69, 0x54, 0x51, 0xed, 0x2e, - 0xdc, 0xdc, 0xe3, 0x4a, 0xef, 0x15, 0xc8, 0x4b, 0x03, 0x38, 0xec, 0x30, 0x63, 0x4a, 0xdb, 0xef, - 0xe0, 0xed, 0xff, 0x10, 0x95, 0xc8, 0x58, 0x31, 0xf4, 0x02, 0xde, 0xaa, 0x97, 0x72, 0x05, 0x57, - 0xba, 0x0b, 0xfa, 0x97, 0x06, 0xd7, 0x76, 0xee, 0xe1, 0xd5, 0x7b, 0xe0, 0x7a, 0x9d, 0x8e, 0xf8, - 0xf7, 0x28, 0x7a, 0xd8, 0x43, 0x78, 0xe7, 0x55, 0xa1, 0xc5, 0x13, 0x9a, 0x50, 0x9f, 0xeb, 0x7c, - 0x9c, 0x3f, 0x65, 0xb1, 0x8c, 0xaa, 0x51, 0xd0, 0x06, 0x5c, 0x0b, 0x8a, 0x77, 0x17, 0xf4, 0xc1, - 0x60, 0xdd, 0x29, 0x1f, 0xf6, 0x17, 0x00, 0x6f, 0xce, 0x12, 0x1c, 0xa6, 0x32, 0xa1, 0xd1, 0x5d, - 0x08, 0x13, 0x96, 0x72, 0x19, 0xb8, 0x8a, 0xf9, 0x86, 0x7d, 0xc3, 0x59, 0x2f, 0x23, 0xfb, 0xcc, - 0x47, 0x01, 0xbc, 0xea, 0x57, 0x09, 0xdd, 0x76, 0x1f, 0x0c, 0xae, 0x8f, 0x9f, 0x1d, 0x9f, 0x6e, - 0xb7, 0x7e, 0x9e, 0x6e, 0xef, 0x86, 0x5c, 0x1f, 0x64, 0x1e, 0xf6, 0x65, 0x44, 0x16, 0xc4, 0x7b, - 0x3f, 0x7a, 0xe4, 0x1f, 0x50, 0x1e, 0x93, 0x79, 0x24, 0xd0, 0x79, 0xc2, 0x14, 0xde, 0x67, 0x29, - 0xa7, 0x82, 0x7f, 0xa0, 0x9e, 0x60, 0xcf, 0x63, 0xed, 0xcc, 0x2b, 0xdb, 0x6f, 0xe1, 0xd6, 0xea, - 0x65, 0x2a, 0xf5, 0x76, 0xe1, 0x95, 0xd4, 0x8c, 0xab, 0x2a, 0xcd, 0xee, 0x37, 0x69, 0xb6, 0xb8, - 0x9d, 0x33, 0x4b, 0xdb, 0xf9, 0xd3, 0x86, 0x6b, 0xa6, 0x05, 0xfa, 0x0a, 0x60, 0x67, 0xe9, 0x4a, - 0x08, 0x37, 0x9f, 0x60, 0xd5, 0xa1, 0x7b, 0xe4, 0xdc, 0xfc, 0x72, 0x01, 0x7b, 0xf4, 0xf1, 0xfb, - 0xef, 0xcf, 0x6d, 0x8c, 0x1e, 0x2e, 0x4b, 0xb5, 0x60, 0x35, 0xa5, 0xdd, 0xba, 0x49, 0xd0, 0x37, - 0x00, 0x3b, 0x4b, 0x92, 0xa0, 0x61, 0x53, 0xeb, 0x33, 0xdc, 0xd0, 0x1b, 0x5d, 0x2c, 0xe9, 0x02, - 0x43, 0xcf, 0x4e, 0xe8, 0x7a, 0xb9, 0x6b, 0x3c, 0x36, 0x7e, 0x7d, 0x3c, 0xb1, 0xc0, 0xc9, 0xc4, - 0x02, 0xbf, 0x26, 0x16, 0xf8, 0x34, 0xb5, 0x5a, 0x27, 0x53, 
0xab, 0xf5, 0x63, 0x6a, 0xb5, 0xde, - 0x3c, 0x3e, 0xbf, 0x63, 0x8e, 0x6a, 0x2d, 0x8c, 0x79, 0xbc, 0xcb, 0x06, 0x1b, 0xfe, 0x0d, 0x00, - 0x00, 0xff, 0xff, 0xe9, 0xc9, 0xf9, 0x71, 0x19, 0x04, 0x00, 0x00, + // 502 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x52, 0x41, 0x6f, 0xd3, 0x30, + 0x18, 0xad, 0x8b, 0x36, 0x86, 0x87, 0xa8, 0xb0, 0xa6, 0x51, 0xca, 0x94, 0x56, 0x41, 0x42, 0x45, + 0x02, 0x5b, 0x6a, 0x7b, 0xdb, 0x65, 0x2a, 0x3b, 0x80, 0x34, 0x09, 0x08, 0xe2, 0xc2, 0x25, 0x38, + 0x89, 0xc9, 0x8c, 0xd2, 0x38, 0x8b, 0x1d, 0xb4, 0x70, 0xe4, 0x17, 0x20, 0x71, 0xe1, 0x27, 0x70, + 0x86, 0x3f, 0xb1, 0xe3, 0x24, 0x2e, 0x88, 0xc3, 0x40, 0x2d, 0x12, 0x7f, 0x03, 0xc5, 0x49, 0x4a, + 0x5a, 0x1a, 0xb4, 0x5d, 0xa2, 0x7c, 0xfe, 0xde, 0xf7, 0x3d, 0xbf, 0xe7, 0x07, 0x4d, 0x2f, 0xf5, + 0x8e, 0xa3, 0x58, 0x28, 0xe1, 0x8a, 0x80, 0xc4, 0x54, 0xb1, 0x80, 0x4f, 0xb8, 0x22, 0x47, 0x09, + 0x8b, 0x53, 0xac, 0x1b, 0x68, 0xbb, 0x8a, 0xc1, 0x73, 0x4c, 0x67, 0xcb, 0x17, 0xbe, 0xd0, 0xe7, + 0x24, 0xfb, 0xcb, 0xd1, 0x9d, 0x1d, 0x5f, 0x08, 0x3f, 0x60, 0x84, 0x46, 0x9c, 0xd0, 0x30, 0x14, + 0x8a, 0x2a, 0x2e, 0x42, 0x59, 0x74, 0xef, 0xd6, 0xf0, 0xe9, 0xaf, 0x1d, 0xd1, 0x98, 0x4e, 0x4a, + 0xa8, 0x51, 0x2c, 0xd2, 0x95, 0x93, 0xbc, 0x22, 0x5e, 0x12, 0xeb, 0x5d, 0x79, 0xdf, 0x6c, 0xc3, + 0xed, 0x03, 0x2e, 0xd5, 0x41, 0x36, 0xf9, 0x44, 0x0f, 0x5a, 0xec, 0x28, 0x61, 0x52, 0x99, 0xaf, + 0xe1, 0x8d, 0x7f, 0x3a, 0x32, 0x12, 0xa1, 0x64, 0xe8, 0x31, 0xbc, 0x5e, 0xa5, 0xb2, 0x03, 0x2e, + 0x55, 0x1b, 0xf4, 0x2e, 0xf5, 0x37, 0x07, 0xb7, 0xf1, 0x6a, 0x9d, 0xb8, 0xba, 0xa7, 0x15, 0xfc, + 0x2d, 0x32, 0x0e, 0x73, 0x08, 0x6f, 0x3d, 0xcd, 0xbc, 0x7a, 0x40, 0x23, 0xea, 0x72, 0x95, 0x8e, + 0xd3, 0x7d, 0x16, 0x8a, 0x49, 0x71, 0x15, 0xb4, 0x05, 0xd7, 0xbc, 0xac, 0x6e, 0x83, 0x1e, 0xe8, + 0x5f, 0xb1, 0xf2, 0xc2, 0xfc, 0x02, 0xe0, 0xb5, 0x72, 0xc0, 0x62, 0x32, 0x09, 0x14, 0xda, 0x85, + 0xeb, 0x11, 0x8b, 0xb9, 0xf0, 0x34, 0x72, 0x73, 0x70, 0x13, 0xe7, 0xf2, 0x71, 
0x29, 0x1f, 0xef, + 0x17, 0xf2, 0xc7, 0x1b, 0x27, 0x67, 0xdd, 0xc6, 0xc7, 0x1f, 0x5d, 0x60, 0x15, 0x23, 0xc8, 0x83, + 0x1b, 0x6e, 0xb1, 0xae, 0xdd, 0xec, 0x81, 0xfe, 0xd5, 0xf1, 0xc3, 0x0c, 0xf3, 0xfd, 0xac, 0xbb, + 0xe7, 0x73, 0x75, 0x98, 0x38, 0xd8, 0x15, 0x13, 0xb2, 0x60, 0xfd, 0x9b, 0xd1, 0x7d, 0xf7, 0x90, + 0xf2, 0x90, 0xcc, 0x4f, 0x3c, 0x95, 0x46, 0x4c, 0xe2, 0x67, 0x2c, 0xe6, 0x34, 0xe0, 0x6f, 0xa9, + 0x13, 0xb0, 0x47, 0xa1, 0xb2, 0xe6, 0x9b, 0xcd, 0x97, 0x70, 0x67, 0xb5, 0xd4, 0xc2, 0xdb, 0x3d, + 0x78, 0x39, 0xd6, 0x62, 0x64, 0xe1, 0xe8, 0x9d, 0x3a, 0x47, 0x17, 0xb5, 0x5b, 0xe5, 0xd8, 0xe0, + 0x77, 0x13, 0xae, 0x69, 0x0a, 0xf4, 0x09, 0xc0, 0xd6, 0xd2, 0x1b, 0x22, 0x5c, 0xff, 0x40, 0xab, + 0x62, 0xd0, 0x21, 0xe7, 0xc6, 0xe7, 0x02, 0xcc, 0xd1, 0xbb, 0xaf, 0xbf, 0x3e, 0x34, 0x31, 0xba, + 0xb7, 0x6c, 0xd5, 0x42, 0x50, 0xa5, 0xb2, 0xab, 0x11, 0x42, 0x9f, 0x01, 0x6c, 0x2d, 0x59, 0x82, + 0x86, 0x75, 0xd4, 0xff, 0xc9, 0x4a, 0x67, 0x74, 0xb1, 0xa1, 0x0b, 0x5c, 0xba, 0x7c, 0x42, 0xdb, + 0x49, 0x6d, 0x9d, 0xc0, 0xf1, 0xf3, 0x93, 0xa9, 0x01, 0x4e, 0xa7, 0x06, 0xf8, 0x39, 0x35, 0xc0, + 0xfb, 0x99, 0xd1, 0x38, 0x9d, 0x19, 0x8d, 0x6f, 0x33, 0xa3, 0xf1, 0x62, 0xf7, 0xfc, 0x89, 0x39, + 0xae, 0x50, 0xe8, 0xf0, 0x38, 0xeb, 0xba, 0x37, 0xfc, 0x13, 0x00, 0x00, 0xff, 0xff, 0x2f, 0xec, + 0xaa, 0x74, 0x57, 0x04, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
@@ -536,11 +542,14 @@ func (m *CapacityResult) MarshalToSizedBuffer(dAtA []byte) (int, error) { } i-- dAtA[i] = 0x12 - if m.PeriodSec != 0 { - i = encodeVarintQuery(dAtA, i, uint64(m.PeriodSec)) - i-- - dAtA[i] = 0x8 + n1, err1 := github_com_cosmos_gogoproto_types.StdDurationMarshalTo(m.Period, dAtA[i-github_com_cosmos_gogoproto_types.SizeOfStdDuration(m.Period):]) + if err1 != nil { + return 0, err1 } + i -= n1 + i = encodeVarintQuery(dAtA, i, uint64(n1)) + i-- + dAtA[i] = 0xa return len(dAtA) - i, nil } @@ -635,9 +644,8 @@ func (m *CapacityResult) Size() (n int) { } var l int _ = l - if m.PeriodSec != 0 { - n += 1 + sovQuery(uint64(m.PeriodSec)) - } + l = github_com_cosmos_gogoproto_types.SizeOfStdDuration(m.Period) + n += 1 + l + sovQuery(uint64(l)) l = m.Capacity.Size() n += 1 + l + sovQuery(uint64(l)) return n @@ -910,10 +918,10 @@ func (m *CapacityResult) Unmarshal(dAtA []byte) error { } switch fieldNum { case 1: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field PeriodSec", wireType) + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Period", wireType) } - m.PeriodSec = 0 + var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowQuery @@ -923,11 +931,25 @@ func (m *CapacityResult) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - m.PeriodSec |= uint32(b&0x7F) << shift + msglen |= int(b&0x7F) << shift if b < 0x80 { break } } + if msglen < 0 { + return ErrInvalidLengthQuery + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQuery + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := github_com_cosmos_gogoproto_types.StdDurationUnmarshal(&m.Period, dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Capacity", wireType) diff --git a/protocol/x/ratelimit/util/baseline.go b/protocol/x/ratelimit/util/baseline.go new 
file mode 100644 index 0000000000..d6bc88f7c7 --- /dev/null +++ b/protocol/x/ratelimit/util/baseline.go @@ -0,0 +1,25 @@ +package util + +import ( + "math/big" + + "github.com/dydxprotocol/v4-chain/protocol/lib" + "github.com/dydxprotocol/v4-chain/protocol/x/ratelimit/types" +) + +// GetBaseline returns the current capacity baseline for the given limiter. +// `baseline` formula: +// +// baseline = max(baseline_minimum, baseline_tvl_ppm * current_tvl) +func GetBaseline( + currentTvl *big.Int, + limiter types.Limiter, +) *big.Int { + return lib.BigMax( + limiter.BaselineMinimum.BigInt(), + lib.BigIntMulPpm( + currentTvl, + limiter.BaselineTvlPpm, + ), + ) +} diff --git a/protocol/x/ratelimit/util/baseline_test.go b/protocol/x/ratelimit/util/baseline_test.go new file mode 100644 index 0000000000..844433bf6c --- /dev/null +++ b/protocol/x/ratelimit/util/baseline_test.go @@ -0,0 +1,75 @@ +package util_test + +import ( + "math/big" + "testing" + "time" + + "github.com/dydxprotocol/v4-chain/protocol/dtypes" + "github.com/dydxprotocol/v4-chain/protocol/x/ratelimit/types" + ratelimitutil "github.com/dydxprotocol/v4-chain/protocol/x/ratelimit/util" + "github.com/stretchr/testify/require" +) + +func TestGetBaseline(t *testing.T) { + tests := map[string]struct { + supply *big.Int + limiter types.Limiter + expectedBaseline *big.Int + }{ + "max(1% of TVL, 100k token), TVL = 5M token": { + supply: big.NewInt(5_000_000_000_000), // 5M token + limiter: types.Limiter{ + Period: 3_600 * time.Second, + BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k token + BaselineTvlPpm: 10_000, // 1% + }, + expectedBaseline: big.NewInt(100_000_000_000), // 100k token (baseline minimum) + }, + "max(1% of TVL, 100k token), TVL = 15M token": { + supply: big.NewInt(15_000_000_000_000), // 15M token + limiter: types.Limiter{ + Period: 3_600 * time.Second, + BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k token + BaselineTvlPpm: 10_000, // 1% + }, + expectedBaseline: 
big.NewInt(150_000_000_000), // 150k token (1% of 15m) + }, + "max(1% of TVL, 100k token), TVL = ~15M token, rounds down": { + supply: big.NewInt(15_200_123_456_777), + limiter: types.Limiter{ + Period: 3_600 * time.Second, + BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k token + BaselineTvlPpm: 10_000, // 1% + }, + expectedBaseline: big.NewInt(152_001_234_567), // ~152k token (1% of 15.2m) + }, + "max(10% of TVL, 1 million), TVL = 20M token": { + supply: big.NewInt(20_000_000_000_000), // 20M token, + limiter: types.Limiter{ + Period: 3_600 * time.Second, + BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k token + BaselineTvlPpm: 100_000, // 10% + }, + expectedBaseline: big.NewInt(2_000_000_000_000), // 2m token (10% of 20m) + }, + "max(10% of TVL, 1 million), TVL = 8M token": { + supply: big.NewInt(8_000_000_000_000), // 8M token + limiter: types.Limiter{ + Period: 3_600 * time.Second, + BaselineMinimum: dtypes.NewInt(1_000_000_000_000), // 1m token + BaselineTvlPpm: 100_000, // 10% + }, + expectedBaseline: big.NewInt(1_000_000_000_000), // 1m token (baseline minimum) + }, + } + + // Run tests. + for name, tc := range tests { + t.Run(name, func(t *testing.T) { + gotBaseline := ratelimitutil.GetBaseline(tc.supply, tc.limiter) + + require.Equal(t, tc.expectedBaseline, gotBaseline, "retrieved baseline does not match the expected value") + }) + } +} diff --git a/protocol/x/ratelimit/util/capacity.go b/protocol/x/ratelimit/util/capacity.go new file mode 100644 index 0000000000..b22edf6bea --- /dev/null +++ b/protocol/x/ratelimit/util/capacity.go @@ -0,0 +1,103 @@ +package util + +import ( + "math/big" + "time" + + errorsmod "cosmossdk.io/errors" + "github.com/dydxprotocol/v4-chain/protocol/dtypes" + "github.com/dydxprotocol/v4-chain/protocol/lib" + "github.com/dydxprotocol/v4-chain/protocol/x/ratelimit/types" +) + +// CalculateNewCapacityList calculates the new capacity list for the given current `tvl` and `limitParams`. 
+// Input invariant: `len(prevCapacityList) == len(limitParams.Limiters)`
+// Detailed math for calculating the updated capacity:
+//
+// `baseline = max(baseline_minimum, baseline_tvl_ppm * tvl)`
+// `capacity_diff = max(baseline, capacity-baseline) * (time_since_last_block / period)`
+//
+// This is basically saying that the capacity returns to the baseline over the course of the `period`.
+// Usually in a linear way, but if the `capacity` is more than twice the `baseline`, then in an exponential way.
+//
+// `capacity =`
+// if `abs(capacity - baseline) <= capacity_diff` then `capacity = baseline`
+// else if `capacity < baseline` then `capacity += capacity_diff`
+// else `capacity -= capacity_diff`
+//
+// On a high level, `capacity` trends towards `baseline` by `capacity_diff` but does not “cross” it.
+func CalculateNewCapacityList(
+	bigTvl *big.Int,
+	limitParams types.LimitParams,
+	prevCapacityList []dtypes.SerializableInt,
+	timeSinceLastBlock time.Duration,
+) (
+	newCapacityList []dtypes.SerializableInt,
+	err error,
+) {
+	// Declare new capacity list to be populated.
+	newCapacityList = make([]dtypes.SerializableInt, len(prevCapacityList))
+
+	if len(limitParams.Limiters) != len(prevCapacityList) {
+		// This violates an invariant. Since this is in the `EndBlocker`, we return an error instead of panicking.
+		return nil, errorsmod.Wrapf(
+			types.ErrMismatchedCapacityLimitersLength,
+			"denom = %v, len(limiters) = %v, len(prevCapacityList) = %v",
+			limitParams.Denom,
+			len(limitParams.Limiters),
+			len(prevCapacityList),
+		)
+	}
+
+	for i, limiter := range limitParams.Limiters {
+		// For each limiter, calculate the current baseline.
+		baseline := GetBaseline(bigTvl, limiter)
+
+		capacityMinusBaseline := new(big.Int).Sub(
+			prevCapacityList[i].BigInt(), // array access is safe because of input invariant
+			baseline,
+		)
+
+		// Calculate left operand: `max(baseline, capacity-baseline)`. This equals `baseline` when `capacity <= 2 * baseline`
+		operandL := new(big.Rat).SetInt(
+			lib.BigMax(
+				baseline,
+				capacityMinusBaseline,
+			),
+		)
+
+		// Calculate right operand: `time_since_last_block / period`
+		operandR := new(big.Rat).SetFrac64(
+			timeSinceLastBlock.Milliseconds(),
+			limiter.Period.Milliseconds(),
+		)
+
+		// Calculate: `capacity_diff = max(baseline, capacity-baseline) * (time_since_last_block / period)`
+		// Since both operands > 0, `capacity_diff` is positive or zero (due to rounding).
+		capacityDiffRat := new(big.Rat).Mul(operandL, operandR)
+		capacityDiff := lib.BigRatRound(capacityDiffRat, false) // rounds down `capacity_diff`
+
+		if new(big.Int).Abs(capacityMinusBaseline).Cmp(capacityDiff) <= 0 {
+			// if `abs(capacity - baseline) <= capacity_diff` then `capacity = baseline`
+			newCapacityList[i] = dtypes.NewIntFromBigInt(baseline)
+		} else if capacityMinusBaseline.Sign() < 0 {
+			// else if `capacity < baseline` then `capacity += capacity_diff`
+			newCapacityList[i] = dtypes.NewIntFromBigInt(
+				new(big.Int).Add(
+					prevCapacityList[i].BigInt(),
+					capacityDiff,
+				),
+			)
+		} else {
+			// else `capacity -= capacity_diff`
+			newCapacityList[i] = dtypes.NewIntFromBigInt(
+				new(big.Int).Sub(
+					prevCapacityList[i].BigInt(),
+					capacityDiff,
+				),
+			)
+		}
+	}
+
+	return newCapacityList, nil
+}
diff --git a/protocol/x/ratelimit/util/capacity_test.go b/protocol/x/ratelimit/util/capacity_test.go
new file mode 100644
index 0000000000..4296cce7b3
--- /dev/null
+++ b/protocol/x/ratelimit/util/capacity_test.go
@@ -0,0 +1,311 @@
+package util_test
+
+import (
+	"math/big"
+	"testing"
+	"time"
+
+	errorsmod "cosmossdk.io/errors"
+	"github.com/dydxprotocol/v4-chain/protocol/dtypes"
+	big_testutil "github.com/dydxprotocol/v4-chain/protocol/testutil/big"
+	"github.com/dydxprotocol/v4-chain/protocol/x/ratelimit/types"
+	"github.com/dydxprotocol/v4-chain/protocol/x/ratelimit/util"
+	"github.com/stretchr/testify/require"
+)
+
+func TestUpdateAllCapacitiesEndBlocker(t
*testing.T) {
+	testDenom := "testDenom"
+	tests := map[string]struct {
+		bigTvl               *big.Int
+		limitParams          types.LimitParams
+		prevCapacityList     []dtypes.SerializableInt
+		expectedCapacityList []dtypes.SerializableInt
+		timeSinceLastBlock   time.Duration
+		expectedErr          error
+	}{
+		"Prev capacity equals baseline": {
+			bigTvl: big.NewInt(25_000_000_000_000), // 25M token (assuming 6 decimals)
+			limitParams: types.LimitParams{
+				Denom: testDenom,
+				Limiters: []types.Limiter{
+					// baseline = 25M * 1% = 250k tokens
+					{
+						Period:          3_600 * time.Second,
+						BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens (assuming 6 decimals)
+						BaselineTvlPpm:  10_000,                         // 1%
+					},
+					// baseline = 25M * 10% = 2.5M tokens
+					{
+						Period:          86_400 * time.Second,
+						BaselineMinimum: dtypes.NewInt(1_000_000_000_000), // 1M tokens (assuming 6 decimals)
+						BaselineTvlPpm:  100_000,                          // 10%
+					},
+				},
+			},
+			timeSinceLastBlock: time.Second,
+			prevCapacityList: []dtypes.SerializableInt{
+				dtypes.NewInt(250_000_000_000),   // 250k tokens, which equals baseline
+				dtypes.NewInt(2_500_000_000_000), // 2.5M tokens, which equals baseline
+			},
+			expectedCapacityList: []dtypes.SerializableInt{
+				dtypes.NewInt(250_000_000_000),   // 250k tokens
+				dtypes.NewInt(2_500_000_000_000), // 2.5M tokens
+			},
+		},
+		"Prev capacity < baseline": {
+			bigTvl: big.NewInt(25_000_000_000_000), // 25M token (assuming 6 decimals)
+			limitParams: types.LimitParams{
+				Denom: testDenom,
+				Limiters: []types.Limiter{
+					// baseline = 25M * 1% = 250k tokens
+					{
+						Period:          3_600 * time.Second,
+						BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens (assuming 6 decimals)
+						BaselineTvlPpm:  10_000,                         // 1%
+					},
+					// baseline = 25M * 10% = 2.5M tokens
+					{
+						Period:          86_400 * time.Second,
+						BaselineMinimum: dtypes.NewInt(1_000_000_000_000), // 1M tokens (assuming 6 decimals)
+						BaselineTvlPpm:  100_000,                          // 10%
+					},
+				},
+			},
+			timeSinceLastBlock: time.Second + 90*time.Millisecond, // 1.09 second
+			prevCapacityList: []dtypes.SerializableInt{
+				dtypes.NewInt(99_000_000_000),  // 99k tokens, < baseline (250k)
+				dtypes.NewInt(990_000_000_000), // 0.99M tokens, < baseline (2.5M)
+			},
+			expectedCapacityList: []dtypes.SerializableInt{
+				dtypes.NewInt(99_075_694_444),  // recovered by 1.09/3600 * 250k = 75.694444 tokens
+				dtypes.NewInt(990_031_539_351), // recovered by 1.09/86400 * 2.5M = 31.539 tokens
+			},
+		},
+		"prev capacity < baseline, 18 decimals": {
+			bigTvl: big_testutil.Int64MulPow10(25, 24), // 25M tokens
+			limitParams: types.LimitParams{
+				Denom: testDenom,
+				Limiters: []types.Limiter{
+					// baseline = 25M * 1% = 250k tokens
+					{
+						Period: 3_600 * time.Second,
+						BaselineMinimum: dtypes.NewIntFromBigInt(
+							big_testutil.Int64MulPow10(100_000, 18), // 100k tokens(assuming 18 decimals)
+						),
+						BaselineTvlPpm: 10_000, // 1%
+					},
+					// baseline = 25M * 10% = 2.5M tokens
+					{
+						Period: 86_400 * time.Second,
+						BaselineMinimum: dtypes.NewIntFromBigInt(
+							big_testutil.Int64MulPow10(1_000_000, 18), // 1M tokens(assuming 18 decimals)
+						),
+						BaselineTvlPpm: 100_000, // 10%
+					},
+				},
+			},
+			timeSinceLastBlock: time.Second,
+			prevCapacityList: []dtypes.SerializableInt{
+				dtypes.NewIntFromBigInt(
+					big_testutil.Int64MulPow10(99_000, 18),
+				), // 99k tokens < baseline (250k)
+				dtypes.NewIntFromBigInt(
+					big_testutil.Int64MulPow10(990_000, 18),
+				), // 0.99M tokens, < baseline (2.5M)
+			},
+			expectedCapacityList: []dtypes.SerializableInt{
+				dtypes.NewIntFromBigInt(
+					big_testutil.MustFirst(new(big.Int).SetString("99069444444444444444444", 10)),
+				), // recovered by 1/3600 * 250k ~= 69.4444 tokens
+				dtypes.NewIntFromBigInt(
+					big_testutil.MustFirst(new(big.Int).SetString("990028935185185185185185", 10)),
+				), // recovered by 1/86400 * 2.5M ~= 28.9351 tokens
+			},
+		},
+		"Prev capacity = 0": {
+			bigTvl: big.NewInt(1_000_000_000_000), // 1M token (assuming 6 decimals)
+			limitParams: types.LimitParams{
+				Denom: testDenom,
+				Limiters: []types.Limiter{
+					// baseline = baseline minimum = 100k tokens
+					{
+						Period:          3_600 * time.Second,
+						BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens (assuming 6 decimals)
+						BaselineTvlPpm:  10_000,                         // 1%
+					},
+					// baseline = baseline minimum = 1M tokens
+					{
+						Period:          86_400 * time.Second,
+						BaselineMinimum: dtypes.NewInt(1_000_000_000_000), // 1M tokens (assuming 6 decimals)
+						BaselineTvlPpm:  100_000,                          // 10%
+					},
+				},
+			},
+			timeSinceLastBlock: time.Second + 150*time.Millisecond, // 1.15 second
+			prevCapacityList: []dtypes.SerializableInt{
+				dtypes.NewInt(0),
+				dtypes.NewInt(0),
+			},
+			expectedCapacityList: []dtypes.SerializableInt{
+				dtypes.NewInt(31_944_444), // recovered by 1.15/3600 * 100k ~= 31.94
+				dtypes.NewInt(13_310_185), // recovered by 1.15/86400 * 1M ~= 13.31
+			},
+		},
+		"Prev capacity = 0, capacity_diff rounds down": {
+			bigTvl: big.NewInt(1_000_000_000_000), // 1M token (assuming 6 decimals)
+			limitParams: types.LimitParams{
+				Denom: testDenom,
+				Limiters: []types.Limiter{
+					// baseline = baseline minimum = 100k tokens
+					{
+						Period:          3_600 * time.Second,
+						BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens (assuming 6 decimals)
+						BaselineTvlPpm:  10_000,                         // 1%
+					},
+				},
+			},
+			timeSinceLastBlock: 12 * time.Second, // 12 second
+			prevCapacityList: []dtypes.SerializableInt{
+				dtypes.NewInt(0),
+			},
+			expectedCapacityList: []dtypes.SerializableInt{
+				dtypes.NewInt(333_333_333), // recovered by 12/3600 * 100k ~= 333.333
+			},
+		},
+		"Prev capacity = 2 * baseline, capacity_diff rounds down": {
+			bigTvl: big.NewInt(1_000_000_000_000), // 1M token (assuming 6 decimals)
+			limitParams: types.LimitParams{
+				Denom: testDenom,
+				Limiters: []types.Limiter{
+					// baseline = baseline minimum = 100k tokens
+					{
+						Period:          3_600 * time.Second,
+						BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens (assuming 6 decimals)
+						BaselineTvlPpm:  10_000,                         // 1%
+					},
+				},
+			},
+			timeSinceLastBlock: 12 * time.Second, // 12 second
+			prevCapacityList: []dtypes.SerializableInt{
+				dtypes.NewInt(200_000_000_000),
+			},
+			expectedCapacityList: []dtypes.SerializableInt{
+				dtypes.NewInt(199_666_666_667), // decayed by 12/3600 * 100k ~= 333.333
+			},
+		},
+		"baseline < prev capacity < 2 * baseline": {
+			bigTvl: big.NewInt(20_000_000_000_000), // 20M token (assuming 6 decimals)
+			limitParams: types.LimitParams{
+				Denom: testDenom,
+				Limiters: []types.Limiter{
+					// baseline = 200k tokens
+					{
+						Period:          3_600 * time.Second,
+						BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens (assuming 6 decimals)
+						BaselineTvlPpm:  10_000,                         // 1%
+					},
+					// baseline = 2M tokens
+					{
+						Period:          86_400 * time.Second,
+						BaselineMinimum: dtypes.NewInt(1_000_000_000_000), // 1M tokens (assuming 6 decimals)
+						BaselineTvlPpm:  100_000,                          // 10%
+					},
+				},
+			},
+			timeSinceLastBlock: time.Second + 150*time.Millisecond, // 1.15 second
+			prevCapacityList: []dtypes.SerializableInt{
+				dtypes.NewInt(329_000_000_000),
+				dtypes.NewInt(3_500_000_000_000),
+			},
+			expectedCapacityList: []dtypes.SerializableInt{
+				dtypes.NewInt(328_936_111_112),   // decayed by 1.15/3600 * 200k ~= 63.89
+				dtypes.NewInt(3_499_973_379_630), // decayed by 1.15/86400 * 2M ~= 26.62
+			},
+		},
+		"prev capacity > 2 * baseline + capacity < baseline": {
+			bigTvl: big.NewInt(20_000_000_000_000), // 20M token (assuming 6 decimals)
+			limitParams: types.LimitParams{
+				Denom: testDenom,
+				Limiters: []types.Limiter{
+					// baseline = 200k tokens
+					{
+						Period:          3_600 * time.Second,
+						BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens (assuming 6 decimals)
+						BaselineTvlPpm:  10_000,                         // 1%
+					},
+					// baseline = 2M tokens
+					{
+						Period:          86_400 * time.Second,
+						BaselineMinimum: dtypes.NewInt(1_000_000_000_000), // 1M tokens (assuming 6 decimals)
+						BaselineTvlPpm:  100_000,                          // 10%
+					},
+				},
+			},
+			timeSinceLastBlock: time.Second + 150*time.Millisecond, // 1.15 second
+			prevCapacityList: []dtypes.SerializableInt{
+				dtypes.NewInt(629_000_000_000),   // > 2 * baseline
+				dtypes.NewInt(1_200_000_000_000), // < baseline
+			},
+			expectedCapacityList: []dtypes.SerializableInt{
+				dtypes.NewInt(628_862_958_334),   // decayed by 1.15/3600 * (629k - 200k) ~= 137.04
+				dtypes.NewInt(1_200_026_620_370), // recovered by 1.15/86400 * 2M ~= 26.62
+			},
+		},
+		"Error: len(capacityList) != len(limiters)": {
+			bigTvl: big.NewInt(25_000_000_000_000), // 25M token (assuming 6 decimals)
+			limitParams: types.LimitParams{
+				Denom: testDenom,
+				Limiters: []types.Limiter{
+					// baseline = 25M * 1% = 250k tokens
+					{
+						Period:          3_600 * time.Second,
+						BaselineMinimum: dtypes.NewInt(100_000_000_000), // 100k tokens (assuming 6 decimals)
+						BaselineTvlPpm:  10_000,                         // 1%
+					},
+					// baseline = 25M * 10% = 2.5M tokens
+					{
+						Period:          86_400 * time.Second,
+						BaselineMinimum: dtypes.NewInt(1_000_000_000_000), // 1M tokens (assuming 6 decimals)
+						BaselineTvlPpm:  100_000,                          // 10%
+					},
+				},
+			},
+			timeSinceLastBlock: time.Second + 90*time.Millisecond, // 1.09 second
+			prevCapacityList: []dtypes.SerializableInt{
+				dtypes.NewInt(99_000_000_000),
+				dtypes.NewInt(990_000_000_000),
+				dtypes.NewInt(0),
+			},
+			expectedErr: errorsmod.Wrapf(
+				types.ErrMismatchedCapacityLimitersLength,
+				"denom = %v, len(limiters) = %v, len(prevCapacityList) = %v",
+				testDenom,
+				2,
+				3,
+			),
+		},
+	}
+
+	// Run tests.
+	for name, tc := range tests {
+		t.Run(name, func(t *testing.T) {
+			newCapacityList, err := util.CalculateNewCapacityList(
+				tc.bigTvl,
+				tc.limitParams,
+				tc.prevCapacityList,
+				tc.timeSinceLastBlock,
+			)
+
+			if tc.expectedErr != nil {
+				// Verify the function actually returned the expected error.
+				// NOTE: the previous `require.Error(t, tc.expectedErr, err)` only
+				// asserted that tc.expectedErr was non-nil (always true in this
+				// branch, with `err` silently used as the failure message) and
+				// never inspected the error returned by CalculateNewCapacityList.
+				require.Error(t, err)
+				require.Equal(t, tc.expectedErr.Error(), err.Error())
+				return
+			}
+
+			require.NoError(t, err)
+			require.Equal(t,
+				tc.expectedCapacityList,
+				newCapacityList,
+			)
+		})
+	}
+}