diff --git a/indexer/packages/postgres/__tests__/helpers/constants.ts b/indexer/packages/postgres/__tests__/helpers/constants.ts index 79007f2d9b..f873c46d64 100644 --- a/indexer/packages/postgres/__tests__/helpers/constants.ts +++ b/indexer/packages/postgres/__tests__/helpers/constants.ts @@ -226,6 +226,7 @@ export const defaultOrderGoodTilBlockTime: OrderCreateObject = { clientId: '2', goodTilBlock: undefined, goodTilBlockTime: '2023-01-22T00:00:00.000Z', + createdAtHeight: '1', orderFlags: ORDER_FLAG_LONG_TERM.toString(), }; diff --git a/indexer/packages/postgres/src/constants.ts b/indexer/packages/postgres/src/constants.ts index 953ea0cfc7..900c2e6038 100644 --- a/indexer/packages/postgres/src/constants.ts +++ b/indexer/packages/postgres/src/constants.ts @@ -1,12 +1,17 @@ import { CandleMessage_Resolution, ClobPairStatus } from '@dydxprotocol-indexer/v4-protos'; import config from './config'; +import AssetModel from './models/asset-model'; import AssetPositionModel from './models/asset-position-model'; import FillModel from './models/fill-model'; +import LiquidityTiersModel from './models/liquidity-tiers-model'; +import MarketModel from './models/market-model'; +import OraclePriceModel from './models/oracle-price-model'; import OrderModel from './models/order-model'; import PerpetualMarketModel from './models/perpetual-market-model'; import PerpetualPositionModel from './models/perpetual-position-model'; import SubaccountModel from './models/subaccount-model'; +import TransferModel from './models/transfer-model'; import { APITimeInForce, CandleResolution, @@ -81,12 +86,17 @@ export const TIME_IN_FORCE_TO_API_TIME_IN_FORCE: Record { logger.error({ at: 'dbHelpers#createModelToJsonFunctions', - message: `Failed to create or replace function dydx_to_json for model ${model.tableName}.`, + message: `Failed to create or replace function dydx_to_jsonb for model ${model.tableName}.`, error, }); throw error; diff --git a/indexer/packages/postgres/src/index.ts b/indexer/packages/postgres/src/index.ts index 46ccc0c6f0..f2546f00ee 100644 --- a/indexer/packages/postgres/src/index.ts +++ b/indexer/packages/postgres/src/index.ts @@ -3,11 +3,16 @@ export * from './constants'; export { default as Transaction } from './helpers/transaction'; export { postgresConfigSchema } from './config'; +export { default as AssetModel } from './models/asset-model'; export { default as AssetPositionModel } from './models/asset-position-model'; export { default as FillModel } from './models/fill-model'; +export { default as LiquidityTiersModel } from './models/liquidity-tiers-model'; +export { default as MarketModel } from './models/market-model'; +export { default as OraclePriceModel } from './models/oracle-price-model'; export { default as OrderModel } from './models/order-model'; export { default as PerpetualMarketModel } from './models/perpetual-market-model'; export { default as PerpetualPositionModel } from './models/perpetual-position-model'; +export { default as TransferModel } from './models/transfer-model'; export * as AssetTable from './stores/asset-table'; export * as AssetPositionTable from './stores/asset-position-table'; diff --git a/indexer/packages/postgres/src/lib/order-translations.ts b/indexer/packages/postgres/src/lib/order-translations.ts index d1bbe6f34e..dd8ae4677c 100644 --- a/indexer/packages/postgres/src/lib/order-translations.ts +++ b/indexer/packages/postgres/src/lib/order-translations.ts @@ -22,13 +22,11 @@ import { * * @param order */ -export async function convertToIndexerOrder( +export 
function convertToIndexerOrderWithSubaccount(
   order: OrderFromDatabase,
   perpetualMarket: PerpetualMarketFromDatabase,
-): Promise<IndexerOrder> {
-  const subaccount: SubaccountFromDatabase | undefined = await SubaccountTable.findById(
-    order.subaccountId,
-  );
+  subaccount: SubaccountFromDatabase,
+): IndexerOrder {
   if (!OrderTable.isLongTermOrConditionalOrder(order.orderFlags)) {
     logger.error({
       at: 'protocol-translations#convertToIndexerOrder',
@@ -77,3 +75,29 @@ export async function convertToIndexerOrder(
 
   return indexerOrder;
 }
+
+/**
+ * Converts an order from the database to an IndexerOrder proto.
+ * This is used to resend open stateful orders to Vulcan during Indexer fast sync
+ * to uncross the orderbook.
+ *
+ * @param order
+ */
+export async function convertToIndexerOrder(
+  order: OrderFromDatabase,
+  perpetualMarket: PerpetualMarketFromDatabase,
+): Promise<IndexerOrder> {
+  const subaccount: SubaccountFromDatabase | undefined = await SubaccountTable.findById(
+    order.subaccountId,
+  );
+
+  if (subaccount === undefined) {
+    logger.error({
+      at: 'protocol-translations#convertToIndexerOrder',
+      message: 'Subaccount for order not found',
+      order,
+    });
+    throw new Error(`Subaccount for order not found: ${order.subaccountId}`);
+  }
+  return convertToIndexerOrderWithSubaccount(order, perpetualMarket, subaccount);
+}
diff --git a/indexer/packages/postgres/src/loops/market-refresher.ts b/indexer/packages/postgres/src/loops/market-refresher.ts
index 164cf781d9..b7a1858387 100644
--- a/indexer/packages/postgres/src/loops/market-refresher.ts
+++ b/indexer/packages/postgres/src/loops/market-refresher.ts
@@ -40,6 +40,13 @@ export async function updateMarkets(options?: Options): Promise<void> {
   stats.timing(`${config.SERVICE_NAME}.loops.update_markets`, Date.now() - startTime);
 }
 
+/**
+ * Updates the markets map with the specified market.
+ */
+export function updateMarket(market: MarketFromDatabase): void {
+  idToMarket[market.id] = market;
+}
+
 /**
  * Gets the market for a given id.
  */
diff --git a/indexer/packages/postgres/src/models/asset-model.ts b/indexer/packages/postgres/src/models/asset-model.ts
index 6a68e2269d..51d1935606 100644
--- a/indexer/packages/postgres/src/models/asset-model.ts
+++ b/indexer/packages/postgres/src/models/asset-model.ts
@@ -59,6 +59,22 @@ export default class AssetModel extends Model {
     };
   }
 
+  /**
+   * A mapping from column name to JSON conversion expected.
+   * See getSqlConversionForDydxModelTypes for valid conversions.
+   *
+   * TODO(IND-239): Ensure that jsonSchema() / sqlToJsonConversions() / model fields match.
+ */ + static get sqlToJsonConversions() { + return { + id: 'string', + symbol: 'string', + atomicResolution: 'integer', + hasMarket: 'boolean', + marketId: 'integer', + }; + } + id!: string; symbol!: string; diff --git a/indexer/packages/postgres/src/models/fill-model.ts b/indexer/packages/postgres/src/models/fill-model.ts index c4fa9d2815..30927a6d57 100644 --- a/indexer/packages/postgres/src/models/fill-model.ts +++ b/indexer/packages/postgres/src/models/fill-model.ts @@ -84,7 +84,7 @@ export default class FillModel extends Model { transactionHash: { type: 'string' }, createdAt: { type: 'string', format: 'date-time' }, createdAtHeight: { type: 'string', pattern: IntegerPattern }, - clientMetadata: { type: 'string', pattern: IntegerPattern }, + clientMetadata: { type: ['string', 'null'], pattern: IntegerPattern }, fee: { type: 'string', pattern: NumericPattern }, }, }; diff --git a/indexer/packages/postgres/src/models/liquidity-tiers-model.ts b/indexer/packages/postgres/src/models/liquidity-tiers-model.ts index 470d9f94e7..97e595c837 100644 --- a/indexer/packages/postgres/src/models/liquidity-tiers-model.ts +++ b/indexer/packages/postgres/src/models/liquidity-tiers-model.ts @@ -33,6 +33,22 @@ export default class LiquidityTiersModel extends BaseModel { }; } + /** + * A mapping from column name to JSON conversion expected. + * See getSqlConversionForDydxModelTypes for valid conversions. + * + * TODO(IND-239): Ensure that jsonSchema() / sqlToJsonConversions() / model fields match. + */ + static get sqlToJsonConversions() { + return { + id: 'integer', + name: 'string', + initialMarginPpm: 'string', + maintenanceFractionPpm: 'string', + basePositionNotional: 'string', + }; + } + id!: number; QueryBuilderType!: UpsertQueryBuilder; diff --git a/indexer/packages/postgres/src/models/market-model.ts b/indexer/packages/postgres/src/models/market-model.ts index d60c196f2b..b4f5eeac57 100644 --- a/indexer/packages/postgres/src/models/market-model.ts +++ b/indexer/packages/postgres/src/models/market-model.ts @@ -51,6 +51,22 @@ export default class MarketModel extends Model { }; } + /** + * A mapping from column name to JSON conversion expected. + * See getSqlConversionForDydxModelTypes for valid conversions. + * + * TODO(IND-239): Ensure that jsonSchema() / sqlToJsonConversions() / model fields match. + */ + static get sqlToJsonConversions() { + return { + id: 'integer', + pair: 'string', + exponent: 'integer', + minPriceChangePpm: 'integer', + oraclePrice: 'string', + }; + } + id!: number; pair!: string; diff --git a/indexer/packages/postgres/src/models/oracle-price-model.ts b/indexer/packages/postgres/src/models/oracle-price-model.ts index 8da221df9e..6b0480c8b9 100644 --- a/indexer/packages/postgres/src/models/oracle-price-model.ts +++ b/indexer/packages/postgres/src/models/oracle-price-model.ts @@ -53,6 +53,22 @@ export default class OraclePriceModel extends Model { }; } + /** + * A mapping from column name to JSON conversion expected. + * See getSqlConversionForDydxModelTypes for valid conversions. + * + * TODO(IND-239): Ensure that jsonSchema() / sqlToJsonConversions() / model fields match. 
+ */ + static get sqlToJsonConversions() { + return { + id: 'string', + marketId: 'integer', + price: 'string', + effectiveAt: 'date-time', + effectiveAtHeight: 'string', + }; + } + id!: string; marketId!: number; diff --git a/indexer/packages/postgres/src/models/transfer-model.ts b/indexer/packages/postgres/src/models/transfer-model.ts index cd801cbefc..8fdb88b3dd 100644 --- a/indexer/packages/postgres/src/models/transfer-model.ts +++ b/indexer/packages/postgres/src/models/transfer-model.ts @@ -104,6 +104,28 @@ export default class TransferModel extends Model { }; } + /** + * A mapping from column name to JSON conversion expected. + * See getSqlConversionForDydxModelTypes for valid conversions. + * + * TODO(IND-239): Ensure that jsonSchema() / sqlToJsonConversions() / model fields match. + */ + static get sqlToJsonConversions() { + return { + id: 'string', + senderSubaccountId: 'string', + recipientSubaccountId: 'string', + senderWalletAddress: 'string', + recipientWalletAddress: 'string', + assetId: 'string', + size: 'string', + eventId: 'hex-string', + transactionHash: 'string', + createdAt: 'date-time', + createdAtHeight: 'string', + }; + } + id!: string; senderSubaccountId?: string; diff --git a/indexer/packages/postgres/src/stores/asset-position-table.ts b/indexer/packages/postgres/src/stores/asset-position-table.ts index bd9cce950c..127417ebac 100644 --- a/indexer/packages/postgres/src/stores/asset-position-table.ts +++ b/indexer/packages/postgres/src/stores/asset-position-table.ts @@ -20,6 +20,7 @@ import { } from '../types'; export function uuid(subaccountId: string, assetId: string): string { + // TODO(IND-483): Fix all uuid string substitutions to use Array.join. return getUuid(Buffer.from(`${subaccountId}-${assetId}`, BUFFER_ENCODING_UTF_8)); } diff --git a/indexer/packages/postgres/src/stores/candle-table.ts b/indexer/packages/postgres/src/stores/candle-table.ts index 71afe362f0..d43efb16cf 100644 --- a/indexer/packages/postgres/src/stores/candle-table.ts +++ b/indexer/packages/postgres/src/stores/candle-table.ts @@ -22,6 +22,7 @@ import { } from '../types'; export function uuid(startedAt: IsoString, ticker: string, resolution: CandleResolution): string { + // TODO(IND-483): Fix all uuid string substitutions to use Array.join. return getUuid(Buffer.from(`${startedAt}-${ticker}-${resolution}`, BUFFER_ENCODING_UTF_8)); } diff --git a/indexer/packages/postgres/src/stores/fill-table.ts b/indexer/packages/postgres/src/stores/fill-table.ts index 6263424ddd..daf40ff1b2 100644 --- a/indexer/packages/postgres/src/stores/fill-table.ts +++ b/indexer/packages/postgres/src/stores/fill-table.ts @@ -28,6 +28,7 @@ import { } from '../types'; export function uuid(eventId: Buffer, liquidity: Liquidity): string { + // TODO(IND-483): Fix all uuid string substitutions to use Array.join. return getUuid(Buffer.from(`${eventId.toString('hex')}-${liquidity}`, BUFFER_ENCODING_UTF_8)); } diff --git a/indexer/packages/postgres/src/stores/funding-index-updates-table.ts b/indexer/packages/postgres/src/stores/funding-index-updates-table.ts index 24ac8d02eb..54ab4d6abd 100644 --- a/indexer/packages/postgres/src/stores/funding-index-updates-table.ts +++ b/indexer/packages/postgres/src/stores/funding-index-updates-table.ts @@ -26,6 +26,7 @@ export function uuid( eventId: Buffer, perpetualId: string, ): string { + // TODO(IND-483): Fix all uuid string substitutions to use Array.join. 
return getUuid(Buffer.from(`${blockHeight}-${eventId.toString('hex')}-${perpetualId}`, BUFFER_ENCODING_UTF_8)); } diff --git a/indexer/packages/postgres/src/stores/oracle-price-table.ts b/indexer/packages/postgres/src/stores/oracle-price-table.ts index f78b82df3a..82ee2a3eb6 100644 --- a/indexer/packages/postgres/src/stores/oracle-price-table.ts +++ b/indexer/packages/postgres/src/stores/oracle-price-table.ts @@ -24,6 +24,7 @@ import { export function uuid( marketId: number, height: string, ): string { + // TODO(IND-483): Fix all uuid string substitutions to use Array.join. return getUuid(Buffer.from(`${marketId.toString()}-${height}`, BUFFER_ENCODING_UTF_8)); } diff --git a/indexer/packages/postgres/src/stores/order-table.ts b/indexer/packages/postgres/src/stores/order-table.ts index 70f8e96f01..8e93ce61f4 100644 --- a/indexer/packages/postgres/src/stores/order-table.ts +++ b/indexer/packages/postgres/src/stores/order-table.ts @@ -26,6 +26,7 @@ export function uuid( clobPairId: string, orderFlags: string, ): string { + // TODO(IND-483): Fix all uuid string substitutions to use Array.join. return getUuid( Buffer.from( `${subaccountId}-${clientId}-${clobPairId}-${orderFlags}`, diff --git a/indexer/packages/postgres/src/stores/perpetual-position-table.ts b/indexer/packages/postgres/src/stores/perpetual-position-table.ts index 24077a68ba..29657a10d9 100644 --- a/indexer/packages/postgres/src/stores/perpetual-position-table.ts +++ b/indexer/packages/postgres/src/stores/perpetual-position-table.ts @@ -45,6 +45,7 @@ const DEFAULT_SUBACCOUNT_UPDATE_DEFAULT_POSITION_FIELDS = { }; export function uuid(subaccountId: string, openEventId: Buffer): string { + // TODO(IND-483): Fix all uuid string substitutions to use Array.join. return getUuid(Buffer.from(`${subaccountId}-${openEventId.toString('hex')}`, BUFFER_ENCODING_UTF_8)); } diff --git a/indexer/packages/postgres/src/stores/pnl-ticks-table.ts b/indexer/packages/postgres/src/stores/pnl-ticks-table.ts index 85438c3e01..7de840933f 100644 --- a/indexer/packages/postgres/src/stores/pnl-ticks-table.ts +++ b/indexer/packages/postgres/src/stores/pnl-ticks-table.ts @@ -22,6 +22,7 @@ export function uuid( subaccountId: string, createdAt: string, ): string { + // TODO(IND-483): Fix all uuid string substitutions to use Array.join. return getUuid( Buffer.from( `${subaccountId}-${createdAt}`, diff --git a/indexer/packages/postgres/src/stores/subaccount-table.ts b/indexer/packages/postgres/src/stores/subaccount-table.ts index 9395d3e533..e71401cd88 100644 --- a/indexer/packages/postgres/src/stores/subaccount-table.ts +++ b/indexer/packages/postgres/src/stores/subaccount-table.ts @@ -23,6 +23,7 @@ import { } from '../types'; export function uuid(address: string, subaccountNumber: number): string { + // TODO(IND-483): Fix all uuid string substitutions to use Array.join. return getUuid(Buffer.from(`${address}-${subaccountNumber}`, BUFFER_ENCODING_UTF_8)); } diff --git a/indexer/packages/postgres/src/stores/transaction-table.ts b/indexer/packages/postgres/src/stores/transaction-table.ts index 95a3ea1b6e..1b6311daf3 100644 --- a/indexer/packages/postgres/src/stores/transaction-table.ts +++ b/indexer/packages/postgres/src/stores/transaction-table.ts @@ -20,6 +20,7 @@ import { } from '../types'; export function uuid(blockHeight: string, transactionIndex: number): string { + // TODO(IND-483): Fix all uuid string substitutions to use Array.join. 
return getUuid(Buffer.from(`${blockHeight}-${transactionIndex}`, BUFFER_ENCODING_UTF_8)); } diff --git a/indexer/packages/postgres/src/stores/transfer-table.ts b/indexer/packages/postgres/src/stores/transfer-table.ts index d32a5b3901..9b89c9fbe6 100644 --- a/indexer/packages/postgres/src/stores/transfer-table.ts +++ b/indexer/packages/postgres/src/stores/transfer-table.ts @@ -31,6 +31,7 @@ export function uuid( senderWalletAddress?: string, recipientWalletAddress?: string, ): string { + // TODO(IND-483): Fix all uuid string substitutions to use Array.join. return getUuid( Buffer.from( `${senderSubaccountId}-${recipientSubaccountId}-${senderWalletAddress}-${recipientWalletAddress}-${eventId.toString('hex')}-${assetId}`, diff --git a/indexer/packages/postgres/src/types/db-model-types.ts b/indexer/packages/postgres/src/types/db-model-types.ts index edcc4f9606..5243a413e7 100644 --- a/indexer/packages/postgres/src/types/db-model-types.ts +++ b/indexer/packages/postgres/src/types/db-model-types.ts @@ -66,6 +66,7 @@ export interface OrderFromDatabase extends IdBasedModelFromDatabase { updatedAtHeight: string; goodTilBlock?: string; goodTilBlockTime?: string; + // createdAtHeight is optional because short term orders do not have a createdAtHeight. createdAtHeight?: string; clientMetadata: string; triggerPrice?: string; diff --git a/indexer/packages/postgres/src/types/order-types.ts b/indexer/packages/postgres/src/types/order-types.ts index 8c027885a6..373ea21014 100644 --- a/indexer/packages/postgres/src/types/order-types.ts +++ b/indexer/packages/postgres/src/types/order-types.ts @@ -62,6 +62,7 @@ export interface OrderCreateObject { updatedAtHeight: string; goodTilBlock?: string; goodTilBlockTime?: string; + // createdAtHeight is optional because short term orders do not have a createdAtHeight. 
createdAtHeight?: string;
   clientMetadata: string;
   triggerPrice?: string,
diff --git a/indexer/packages/redis/__tests__/caches/canceled-orders-cache.test.ts b/indexer/packages/redis/__tests__/caches/canceled-orders-cache.test.ts
index 41f1a7b697..e0b1b73ba8 100644
--- a/indexer/packages/redis/__tests__/caches/canceled-orders-cache.test.ts
+++ b/indexer/packages/redis/__tests__/caches/canceled-orders-cache.test.ts
@@ -4,8 +4,11 @@ import {
   CANCELED_ORDER_WINDOW_SIZE,
   isOrderCanceled,
   addCanceledOrderId,
-  removeOrderFromCache,
+  removeOrderFromCaches,
+  getOrderCanceledStatus,
+  addBestEffortCanceledOrderId,
 } from '../../src/caches/canceled-orders-cache';
+import { CanceledOrderStatus } from '../../src';
 
 describe('cancelledOrdersCache', () => {
   const openOrderId1: string = 'order1';
@@ -40,13 +43,13 @@ describe('cancelledOrdersCache', () => {
     expect(isCanceled1).toEqual(true);
     expect(isCanceled2).toEqual(true);
 
-    let numRemoved: number = await removeOrderFromCache(openOrderId1, client);
-    expect(numRemoved).toEqual(1);
+    await removeOrderFromCaches(openOrderId1, client);
     isCanceled1 = await isOrderCanceled(openOrderId1, client);
     expect(isCanceled1).toEqual(false);
 
-    numRemoved = await removeOrderFromCache(openOrderId3, client);
-    expect(numRemoved).toEqual(0);
+    await removeOrderFromCaches(openOrderId3, client);
+    const isCanceled3: boolean = await isOrderCanceled(openOrderId3, client);
+    expect(isCanceled3).toEqual(false);
   });
 
   it('removes cancelled orders outside of window size', async () => {
@@ -61,4 +64,22 @@ describe('cancelledOrdersCache', () => {
 
     expect(isCanceled3).toEqual(true);
   });
+  describe('getOrderCanceledStatus', () => {
+    it('correctly returns CANCELED', async () => {
+      await addCanceledOrderId(openOrderId1, 10, client);
+      const status: CanceledOrderStatus = await getOrderCanceledStatus(openOrderId1, client);
+      expect(status).toEqual(CanceledOrderStatus.CANCELED);
+    });
+
+    it('correctly returns BEST_EFFORT_CANCELED', async () => {
+      await addBestEffortCanceledOrderId(openOrderId1, 10, client);
+      const status: CanceledOrderStatus = await getOrderCanceledStatus(openOrderId1, client);
+      expect(status).toEqual(CanceledOrderStatus.BEST_EFFORT_CANCELED);
+    });
+
+    it('correctly returns NOT_CANCELED', async () => {
+      const status: CanceledOrderStatus = await getOrderCanceledStatus(openOrderId1, client);
+      expect(status).toEqual(CanceledOrderStatus.NOT_CANCELED);
+    });
+  });
 });
diff --git a/indexer/packages/redis/__tests__/caches/state-filled-quantums-cache.test.ts b/indexer/packages/redis/__tests__/caches/state-filled-quantums-cache.test.ts
new file mode 100644
index 0000000000..5031f40abd
--- /dev/null
+++ b/indexer/packages/redis/__tests__/caches/state-filled-quantums-cache.test.ts
@@ -0,0 +1,45 @@
+import { deleteAllAsync, ttl } from '../../src/helpers/redis';
+import { redis as client } from '../helpers/utils';
+import { orderId } from './constants';
+import { OrderTable } from '@dydxprotocol-indexer/postgres';
+import {
+  STATE_FILLED_QUANTUMS_TTL_SECONDS,
+  getCacheKey,
+  getStateFilledQuantums,
+  updateStateFilledQuantums,
+} from '../../src/caches/state-filled-quantums-cache';
+
+describe('stateFilledQuantumsCache', () => {
+  const orderUuid: string = OrderTable.orderIdToUuid(orderId);
+
+  beforeEach(async () => {
+    await deleteAllAsync(client);
+  });
+
+  afterEach(async () => {
+    await deleteAllAsync(client);
+  });
+
+  describe('updateStateFilledQuantums', () => {
+    it('updates the state filled amount for an order id', async () => {
+      const filledQuantums: string = '1000';
+      await updateStateFilledQuantums(orderUuid, filledQuantums, client);
+
+      expect(await getStateFilledQuantums(orderUuid, client)).toEqual(filledQuantums);
+      expect(await ttl(client, getCacheKey(orderUuid))).toEqual(STATE_FILLED_QUANTUMS_TTL_SECONDS);
+    });
+  });
+
+  describe('getStateFilledQuantums', () => {
+    it('gets the state filled amount for an order id', async () => {
+      const filledQuantums: string = '1000';
+      await updateStateFilledQuantums(orderUuid, filledQuantums, client);
+
+      expect(await getStateFilledQuantums(orderUuid, client)).toEqual(filledQuantums);
+    });
+
+    it('returns undefined if order id does not exist', async () => {
+      expect(await getStateFilledQuantums(orderUuid, client)).toEqual(undefined);
+    });
+  });
+});
diff --git a/indexer/packages/redis/src/caches/canceled-orders-cache.ts b/indexer/packages/redis/src/caches/canceled-orders-cache.ts
index f3d19a02a1..e2bf109336 100644
--- a/indexer/packages/redis/src/caches/canceled-orders-cache.ts
+++ b/indexer/packages/redis/src/caches/canceled-orders-cache.ts
@@ -1,9 +1,11 @@
 import { Callback, RedisClient } from 'redis';
 
 import { zRemAsync, zScoreAsync } from '../helpers/redis';
+import { CanceledOrderStatus } from '../types';
 import { addCanceledOrderIdScript } from './scripts';
 
 // Cache of cancelled orders
 export const CANCELED_ORDERS_CACHE_KEY: string = 'v4/cancelled_orders';
+export const BEST_EFFORT_CANCELED_ORDERS_CACHE_KEY: string = 'v4/best_effort_cancelled_orders';
 // 10 seconds in milliseconds
 export const CANCELED_ORDER_WINDOW_SIZE: number = 30 * 1000;
 
@@ -24,16 +26,62 @@ export async function isOrderCanceled(
   orderId: string,
   client: RedisClient,
 ): Promise<boolean> {
-  const score: string | null = await
-    zScoreAsync({ hash: CANCELED_ORDERS_CACHE_KEY, key: orderId }, client);
-  return score !== null;
+  const [
+    canceledScore,
+    bestEffortCanceledScore,
+  ]: (string | null)[] = await Promise.all([
+    zScoreAsync({ hash: CANCELED_ORDERS_CACHE_KEY, key: orderId }, client),
+    zScoreAsync({ hash: BEST_EFFORT_CANCELED_ORDERS_CACHE_KEY, key: orderId }, client),
+  ]);
+  return canceledScore !== null || bestEffortCanceledScore !== null;
 }
 
-export async function removeOrderFromCache(
+export async function getOrderCanceledStatus(
   orderId: string,
   client: RedisClient,
+): Promise<CanceledOrderStatus> {
+  const [
+    canceledScore,
+    bestEffortCanceledScore,
+  ]: (string | null)[] = await Promise.all([
+    zScoreAsync({ hash: CANCELED_ORDERS_CACHE_KEY, key: orderId }, client),
+    zScoreAsync({ hash: BEST_EFFORT_CANCELED_ORDERS_CACHE_KEY, key: orderId }, client),
+  ]);
+
+  if (canceledScore !== null) {
+    return CanceledOrderStatus.CANCELED;
+  }
+
+  if (bestEffortCanceledScore !== null) {
+    return CanceledOrderStatus.BEST_EFFORT_CANCELED;
+  }
+
+  return CanceledOrderStatus.NOT_CANCELED;
+}
+
+export async function removeOrderFromCaches(
+  orderId: string,
+  client: RedisClient,
+): Promise<void> {
+  await Promise.all([
+    zRemAsync({ hash: CANCELED_ORDERS_CACHE_KEY, key: orderId }, client),
+    zRemAsync({ hash: BEST_EFFORT_CANCELED_ORDERS_CACHE_KEY, key: orderId }, client),
+  ]);
+}
+
+/**
+ * addBestEffortCanceledOrderId adds the order id to the best effort canceled orders cache.
+ *
+ * @param orderId
+ * @param timestamp
+ * @param client
+ */
+export async function addBestEffortCanceledOrderId(
+  orderId: string,
+  timestamp: number,
+  client: RedisClient,
 ): Promise<void> {
-  return zRemAsync({ hash: CANCELED_ORDERS_CACHE_KEY, key: orderId }, client);
+  return addOrderIdtoCache(orderId, timestamp, client, BEST_EFFORT_CANCELED_ORDERS_CACHE_KEY);
 }
 
 /**
@@ -47,6 +95,23 @@ export async function addCanceledOrderId(
   orderId: string,
   timestamp: number,
   client: RedisClient,
+): Promise<void> {
+  return addOrderIdtoCache(orderId, timestamp, client, CANCELED_ORDERS_CACHE_KEY);
+}
+
+/**
+ * addOrderIdtoCache adds the order id to the cacheKey's cache.
+ *
+ * @param orderId
+ * @param timestamp
+ * @param client
+ * @param cacheKey
+ */
+export async function addOrderIdtoCache(
+  orderId: string,
+  timestamp: number,
+  client: RedisClient,
+  cacheKey: string,
 ): Promise<void> {
   const numKeys: number = 2;
   let evalAsync: (
@@ -69,7 +133,7 @@ export async function addCanceledOrderId(
   client.evalsha(
     addCanceledOrderIdScript.hash,
     numKeys,
-    CANCELED_ORDERS_CACHE_KEY,
+    cacheKey,
     CANCELED_ORDER_WINDOW_SIZE,
     canceledOrderId,
     currentTimestampMs,
diff --git a/indexer/packages/redis/src/caches/state-filled-quantums-cache.ts b/indexer/packages/redis/src/caches/state-filled-quantums-cache.ts
new file mode 100644
index 0000000000..510f91a918
--- /dev/null
+++ b/indexer/packages/redis/src/caches/state-filled-quantums-cache.ts
@@ -0,0 +1,52 @@
+import { RedisClient } from 'redis';
+
+import { getAsync, setexAsync } from '../helpers/redis';
+
+export const STATE_FILLED_QUANTUMS_CACHE_KEY_PREFIX: string = 'v4/state_filled_quantums/';
+export const STATE_FILLED_QUANTUMS_TTL_SECONDS: number = 300; // 5 minutes
+
+/**
+ * Updates the state-filled quantums for an order id. This is the total filled quantums of the order
+ * in the state of the network.
+ * @param orderId
+ * @param filledQuantums
+ * @param client
+ */
+export async function updateStateFilledQuantums(
+  orderId: string,
+  filledQuantums: string,
+  client: RedisClient,
+): Promise<void> {
+  await setexAsync({
+    key: getCacheKey(orderId),
+    value: filledQuantums,
+    timeToLiveSeconds: STATE_FILLED_QUANTUMS_TTL_SECONDS,
+  }, client);
+}
+
+/**
+ * Gets the state-filled quantums for an order id. This is the total filled quantums of the order
+ * in the state of the network.
+ * @param orderId
+ * @param client
+ * @returns
+ */
+export async function getStateFilledQuantums(
+  orderId: string,
+  client: RedisClient,
+): Promise<string | undefined> {
+  const filledQuantums: string | null = await getAsync(
+    getCacheKey(orderId),
+    client,
+  );
+
+  if (filledQuantums === null) {
+    return undefined;
+  }
+
+  return filledQuantums;
+}
+
+export function getCacheKey(orderId: string): string {
+  return `${STATE_FILLED_QUANTUMS_CACHE_KEY_PREFIX}${orderId}`;
+}
diff --git a/indexer/packages/redis/src/index.ts b/indexer/packages/redis/src/index.ts
index c1b716f726..de661b0e0a 100644
--- a/indexer/packages/redis/src/index.ts
+++ b/indexer/packages/redis/src/index.ts
@@ -10,6 +10,7 @@ export * as OrderbookLevelsCache from './caches/orderbook-levels-cache';
 export * as LatestAccountPnlTicksCache from './caches/latest-account-pnl-ticks-cache';
 export * as CanceledOrdersCache from './caches/canceled-orders-cache';
 export * as StatefulOrderUpdatesCache from './caches/stateful-order-updates-cache';
+export * as StateFilledQuantumsCache from './caches/state-filled-quantums-cache';
 export { placeOrder } from './caches/place-order';
 export { removeOrder } from './caches/remove-order';
 export { updateOrder } from './caches/update-order';
diff --git a/indexer/packages/redis/src/types.ts b/indexer/packages/redis/src/types.ts
index 0f67c7788d..76e2339d27 100644
--- a/indexer/packages/redis/src/types.ts
+++ b/indexer/packages/redis/src/types.ts
@@ -67,6 +67,12 @@ export type LuaScript = {
   readonly hash: string;
 };
 
+export enum CanceledOrderStatus {
+  CANCELED = 'CANCELED',
+  BEST_EFFORT_CANCELED = 'BEST_EFFORT_CANCELED',
+  NOT_CANCELED = 'NOT_CANCELED',
+}
+
 /* ------- PNL Creation TYPES ------- */
 export type PnlTickForSubaccounts = {
   // Stores a PnlTicksCreateObject for the most recent pnl tick for each subaccount.
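Reviewer note: taken together, the redis changes above expose two per-order lookups from the package root, CanceledOrdersCache.getOrderCanceledStatus and StateFilledQuantumsCache.getStateFilledQuantums. Below is a minimal consumer-side sketch of how they compose; the wrapper classifyRemovedOrder and its return shape are hypothetical, and only the cache APIs and the CanceledOrderStatus enum come from this diff.

import { RedisClient } from 'redis';
import {
  CanceledOrdersCache,
  CanceledOrderStatus,
  StateFilledQuantumsCache,
} from '@dydxprotocol-indexer/redis';

// Hypothetical helper: classify a removed order and fetch the filled quantums
// last recorded from on-chain state, in a single round of redis lookups.
async function classifyRemovedOrder(
  orderUuid: string,
  client: RedisClient,
): Promise<{ status: CanceledOrderStatus, stateFilledQuantums?: string }> {
  const [status, stateFilledQuantums] = await Promise.all([
    // CANCELED takes precedence over BEST_EFFORT_CANCELED in getOrderCanceledStatus.
    CanceledOrdersCache.getOrderCanceledStatus(orderUuid, client),
    // Resolves to undefined once the 5-minute TTL expires or if no state fill was recorded.
    StateFilledQuantumsCache.getStateFilledQuantums(orderUuid, client),
  ]);
  return { status, stateFilledQuantums };
}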
diff --git a/indexer/services/comlink/src/controllers/api/v4/addresses-controller.ts b/indexer/services/comlink/src/controllers/api/v4/addresses-controller.ts index 8d0081e491..dad65ae2f7 100644 --- a/indexer/services/comlink/src/controllers/api/v4/addresses-controller.ts +++ b/indexer/services/comlink/src/controllers/api/v4/addresses-controller.ts @@ -269,6 +269,7 @@ router.get( 'AddressesController GET /:address', 'Addresses error', error, + req, res, ); } @@ -308,6 +309,7 @@ router.get( 'AddressesController GET /:address/subaccountNumber/:subaccountNumber', 'Addresses subaccount error', error, + req, res, ); } finally { diff --git a/indexer/services/comlink/src/controllers/api/v4/asset-positions-controller.ts b/indexer/services/comlink/src/controllers/api/v4/asset-positions-controller.ts index 2ac642756f..63aa7dccc1 100644 --- a/indexer/services/comlink/src/controllers/api/v4/asset-positions-controller.ts +++ b/indexer/services/comlink/src/controllers/api/v4/asset-positions-controller.ts @@ -174,6 +174,7 @@ router.get( 'AssetPositionsController GET /', 'Asset positions error', error, + req, res, ); } finally { diff --git a/indexer/services/comlink/src/controllers/api/v4/candles-controller.ts b/indexer/services/comlink/src/controllers/api/v4/candles-controller.ts index b25571d64a..28a6ddfc85 100644 --- a/indexer/services/comlink/src/controllers/api/v4/candles-controller.ts +++ b/indexer/services/comlink/src/controllers/api/v4/candles-controller.ts @@ -101,6 +101,7 @@ router.get( 'CandlesController GET /perpetualMarkets/:ticker', 'Candles error', error, + req, res, ); } finally { diff --git a/indexer/services/comlink/src/controllers/api/v4/compliance-controller.ts b/indexer/services/comlink/src/controllers/api/v4/compliance-controller.ts index 501a2535dd..40aa9bc4c6 100644 --- a/indexer/services/comlink/src/controllers/api/v4/compliance-controller.ts +++ b/indexer/services/comlink/src/controllers/api/v4/compliance-controller.ts @@ -144,6 +144,7 @@ router.get( 'ComplianceController GET /', 'Compliance error', error, + req, res, ); } finally { diff --git a/indexer/services/comlink/src/controllers/api/v4/fills-controller.ts b/indexer/services/comlink/src/controllers/api/v4/fills-controller.ts index fa4ac8cacd..0a241c8e78 100644 --- a/indexer/services/comlink/src/controllers/api/v4/fills-controller.ts +++ b/indexer/services/comlink/src/controllers/api/v4/fills-controller.ts @@ -161,6 +161,7 @@ router.get( 'FillsController GET /', 'Fills error', error, + req, res, ); } finally { diff --git a/indexer/services/comlink/src/controllers/api/v4/height-controller.ts b/indexer/services/comlink/src/controllers/api/v4/height-controller.ts index 018fd8fa6c..b6d6a0dbdd 100644 --- a/indexer/services/comlink/src/controllers/api/v4/height-controller.ts +++ b/indexer/services/comlink/src/controllers/api/v4/height-controller.ts @@ -37,7 +37,7 @@ router.get( rejectRestrictedCountries, rateLimiterMiddleware(getReqRateLimiter), ExportResponseCodeStats({ controllerName }), - async (_req: express.Request, res: express.Response) => { + async (req: express.Request, res: express.Response) => { const start: number = Date.now(); try { const controller: HeightController = new HeightController(); @@ -49,6 +49,7 @@ router.get( 'HeightController GET /', 'Height error', error, + req, res, ); } finally { diff --git a/indexer/services/comlink/src/controllers/api/v4/historical-funding-controller.ts b/indexer/services/comlink/src/controllers/api/v4/historical-funding-controller.ts index ea64e0e2e0..a256521d40 100644 --- 
a/indexer/services/comlink/src/controllers/api/v4/historical-funding-controller.ts +++ b/indexer/services/comlink/src/controllers/api/v4/historical-funding-controller.ts @@ -116,6 +116,7 @@ router.get( 'HistoricalFundingController GET /', 'HistoricalFunding error', error, + req, res, ); } finally { diff --git a/indexer/services/comlink/src/controllers/api/v4/historical-pnl-controller.ts b/indexer/services/comlink/src/controllers/api/v4/historical-pnl-controller.ts index 687b3da264..e2045389e1 100644 --- a/indexer/services/comlink/src/controllers/api/v4/historical-pnl-controller.ts +++ b/indexer/services/comlink/src/controllers/api/v4/historical-pnl-controller.ts @@ -128,6 +128,7 @@ router.get( 'HistoricalPnlController GET /', 'Historical Pnl error', error, + req, res, ); } finally { diff --git a/indexer/services/comlink/src/controllers/api/v4/orderbook-controller.ts b/indexer/services/comlink/src/controllers/api/v4/orderbook-controller.ts index d80822e1b5..7c80ce35f4 100644 --- a/indexer/services/comlink/src/controllers/api/v4/orderbook-controller.ts +++ b/indexer/services/comlink/src/controllers/api/v4/orderbook-controller.ts @@ -79,6 +79,7 @@ router.get( 'OrderbooksController GET /perpetualMarket/:ticker', 'Orderbooks error', error, + req, res, ); } finally { diff --git a/indexer/services/comlink/src/controllers/api/v4/orders-controller.ts b/indexer/services/comlink/src/controllers/api/v4/orders-controller.ts index 4c65a72971..19466174cb 100644 --- a/indexer/services/comlink/src/controllers/api/v4/orders-controller.ts +++ b/indexer/services/comlink/src/controllers/api/v4/orders-controller.ts @@ -262,6 +262,7 @@ router.get( 'OrdersController GET /', 'Orders error', error, + req, res, ); } finally { @@ -300,6 +301,7 @@ router.get( 'OrdersController GET /:orderId', 'Orders error', error, + req, res, ); } finally { diff --git a/indexer/services/comlink/src/controllers/api/v4/perpetual-markets-controller.ts b/indexer/services/comlink/src/controllers/api/v4/perpetual-markets-controller.ts index 6c1dda4ff1..0962f98b1f 100644 --- a/indexer/services/comlink/src/controllers/api/v4/perpetual-markets-controller.ts +++ b/indexer/services/comlink/src/controllers/api/v4/perpetual-markets-controller.ts @@ -142,6 +142,7 @@ router.get( 'PerpetualMarketController GET /', 'PerpetualMarket error', error, + req, res, ); } finally { diff --git a/indexer/services/comlink/src/controllers/api/v4/perpetual-positions-controller.ts b/indexer/services/comlink/src/controllers/api/v4/perpetual-positions-controller.ts index 3815be5a4e..eadba62063 100644 --- a/indexer/services/comlink/src/controllers/api/v4/perpetual-positions-controller.ts +++ b/indexer/services/comlink/src/controllers/api/v4/perpetual-positions-controller.ts @@ -198,6 +198,7 @@ router.get( 'PerpetualPositionsController GET /', 'Perpetual positions error', error, + req, res, ); } finally { diff --git a/indexer/services/comlink/src/controllers/api/v4/sparklines-controller.ts b/indexer/services/comlink/src/controllers/api/v4/sparklines-controller.ts index 8c82eb8642..a426c88eb4 100644 --- a/indexer/services/comlink/src/controllers/api/v4/sparklines-controller.ts +++ b/indexer/services/comlink/src/controllers/api/v4/sparklines-controller.ts @@ -90,6 +90,7 @@ router.get( 'SparklinesController GET /', 'Sparklines error', error, + req, res, ); } finally { diff --git a/indexer/services/comlink/src/controllers/api/v4/trades-controller.ts b/indexer/services/comlink/src/controllers/api/v4/trades-controller.ts index 87d7a6a428..a84cf515b1 100644 --- 
a/indexer/services/comlink/src/controllers/api/v4/trades-controller.ts +++ b/indexer/services/comlink/src/controllers/api/v4/trades-controller.ts @@ -116,6 +116,7 @@ router.get( 'TradesController GET /perpetualMarket/:ticker', 'Trades error', error, + req, res, ); } finally { diff --git a/indexer/services/comlink/src/controllers/api/v4/transfers-controller.ts b/indexer/services/comlink/src/controllers/api/v4/transfers-controller.ts index c854f9c244..860d9019e8 100644 --- a/indexer/services/comlink/src/controllers/api/v4/transfers-controller.ts +++ b/indexer/services/comlink/src/controllers/api/v4/transfers-controller.ts @@ -163,6 +163,7 @@ router.get( 'TransfersController GET /', 'Transfers error', error, + req, res, ); } finally { diff --git a/indexer/services/comlink/src/lib/helpers.ts b/indexer/services/comlink/src/lib/helpers.ts index 97927455dd..e9feb31700 100644 --- a/indexer/services/comlink/src/lib/helpers.ts +++ b/indexer/services/comlink/src/lib/helpers.ts @@ -48,6 +48,7 @@ export function handleControllerError( at: string, message: string, error: Error, + req: express.Request, res: express.Response, ): express.Response { if (error instanceof NotFoundError) { @@ -57,6 +58,7 @@ export function handleControllerError( at, message, error, + req, res, ); } @@ -65,6 +67,7 @@ function handleInternalServerError( at: string, message: string, error: Error, + req: express.Request, res: express.Response, ): express.Response { if (config.isDevelopment()) { @@ -76,6 +79,8 @@ function handleInternalServerError( at, message, error, + params: JSON.stringify(req.params), + query: JSON.stringify(req.query), }); return createInternalServerErrorResponse(res); } diff --git a/indexer/services/ender/__tests__/handlers/asset-handler.test.ts b/indexer/services/ender/__tests__/handlers/asset-handler.test.ts index bb96c18360..32557975cb 100644 --- a/indexer/services/ender/__tests__/handlers/asset-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/asset-handler.test.ts @@ -36,6 +36,7 @@ import { } from '../helpers/constants'; import { updateBlockCache } from '../../src/caches/block-cache'; import { createPostgresFunctions } from '../../src/helpers/postgres/postgres-functions'; +import config from '../../src/config'; describe('assetHandler', () => { beforeAll(async () => { @@ -61,6 +62,7 @@ describe('assetHandler', () => { afterEach(async () => { await dbHelpers.clearData(); + assetRefresher.clear(); jest.clearAllMocks(); }); @@ -98,51 +100,82 @@ describe('assetHandler', () => { }); }); - it('fails when market doesnt exist for asset', async () => { - const transactionIndex: number = 0; - const kafkaMessage: KafkaMessage = createKafkaMessageFromAssetEvent({ - assetEvent: defaultAssetCreateEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); - - const message: string = 'Unable to find market with id: 0'; - await expect(onMessage(kafkaMessage)).rejects.toThrowError( - new Error(message), - ); - }); + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'fails when market doesnt exist for asset (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_ASSET_CREATE_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; + const kafkaMessage: KafkaMessage = createKafkaMessageFromAssetEvent({ + assetEvent: defaultAssetCreateEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); - it('creates new asset', async () => { - await 
MarketTable.create(testConstants.defaultMarket); - await marketRefresher.updateMarkets(); - const transactionIndex: number = 0; - - const assetEvent: AssetCreateEventV1 = defaultAssetCreateEvent; - const kafkaMessage: KafkaMessage = createKafkaMessageFromAssetEvent({ - assetEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, + await expect(onMessage(kafkaMessage)).rejects.toThrowError( + 'Unable to find market with id: 0', + ); }); - // Confirm there is no existing asset to or from the sender subaccount - await expectNoExistingAssets(); - await onMessage(kafkaMessage); + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'creates new asset (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_ASSET_CREATE_HANDLER_SQL_FUNCTION = useSqlFunction; + await MarketTable.create(testConstants.defaultMarket); + await marketRefresher.updateMarkets(); + const transactionIndex: number = 0; - const newAssets: AssetFromDatabase[] = await AssetTable.findAll( - {}, - [], { - orderBy: [[AssetColumns.id, Ordering.ASC]], + const assetEvent: AssetCreateEventV1 = defaultAssetCreateEvent; + const kafkaMessage: KafkaMessage = createKafkaMessageFromAssetEvent({ + assetEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, }); - expect(newAssets.length).toEqual(1); - expectAssetMatchesEvent(assetEvent, newAssets[0]); - expectTimingStats(); - const asset: AssetFromDatabase = assetRefresher.getAssetFromId('0'); - expect(asset).toBeDefined(); - }); + // Confirm there is no existing asset to or from the sender subaccount + await expectNoExistingAssets(); + + await onMessage(kafkaMessage); + + const newAssets: AssetFromDatabase[] = await AssetTable.findAll( + {}, + [], { + orderBy: [[AssetColumns.id, Ordering.ASC]], + }); + expect(newAssets.length).toEqual(1); + expectAssetMatchesEvent(assetEvent, newAssets[0]); + if (!useSqlFunction) { + expectTimingStats(); + } + const asset: AssetFromDatabase = assetRefresher.getAssetFromId('0'); + expect(asset).toBeDefined(); + }); }); function expectTimingStats() { diff --git a/indexer/services/ender/__tests__/handlers/funding-handler.test.ts b/indexer/services/ender/__tests__/handlers/funding-handler.test.ts index 6b9b8ec117..5ad391ead2 100644 --- a/indexer/services/ender/__tests__/handlers/funding-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/funding-handler.test.ts @@ -42,6 +42,7 @@ import { redisClient } from '../../src/helpers/redis/redis-controller'; import { bigIntToBytes } from '@dydxprotocol-indexer/v4-proto-parser'; import { startPriceCache } from '../../src/caches/price-cache'; import { createPostgresFunctions } from '../../src/helpers/postgres/postgres-functions'; +import config from '../../src/config'; describe('fundingHandler', () => { beforeAll(async () => { @@ -114,201 +115,273 @@ describe('fundingHandler', () => { }); }); - it('successfully processes single premium sample event', async () => { - const kafkaMessage: KafkaMessage = createKafkaMessageFromFundingEvents({ - fundingEvents: [defaultFundingUpdateSampleEvent], - height: defaultHeight, - time: defaultTime, + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'successfully processes single premium sample event (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_FUNDING_HANDLER_SQL_FUNCTION = useSqlFunction; + const kafkaMessage: KafkaMessage = 
createKafkaMessageFromFundingEvents({ + fundingEvents: [defaultFundingUpdateSampleEvent], + height: defaultHeight, + time: defaultTime, + }); + + await onMessage(kafkaMessage); + + await expectNextFundingRate( + 'BTC-USD', + new Big(protocolTranslations.funding8HourValuePpmTo1HourRate( + defaultFundingUpdateSampleEvent.updates[0].fundingValuePpm, + )), + ); + if (!useSqlFunction) { + expectTimingStat('handle_premium_sample'); + } }); - await onMessage(kafkaMessage); + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'successfully processes multiple premium sample event for different markets (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_FUNDING_HANDLER_SQL_FUNCTION = useSqlFunction; + const fundingUpdateSampleEvent2: FundingEventV1 = { + type: FundingEventV1_Type.TYPE_PREMIUM_SAMPLE, + updates: [ + { + perpetualId: 0, + fundingValuePpm: 100, + fundingIndex: bigIntToBytes(BigInt(0)), + }, + { + perpetualId: 1, + fundingValuePpm: 50, + fundingIndex: bigIntToBytes(BigInt(0)), + }, + ], + }; - await expectNextFundingRate( - 'BTC-USD', - new Big(protocolTranslations.funding8HourValuePpmTo1HourRate( - defaultFundingUpdateSampleEvent.updates[0].fundingValuePpm, - )), - ); - expectTimingStat('handle_premium_sample'); - }); + const kafkaMessage: KafkaMessage = createKafkaMessageFromFundingEvents({ + fundingEvents: [defaultFundingUpdateSampleEvent, fundingUpdateSampleEvent2], + height: defaultHeight, + time: defaultTime, + }); - it('successfully processes multiple premium sample event for different markets', async () => { - const fundingUpdateSampleEvent2: FundingEventV1 = { - type: FundingEventV1_Type.TYPE_PREMIUM_SAMPLE, - updates: [ - { - perpetualId: 0, - fundingValuePpm: 100, - fundingIndex: bigIntToBytes(BigInt(0)), - }, - { - perpetualId: 1, - fundingValuePpm: 50, - fundingIndex: bigIntToBytes(BigInt(0)), - }, - ], - }; + await onMessage(kafkaMessage); - const kafkaMessage: KafkaMessage = createKafkaMessageFromFundingEvents({ - fundingEvents: [defaultFundingUpdateSampleEvent, fundingUpdateSampleEvent2], - height: defaultHeight, - time: defaultTime, + await expectNextFundingRate( + 'BTC-USD', + new Big('0.000006875'), + ); + await expectNextFundingRate( + 'ETH-USD', + new Big(protocolTranslations.funding8HourValuePpmTo1HourRate( + fundingUpdateSampleEvent2.updates[1].fundingValuePpm, + )), + ); + if (!useSqlFunction) { + expectTimingStat('handle_premium_sample'); + } }); - await onMessage(kafkaMessage); + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'successfully processes and clears cache for a new funding rate (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_FUNDING_HANDLER_SQL_FUNCTION = useSqlFunction; + const kafkaMessage: KafkaMessage = createKafkaMessageFromFundingEvents({ + fundingEvents: [defaultFundingUpdateSampleEvent], + height: defaultHeight, + time: defaultTime, + }); - await expectNextFundingRate( - 'BTC-USD', - new Big('0.000006875'), - ); - await expectNextFundingRate( - 'ETH-USD', - new Big(protocolTranslations.funding8HourValuePpmTo1HourRate( - fundingUpdateSampleEvent2.updates[1].fundingValuePpm, - )), - ); - expectTimingStat('handle_premium_sample'); - }); + await onMessage(kafkaMessage); - it('successfully processes and clears cache for a new funding rate', async () => { - const kafkaMessage: KafkaMessage = createKafkaMessageFromFundingEvents({ - fundingEvents: [defaultFundingUpdateSampleEvent], - height: defaultHeight, 
- time: defaultTime, - }); + await expectNextFundingRate( + 'BTC-USD', + new Big(protocolTranslations.funding8HourValuePpmTo1HourRate( + defaultFundingUpdateSampleEvent.updates[0].fundingValuePpm, + )), + ); + if (!useSqlFunction) { + expectTimingStat('handle_premium_sample'); + } - await onMessage(kafkaMessage); + const kafkaMessage2: KafkaMessage = createKafkaMessageFromFundingEvents({ + fundingEvents: [defaultFundingRateEvent], + height: 4, + time: defaultTime, + }); - await expectNextFundingRate( - 'BTC-USD', - new Big(protocolTranslations.funding8HourValuePpmTo1HourRate( - defaultFundingUpdateSampleEvent.updates[0].fundingValuePpm, - )), - ); - expectTimingStat('handle_premium_sample'); + await onMessage(kafkaMessage2); + await expectNextFundingRate( + 'BTC-USD', + undefined, + ); + const fundingIndices: FundingIndexUpdatesFromDatabase[] = await + FundingIndexUpdatesTable.findAll({}, [], {}); - const kafkaMessage2: KafkaMessage = createKafkaMessageFromFundingEvents({ - fundingEvents: [defaultFundingRateEvent], - height: 4, - time: defaultTime, + expect(fundingIndices.length).toEqual(1); + expect(fundingIndices[0]).toEqual(expect.objectContaining({ + perpetualId: '0', + rate: '0.00000125', + oraclePrice: '10000', + fundingIndex: '0.1', + })); + if (!useSqlFunction) { + expectTimingStat('handle_funding_rate'); + } }); - await onMessage(kafkaMessage2); - await expectNextFundingRate( - 'BTC-USD', - undefined, - ); - const fundingIndices: FundingIndexUpdatesFromDatabase[] = await - FundingIndexUpdatesTable.findAll({}, [], {}); + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'successfully processes and clears cache for multiple new funding rates (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_FUNDING_HANDLER_SQL_FUNCTION = useSqlFunction; + const fundingSampleEvent: FundingEventV1 = { + type: FundingEventV1_Type.TYPE_PREMIUM_SAMPLE, + updates: [ + { + perpetualId: 0, + fundingValuePpm: 100, + fundingIndex: bigIntToBytes(BigInt(0)), + }, + { + perpetualId: 1, + fundingValuePpm: 50, + fundingIndex: bigIntToBytes(BigInt(0)), + }, + ], + }; + const kafkaMessage: KafkaMessage = createKafkaMessageFromFundingEvents({ + fundingEvents: [fundingSampleEvent], + height: defaultHeight, + time: defaultTime, + }); - expect(fundingIndices.length).toEqual(1); - expect(fundingIndices[0]).toEqual(expect.objectContaining({ - perpetualId: '0', - rate: '0.00000125', - oraclePrice: '10000', - fundingIndex: '0.1', - })); - expectTimingStat('handle_funding_rate'); - }); + await onMessage(kafkaMessage); - it('successfully processes and clears cache for multiple new funding rates', async () => { - const fundingSampleEvent: FundingEventV1 = { - type: FundingEventV1_Type.TYPE_PREMIUM_SAMPLE, - updates: [ - { - perpetualId: 0, - fundingValuePpm: 100, - fundingIndex: bigIntToBytes(BigInt(0)), - }, - { - perpetualId: 1, - fundingValuePpm: 50, - fundingIndex: bigIntToBytes(BigInt(0)), - }, - ], - }; - const kafkaMessage: KafkaMessage = createKafkaMessageFromFundingEvents({ - fundingEvents: [fundingSampleEvent], - height: defaultHeight, - time: defaultTime, - }); + await Promise.all([ + expectNextFundingRate( + 'BTC-USD', + new Big(protocolTranslations.funding8HourValuePpmTo1HourRate( + fundingSampleEvent.updates[0].fundingValuePpm, + )), + ), + expectNextFundingRate( + 'ETH-USD', + new Big(protocolTranslations.funding8HourValuePpmTo1HourRate( + fundingSampleEvent.updates[1].fundingValuePpm, + )), + ), + ]); + if (!useSqlFunction) { + 
expectTimingStat('handle_premium_sample'); + } - await onMessage(kafkaMessage); + const fundingRateEvent: FundingEventMessage = { + type: FundingEventV1_Type.TYPE_FUNDING_RATE_AND_INDEX, + updates: [ + { + perpetualId: 0, + fundingValuePpm: 10, + fundingIndex: bigIntToBytes(BigInt(10)), + }, + { + perpetualId: 1, + fundingValuePpm: 100, + fundingIndex: bigIntToBytes(BigInt(100)), + }, + ], + }; + const kafkaMessage2: KafkaMessage = createKafkaMessageFromFundingEvents({ + fundingEvents: [fundingRateEvent], + height: 4, + time: defaultTime, + }); - await Promise.all([ - expectNextFundingRate( - 'BTC-USD', - new Big(protocolTranslations.funding8HourValuePpmTo1HourRate( - fundingSampleEvent.updates[0].fundingValuePpm, - )), - ), - expectNextFundingRate( - 'ETH-USD', - new Big(protocolTranslations.funding8HourValuePpmTo1HourRate( - fundingSampleEvent.updates[1].fundingValuePpm, - )), - ), - ]); - expectTimingStat('handle_premium_sample'); - - const fundingRateEvent: FundingEventMessage = { - type: FundingEventV1_Type.TYPE_FUNDING_RATE_AND_INDEX, - updates: [ - { - perpetualId: 0, - fundingValuePpm: 10, - fundingIndex: bigIntToBytes(BigInt(10)), - }, + await onMessage(kafkaMessage2); + await Promise.all([ + expectNextFundingRate( + 'BTC-USD', + undefined, + ), + expectNextFundingRate( + 'ETH-USD', + undefined, + ), + ]); + const fundingIndices: FundingIndexUpdatesFromDatabase[] = await + FundingIndexUpdatesTable.findAll( + {}, + [], { - perpetualId: 1, - fundingValuePpm: 100, - fundingIndex: bigIntToBytes(BigInt(100)), + orderBy: [[FundingIndexUpdatesColumns.perpetualId, Ordering.ASC]], }, - ], - }; - const kafkaMessage2: KafkaMessage = createKafkaMessageFromFundingEvents({ - fundingEvents: [fundingRateEvent], - height: 4, - time: defaultTime, - }); - - await onMessage(kafkaMessage2); - await Promise.all([ - expectNextFundingRate( - 'BTC-USD', - undefined, - ), - expectNextFundingRate( - 'ETH-USD', - undefined, - ), - ]); - const fundingIndices: FundingIndexUpdatesFromDatabase[] = await - FundingIndexUpdatesTable.findAll( - {}, - [], - { - orderBy: [[FundingIndexUpdatesColumns.perpetualId, Ordering.ASC]], - }, - ); + ); - expect(fundingIndices.length).toEqual(2); - expect(fundingIndices[0]).toEqual(expect.objectContaining({ - perpetualId: '0', - rate: '0.00000125', - oraclePrice: '10000', - // 1e1 * 1e-6 * 1e-6 / 1e-10 = 1e-1 - fundingIndex: '0.1', - })); - expect(fundingIndices[1]).toEqual(expect.objectContaining({ - perpetualId: '1', - rate: '0.0000125', - oraclePrice: '500', - // 1e2 * 1e-6 * 1e-6 / 1e-18 = 1e8 - fundingIndex: '100000000', - })); - expectTimingStat('handle_funding_rate'); - }); + expect(fundingIndices.length).toEqual(2); + expect(fundingIndices[0]).toEqual(expect.objectContaining({ + perpetualId: '0', + rate: '0.00000125', + oraclePrice: '10000', + // 1e1 * 1e-6 * 1e-6 / 1e-10 = 1e-1 + fundingIndex: '0.1', + })); + expect(fundingIndices[1]).toEqual(expect.objectContaining({ + perpetualId: '1', + rate: '0.0000125', + oraclePrice: '500', + // 1e2 * 1e-6 * 1e-6 / 1e-18 = 1e8 + fundingIndex: '100000000', + })); + if (!useSqlFunction) { + expectTimingStat('handle_funding_rate'); + } + }); }); function expectTimingStat(fnName: string) { diff --git a/indexer/services/ender/__tests__/handlers/liquidity-tier-handler.test.ts b/indexer/services/ender/__tests__/handlers/liquidity-tier-handler.test.ts index f00ac7faa3..bdeeae8322 100644 --- a/indexer/services/ender/__tests__/handlers/liquidity-tier-handler.test.ts +++ 
b/indexer/services/ender/__tests__/handlers/liquidity-tier-handler.test.ts @@ -39,6 +39,7 @@ import { updateBlockCache } from '../../src/caches/block-cache'; import { defaultLiquidityTier } from '@dydxprotocol-indexer/postgres/build/__tests__/helpers/constants'; import _ from 'lodash'; import { createPostgresFunctions } from '../../src/helpers/postgres/postgres-functions'; +import config from '../../src/config'; describe('liquidityTierHandler', () => { beforeAll(async () => { @@ -103,73 +104,107 @@ describe('liquidityTierHandler', () => { }); }); - it('creates new liquidity tier', async () => { - const transactionIndex: number = 0; - const liquidityTierEvent: LiquidityTierUpsertEventV1 = defaultLiquidityTierUpsertEvent; - const kafkaMessage: KafkaMessage = createKafkaMessageFromLiquidityTiersEvent({ - liquidityTierEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); - // Confirm there is no existing liquidity tier - await expectNoExistingLiquidityTiers(); - await perpetualMarketRefresher.updatePerpetualMarkets(); - - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); - - const newLiquidityTiers: LiquidityTiersFromDatabase[] = await LiquidityTiersTable.findAll( - {}, - [], { - orderBy: [[LiquidityTiersColumns.id, Ordering.ASC]], + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'creates new liquidity tier (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_LIQUIDITY_TIER_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; + const liquidityTierEvent: LiquidityTierUpsertEventV1 = defaultLiquidityTierUpsertEvent; + const kafkaMessage: KafkaMessage = createKafkaMessageFromLiquidityTiersEvent({ + liquidityTierEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, }); - expect(newLiquidityTiers.length).toEqual(1); - expectLiquidityTier(newLiquidityTiers[0], liquidityTierEvent); - expectTimingStats(); - validateLiquidityTierRefresher(defaultLiquidityTierUpsertEvent); - expectKafkaMessages(producerSendMock, liquidityTierEvent, 0); - }); + // Confirm there is no existing liquidity tier + await expectNoExistingLiquidityTiers(); + await perpetualMarketRefresher.updatePerpetualMarkets(); - it('updates existing liquidity tier', async () => { - const transactionIndex: number = 0; - const liquidityTierEvent: LiquidityTierUpsertEventV1 = defaultLiquidityTierUpsertEvent; - const kafkaMessage: KafkaMessage = createKafkaMessageFromLiquidityTiersEvent({ - liquidityTierEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); + + const newLiquidityTiers: LiquidityTiersFromDatabase[] = await LiquidityTiersTable.findAll( + {}, + [], { + orderBy: [[LiquidityTiersColumns.id, Ordering.ASC]], + }); + expect(newLiquidityTiers.length).toEqual(1); + expectLiquidityTier(newLiquidityTiers[0], liquidityTierEvent); + if (!useSqlFunction) { + expectTimingStats(); + } + validateLiquidityTierRefresher(defaultLiquidityTierUpsertEvent); + expectKafkaMessages(producerSendMock, liquidityTierEvent, 0); }); - // Create existing liquidity tier - await LiquidityTiersTable.upsert(defaultLiquidityTier); - // create perpetual market with existing liquidity tier to test websockets - await Promise.all([ - MarketTable.create(testConstants.defaultMarket), 
- MarketTable.create(testConstants.defaultMarket2), - ]); - await Promise.all([ - PerpetualMarketTable.create(testConstants.defaultPerpetualMarket), - PerpetualMarketTable.create(testConstants.defaultPerpetualMarket2), - ]); - await perpetualMarketRefresher.updatePerpetualMarkets(); + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'updates existing liquidity tier (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_LIQUIDITY_TIER_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; + const liquidityTierEvent: LiquidityTierUpsertEventV1 = defaultLiquidityTierUpsertEvent; + const kafkaMessage: KafkaMessage = createKafkaMessageFromLiquidityTiersEvent({ + liquidityTierEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); + // Create existing liquidity tier + await LiquidityTiersTable.upsert(defaultLiquidityTier); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); + // create perpetual market with existing liquidity tier to test websockets + await Promise.all([ + MarketTable.create(testConstants.defaultMarket), + MarketTable.create(testConstants.defaultMarket2), + ]); + await Promise.all([ + PerpetualMarketTable.create(testConstants.defaultPerpetualMarket), + PerpetualMarketTable.create(testConstants.defaultPerpetualMarket2), + ]); + await perpetualMarketRefresher.updatePerpetualMarkets(); - const newLiquidityTiers: LiquidityTiersFromDatabase[] = await LiquidityTiersTable.findAll( - {}, - [], { - orderBy: [[LiquidityTiersColumns.id, Ordering.ASC]], - }); - expect(newLiquidityTiers.length).toEqual(1); - expectLiquidityTier(newLiquidityTiers[0], liquidityTierEvent); - expectTimingStats(); - validateLiquidityTierRefresher(defaultLiquidityTierUpsertEvent); - expectKafkaMessages(producerSendMock, liquidityTierEvent, 2); - }); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); + + const newLiquidityTiers: LiquidityTiersFromDatabase[] = await LiquidityTiersTable.findAll( + {}, + [], { + orderBy: [[LiquidityTiersColumns.id, Ordering.ASC]], + }); + expect(newLiquidityTiers.length).toEqual(1); + expectLiquidityTier(newLiquidityTiers[0], liquidityTierEvent); + if (!useSqlFunction) { + expectTimingStats(); + } + validateLiquidityTierRefresher(defaultLiquidityTierUpsertEvent); + expectKafkaMessages(producerSendMock, liquidityTierEvent, 2); + }); }); function expectTimingStats() { diff --git a/indexer/services/ender/__tests__/handlers/markets/market-create-handler.test.ts b/indexer/services/ender/__tests__/handlers/markets/market-create-handler.test.ts index 839626cdd5..3495d574a5 100644 --- a/indexer/services/ender/__tests__/handlers/markets/market-create-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/markets/market-create-handler.test.ts @@ -23,6 +23,7 @@ import { } from '../../helpers/indexer-proto-helpers'; import Long from 'long'; import { createPostgresFunctions } from '../../../src/helpers/postgres/postgres-functions'; +import config from '../../../src/config'; describe('marketCreateHandler', () => { beforeAll(async () => { @@ -86,67 +87,97 @@ describe('marketCreateHandler', () => { }); }); - it('creates new market', async () => { - const transactionIndex: number = 0; + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'creates new market (%s)', + async ( + _name: string, + useSqlFunction: 
boolean, + ) => { + config.USE_MARKET_CREATE_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; - const marketCreate: MarketEventV1 = { - marketId: 3, - marketCreate: { - base: { - pair: 'DYDX-USD', - minPriceChangePpm: 500, + const marketCreate: MarketEventV1 = { + marketId: 3, + marketCreate: { + base: { + pair: 'DYDX-USD', + minPriceChangePpm: 500, + }, + exponent: -5, }, - exponent: -5, - }, - }; - - const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ - marketEvents: [marketCreate], - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); + }; - await onMessage(kafkaMessage); + const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ + marketEvents: [marketCreate], + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); - const market: MarketFromDatabase = await MarketTable.findById( - marketCreate.marketId, - ) as MarketFromDatabase; + await onMessage(kafkaMessage); - expectMarketMatchesEvent(marketCreate as MarketCreateEventMessage, market); - }); + const market: MarketFromDatabase = await MarketTable.findById( + marketCreate.marketId, + ) as MarketFromDatabase; + + expectMarketMatchesEvent(marketCreate as MarketCreateEventMessage, market); + }); + + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'errors when attempting to create an existing market (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_MARKET_CREATE_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; - it('errors when attempting to create an existing market', async () => { - const transactionIndex: number = 0; + const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ + marketEvents: [defaultMarketCreate], + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); + await expect(onMessage(kafkaMessage)).rejects.toThrowError( + new ParseMessageError('Market in MarketCreate already exists'), + ); - const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ - marketEvents: [defaultMarketCreate], - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, + // Check that market in database is the old market. + const market: MarketFromDatabase = await MarketTable.findById( + defaultMarketCreate.marketId, + ) as MarketFromDatabase; + expect(market.minPriceChangePpm).toEqual(50); + + expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ + at: 'MarketCreateHandler#logAndThrowParseMessageError', + message: 'Market in MarketCreate already exists', + })); + expect(loggerCrit).toHaveBeenCalledWith(expect.objectContaining({ + at: 'onMessage#onMessage', + message: 'Error: Unable to parse message, this must be due to a bug in V4 node', + })); + expect(producerSendMock.mock.calls.length).toEqual(0); }); - await expect(onMessage(kafkaMessage)).rejects.toThrowError( - new ParseMessageError('Market in MarketCreate already exists'), - ); - - // Check that market in database is the old market. 
- const market: MarketFromDatabase = await MarketTable.findById( - defaultMarketCreate.marketId, - ) as MarketFromDatabase; - expect(market.minPriceChangePpm).toEqual(50); - - expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ - at: 'MarketCreateHandler#logAndThrowParseMessageError', - message: 'Market in MarketCreate already exists', - })); - expect(loggerCrit).toHaveBeenCalledWith(expect.objectContaining({ - at: 'onMessage#onMessage', - message: 'Error: Unable to parse message, this must be due to a bug in V4 node', - })); - expect(producerSendMock.mock.calls.length).toEqual(0); - }); }); function expectMarketMatchesEvent( diff --git a/indexer/services/ender/__tests__/handlers/markets/market-modify-handler.test.ts b/indexer/services/ender/__tests__/handlers/markets/market-modify-handler.test.ts index c80d4a1a7f..ae33c3d228 100644 --- a/indexer/services/ender/__tests__/handlers/markets/market-modify-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/markets/market-modify-handler.test.ts @@ -16,6 +16,7 @@ import { createIndexerTendermintBlock, createIndexerTendermintEvent } from '../. import { MarketModifyHandler } from '../../../src/handlers/markets/market-modify-handler'; import Long from 'long'; import { createPostgresFunctions } from '../../../src/helpers/postgres/postgres-functions'; +import config from '../../../src/config'; describe('marketModifyHandler', () => { @@ -80,54 +81,84 @@ describe('marketModifyHandler', () => { }); }); - it('modifies existing market', async () => { - const transactionIndex: number = 0; + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'modifies existing market (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_MARKET_MODIFY_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; + + const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ + marketEvents: [defaultMarketModify], + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); - const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ - marketEvents: [defaultMarketModify], - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, + await onMessage(kafkaMessage); + + const market: MarketFromDatabase = await MarketTable.findById( + defaultMarketModify.marketId, + ) as MarketFromDatabase; + + expectMarketMatchesEvent(defaultMarketModify as MarketModifyEventMessage, market); }); - await onMessage(kafkaMessage); + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'modifies non-existent market (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_MARKET_MODIFY_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; - const market: MarketFromDatabase = await MarketTable.findById( - defaultMarketModify.marketId, - ) as MarketFromDatabase; + const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ + marketEvents: [{ + ...defaultMarketModify, + marketId: 5, + }], + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); - expectMarketMatchesEvent(defaultMarketModify as MarketModifyEventMessage, market); - }); + await expect(onMessage(kafkaMessage)).rejects.toThrowError( + new ParseMessageError('Market in MarketModify doesn\'t exist'), + ); - it('modifies non-existent market', async () => { - const transactionIndex: number = 0; - - const kafkaMessage: 
KafkaMessage = createKafkaMessageFromMarketEvent({ - marketEvents: [{ - ...defaultMarketModify, - marketId: 5, - }], - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, + expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ + at: 'MarketModifyHandler#logAndThrowParseMessageError', + message: 'Market in MarketModify doesn\'t exist', + })); + expect(loggerCrit).toHaveBeenCalledWith(expect.objectContaining({ + at: 'onMessage#onMessage', + message: 'Error: Unable to parse message, this must be due to a bug in V4 node', + })); + expect(producerSendMock.mock.calls.length).toEqual(0); }); - - await expect(onMessage(kafkaMessage)).rejects.toThrowError( - new ParseMessageError('Market in MarketModify doesn\'t exist'), - ); - - expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ - at: 'MarketModifyHandler#logAndThrowParseMessageError', - message: 'Market in MarketModify doesn\'t exist', - })); - expect(loggerCrit).toHaveBeenCalledWith(expect.objectContaining({ - at: 'onMessage#onMessage', - message: 'Error: Unable to parse message, this must be due to a bug in V4 node', - })); - expect(producerSendMock.mock.calls.length).toEqual(0); - }); }); function expectMarketMatchesEvent( diff --git a/indexer/services/ender/__tests__/handlers/markets/market-price-update-handler.test.ts b/indexer/services/ender/__tests__/handlers/markets/market-price-update-handler.test.ts index 9a56111afd..05edfd648d 100644 --- a/indexer/services/ender/__tests__/handlers/markets/market-price-update-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/markets/market-price-update-handler.test.ts @@ -33,6 +33,7 @@ import { MarketPriceUpdateHandler } from '../../../src/handlers/markets/market-p import Long from 'long'; import { getPrice } from '../../../src/caches/price-cache'; import { createPostgresFunctions } from '../../../src/helpers/postgres/postgres-functions'; +import config from '../../../src/config'; describe('marketPriceUpdateHandler', () => { beforeAll(async () => { @@ -95,125 +96,170 @@ describe('marketPriceUpdateHandler', () => { }); }); - it('fails when no market exists', async () => { - const transactionIndex: number = 0; - const marketPriceUpdate: MarketEventV1 = { - marketId: 5, - priceUpdate: { - priceWithExponent: Long.fromValue(50000000, true), - }, - }; - const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ - marketEvents: [marketPriceUpdate], - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); - - await expect(onMessage(kafkaMessage)).rejects.toThrowError( - new ParseMessageError('MarketPriceUpdateEvent contains a non-existent market id'), - ); - - expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ - at: 'MarketPriceUpdateHandler#logAndThrowParseMessageError', - message: 'MarketPriceUpdateEvent contains a non-existent market id', - })); - expect(loggerCrit).toHaveBeenCalledWith(expect.objectContaining({ - at: 'onMessage#onMessage', - message: 'Error: Unable to parse message, this must be due to a bug in V4 node', - })); - expect(producerSendMock.mock.calls.length).toEqual(0); - }); + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'fails when no market exists (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_MARKET_PRICE_UPDATE_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; + const marketPriceUpdate: MarketEventV1 = { + marketId: 5, + priceUpdate: { + 
priceWithExponent: Long.fromValue(50000000, true), + }, + }; + const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ + marketEvents: [marketPriceUpdate], + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); - it('successfully inserts new oracle price for existing market', async () => { - const transactionIndex: number = 0; + await expect(onMessage(kafkaMessage)).rejects.toThrowError( + new ParseMessageError('MarketPriceUpdateEvent contains a non-existent market id'), + ); - const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ - marketEvents: [defaultMarketPriceUpdate], - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, + expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ + at: 'MarketPriceUpdateHandler#logAndThrowParseMessageError', + message: 'MarketPriceUpdateEvent contains a non-existent market id', + })); + expect(loggerCrit).toHaveBeenCalledWith(expect.objectContaining({ + at: 'onMessage#onMessage', + message: 'Error: Unable to parse message, this must be due to a bug in V4 node', + })); + expect(producerSendMock.mock.calls.length).toEqual(0); }); - await onMessage(kafkaMessage); + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'successfully inserts new oracle price for existing market (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_MARKET_PRICE_UPDATE_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; - const { market, oraclePrice } = await getDbState(defaultMarketPriceUpdate); + const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ + marketEvents: [defaultMarketPriceUpdate], + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); - expectOraclePriceMatchesEvent( - defaultMarketPriceUpdate as MarketPriceUpdateEventMessage, - oraclePrice, - market, - defaultHeight, - ); + await onMessage(kafkaMessage); - expect(getPrice(oraclePrice.marketId)).toEqual(oraclePrice.price); + const { market, oraclePrice } = await getDbState(defaultMarketPriceUpdate); - const contents: MarketMessageContents = generateOraclePriceContents( - oraclePrice, - market.pair, - ); + expectOraclePriceMatchesEvent( + defaultMarketPriceUpdate as MarketPriceUpdateEventMessage, + oraclePrice, + market, + defaultHeight, + ); + + expect(getPrice(oraclePrice.marketId)).toEqual(oraclePrice.price); - expectMarketKafkaMessage({ - producerSendMock, - contents: JSON.stringify(contents), + const contents: MarketMessageContents = generateOraclePriceContents( + oraclePrice, + market.pair, + ); + + expectMarketKafkaMessage({ + producerSendMock, + contents: JSON.stringify(contents), + }); }); - }); - it('successfully inserts new oracle price for market created in same block', async () => { - const transactionIndex: number = 0; - const newMarketId: number = 3000; - - // Include an event to create the market - const marketCreate: MarketEventV1 = { - marketId: newMarketId, - marketCreate: { - base: { - pair: 'NEWTOKEN-USD', - minPriceChangePpm: 500, + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'successfully inserts new oracle price for market created in same block (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_MARKET_PRICE_UPDATE_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; + const newMarketId: number = 3000; + + // Include an event 
to create the market + const marketCreate: MarketEventV1 = { + marketId: newMarketId, + marketCreate: { + base: { + pair: 'NEWTOKEN-USD', + minPriceChangePpm: 500, + }, + exponent: -5, }, - exponent: -5, - }, - }; - const marketPriceUpdate: MarketEventV1 = { - marketId: newMarketId, - priceUpdate: { - priceWithExponent: Long.fromValue(50000000), - }, - }; - - const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ - marketEvents: [marketCreate, marketPriceUpdate], - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); + }; + const marketPriceUpdate: MarketEventV1 = { + marketId: newMarketId, + priceUpdate: { + priceWithExponent: Long.fromValue(50000000), + }, + }; + + const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ + marketEvents: [marketCreate, marketPriceUpdate], + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); - await onMessage(kafkaMessage); + await onMessage(kafkaMessage); - const { market, oraclePrice } = await getDbState(marketPriceUpdate); + const { market, oraclePrice } = await getDbState(marketPriceUpdate); - expectOraclePriceMatchesEvent( - marketPriceUpdate as MarketPriceUpdateEventMessage, - oraclePrice, - market, - defaultHeight, - ); + expectOraclePriceMatchesEvent( + marketPriceUpdate as MarketPriceUpdateEventMessage, + oraclePrice, + market, + defaultHeight, + ); - expect(getPrice(oraclePrice.marketId)).toEqual(oraclePrice.price); + expect(getPrice(oraclePrice.marketId)).toEqual(oraclePrice.price); - const contents: MarketMessageContents = generateOraclePriceContents( - oraclePrice, - market.pair, - ); + const contents: MarketMessageContents = generateOraclePriceContents( + oraclePrice, + market.pair, + ); - expectMarketKafkaMessage({ - producerSendMock, - contents: JSON.stringify(contents), + expectMarketKafkaMessage({ + producerSendMock, + contents: JSON.stringify(contents), + }); }); - }); }); async function getDbState(marketPriceUpdate: MarketEventV1): Promise { diff --git a/indexer/services/ender/__tests__/handlers/order-fills/liquidation-handler.test.ts b/indexer/services/ender/__tests__/handlers/order-fills/liquidation-handler.test.ts index 1eb3d8d72b..b7ab1b37bf 100644 --- a/indexer/services/ender/__tests__/handlers/order-fills/liquidation-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/order-fills/liquidation-handler.test.ts @@ -73,6 +73,8 @@ import { LiquidationHandler } from '../../../src/handlers/order-fills/liquidatio import { clearCandlesMap } from '../../../src/caches/candle-cache'; import Long from 'long'; import { createPostgresFunctions } from '../../../src/helpers/postgres/postgres-functions'; +import config from '../../../src/config'; +import { expectStateFilledQuantums } from '../../helpers/redis-helpers'; const defaultClobPairId: string = testConstants.defaultPerpetualMarket.clobPairId; const defaultMakerFeeQuantum: number = 1_000_000; @@ -203,18 +205,32 @@ describe('LiquidationHandler', () => { it.each([ [ - 'goodTilBlock', + 'goodTilBlock via knex', { goodTilBlock: 10, - goodTilBlockTime: undefined, }, + false, ], [ - 'goodTilBlockTime', + 'goodTilBlock via SQL function', + { + goodTilBlock: 10, + }, + true, + ], + [ + 'goodTilBlockTime via knex', + { + goodTilBlockTime: 1_000_000_000, + }, + false, + ], + [ + 'goodTilBlockTime via SQL function', { - goodTilBlock: undefined, goodTilBlockTime: 1_000_000_000, }, + true, ], ])( 'creates fills and orders (with %s), sends vulcan message for maker order update and 
updates ' + @@ -222,7 +238,9 @@ describe('LiquidationHandler', () => { async ( _name: string, goodTilOneof: Partial, + useSqlFunction: boolean, ) => { + config.USE_LIQUIDATION_HANDLER_SQL_FUNCTION = useSqlFunction; const transactionIndex: number = 0; const eventIndex: number = 0; const makerQuantums: number = 10_000_000; @@ -237,7 +255,7 @@ describe('LiquidationHandler', () => { goodTilOneof, clobPairId: defaultClobPairId, orderFlags: ORDER_FLAG_SHORT_TERM.toString(), - timeInForce: IndexerOrder_TimeInForce.TIME_IN_FORCE_IOC, + timeInForce: IndexerOrder_TimeInForce.TIME_IN_FORCE_UNSPECIFIED, reduceOnly: true, clientMetadata: 0, }); @@ -300,7 +318,7 @@ describe('LiquidationHandler', () => { clobPairId: defaultClobPairId, side: makerOrderProto.side === IndexerOrder_Side.SIDE_BUY ? OrderSide.BUY : OrderSide.SELL, orderFlags: makerOrderProto.orderId!.orderFlags.toString(), - timeInForce: TimeInForce.IOC, + timeInForce: TimeInForce.GTT, reduceOnly: true, goodTilBlock: protocolTranslations.getGoodTilBlock(makerOrderProto)?.toString(), goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(makerOrderProto), @@ -407,26 +425,52 @@ describe('LiquidationHandler', () => { exitPrice: makerPrice, }, ), + expectStateFilledQuantums( + OrderTable.orderIdToUuid(makerOrderProto.orderId!), + orderFillEvent.totalFilledMaker.toString(), + ), expectCandlesUpdated(), ]); - expectTimingStats(); + if (!useSqlFunction) { + expectTimingStats(); + } }); it.each([ [ - 'goodTilBlock', + 'goodTilBlock via knex', { goodTilBlock: 10, }, + false, '5', undefined, ], [ - 'goodTilBlockTime', + 'goodTilBlock via SQL function', + { + goodTilBlock: 10, + }, + true, + '5', + undefined, + ], + [ + 'goodTilBlockTime via knex', + { + goodTilBlockTime: 1_000_000, + }, + false, + undefined, + '1970-01-11T13:46:40.000Z', + ], + [ + 'goodTilBlockTime via SQL function', { goodTilBlockTime: 1_000_000, }, + true, undefined, '1970-01-11T13:46:40.000Z', ], @@ -436,10 +480,13 @@ describe('LiquidationHandler', () => { async ( _name: string, goodTilOneof: Partial, + useSqlFunction: boolean, existingGoodTilBlock?: string, existingGoodTilBlockTime?: string, ) => { - // create initial orders + config.USE_LIQUIDATION_HANDLER_SQL_FUNCTION = useSqlFunction; + + // create initial orders const existingMakerOrder: OrderCreateObject = { subaccountId: testConstants.defaultSubaccountId, clientId: '0', @@ -456,7 +503,7 @@ describe('LiquidationHandler', () => { goodTilBlock: existingGoodTilBlock, goodTilBlockTime: existingGoodTilBlockTime, clientMetadata: '0', - updatedAt: defaultDateTime.toISO(), + updatedAt: DateTime.fromMillis(0).toISO(), updatedAtHeight: '0', }; @@ -624,152 +671,177 @@ describe('LiquidationHandler', () => { eventId, ), expectCandlesUpdated(), + expectStateFilledQuantums( + OrderTable.orderIdToUuid(makerOrderProto.orderId!), + orderFillEvent.totalFilledMaker.toString(), + ), ]); - expectTimingStats(); + if (!useSqlFunction) { + expectTimingStats(); + } }); - it('creates fills and orders with fixed-point notation quoteAmount', async () => { - const transactionIndex: number = 0; - const eventIndex: number = 0; - const makerQuantums: number = 100; - const makerSubticks: number = 1_000_000; + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'creates fills and orders (%s) with fixed-point notation quoteAmount', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_LIQUIDATION_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; + const eventIndex: number = 
0; + const makerQuantums: number = 100; + const makerSubticks: number = 1_000_000; - const makerOrderProto: IndexerOrder = createOrder({ - subaccountId: defaultSubaccountId, - clientId: 0, - side: IndexerOrder_Side.SIDE_BUY, - quantums: makerQuantums, - subticks: makerSubticks, - goodTilOneof: { goodTilBlock: 10 }, - clobPairId: defaultClobPairId, - orderFlags: ORDER_FLAG_SHORT_TERM.toString(), - timeInForce: IndexerOrder_TimeInForce.TIME_IN_FORCE_UNSPECIFIED, - reduceOnly: false, - clientMetadata: 0, - }); + const makerOrderProto: IndexerOrder = createOrder({ + subaccountId: defaultSubaccountId, + clientId: 0, + side: IndexerOrder_Side.SIDE_BUY, + quantums: makerQuantums, + subticks: makerSubticks, + goodTilOneof: { goodTilBlock: 10 }, + clobPairId: defaultClobPairId, + orderFlags: ORDER_FLAG_SHORT_TERM.toString(), + timeInForce: IndexerOrder_TimeInForce.TIME_IN_FORCE_UNSPECIFIED, + reduceOnly: false, + clientMetadata: 0, + }); - const takerSubticks: number = 150_000; - const takerQuantums: number = 10; - const liquidationOrder: LiquidationOrderV1 = createLiquidationOrder({ - subaccountId: defaultSubaccountId2, - clobPairId: defaultClobPairId, - perpetualId: defaultPerpetualPosition.perpetualId, - quantums: takerQuantums, - isBuy: false, - subticks: takerSubticks, - }); + const takerSubticks: number = 150_000; + const takerQuantums: number = 10; + const liquidationOrder: LiquidationOrderV1 = createLiquidationOrder({ + subaccountId: defaultSubaccountId2, + clobPairId: defaultClobPairId, + perpetualId: defaultPerpetualPosition.perpetualId, + quantums: takerQuantums, + isBuy: false, + subticks: takerSubticks, + }); - const fillAmount: number = 10; - const orderFillEvent: OrderFillEventV1 = createLiquidationOrderFillEvent( - makerOrderProto, - liquidationOrder, - fillAmount, - fillAmount, - ); - const kafkaMessage: KafkaMessage = createKafkaMessageFromOrderFillEvent({ - orderFillEvent, - transactionIndex, - eventIndex, - height: parseInt(defaultHeight, 10), - time: defaultTime, - txHash: defaultTxHash, - }); + const fillAmount: number = 10; + const orderFillEvent: OrderFillEventV1 = createLiquidationOrderFillEvent( + makerOrderProto, + liquidationOrder, + fillAmount, + fillAmount, + ); + const kafkaMessage: KafkaMessage = createKafkaMessageFromOrderFillEvent({ + orderFillEvent, + transactionIndex, + eventIndex, + height: parseInt(defaultHeight, 10), + time: defaultTime, + txHash: defaultTxHash, + }); - // create initial PerpetualPositions - await Promise.all([ - PerpetualPositionTable.create(defaultPerpetualPosition), - PerpetualPositionTable.create({ - ...defaultPerpetualPosition, - subaccountId: testConstants.defaultSubaccountId2, - }), - ]); + // create initial PerpetualPositions + await Promise.all([ + PerpetualPositionTable.create(defaultPerpetualPosition), + PerpetualPositionTable.create({ + ...defaultPerpetualPosition, + subaccountId: testConstants.defaultSubaccountId2, + }), + ]); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); - - // This size should be in fixed-point notation rather than exponential notation (1e-8) - const makerOrderSize: string = '0.00000001'; // quantums in human = 1e2 * 1e-10 = 1e-8 - const makerPrice: string = '100'; // quote currency / base currency = 1e6 * 1e-8 * 1e-6 / 1e-10 = 1e2 - const totalFilled: string = '0.000000001'; // fillAmount in human = 1e1 * 1e-10 = 1e-9 - await expectOrderInDatabase({ - subaccountId: testConstants.defaultSubaccountId, - clientId: '0', - size: makerOrderSize, - 
totalFilled, - price: makerPrice, - status: OrderStatus.OPEN, // orderSize > totalFilled so status is open - clobPairId: defaultClobPairId, - side: makerOrderProto.side === IndexerOrder_Side.SIDE_BUY ? OrderSide.BUY : OrderSide.SELL, - orderFlags: makerOrderProto.orderId!.orderFlags.toString(), - timeInForce: TimeInForce.GTT, - reduceOnly: false, - goodTilBlock: protocolTranslations.getGoodTilBlock(makerOrderProto)?.toString(), - goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(makerOrderProto), - clientMetadata: makerOrderProto.clientMetadata.toString(), - updatedAt: defaultDateTime.toISO(), - updatedAtHeight: defaultHeight.toString(), - }); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); - const eventId: Buffer = TendermintEventTable.createEventId( - defaultHeight, - transactionIndex, - eventIndex, - ); + // This size should be in fixed-point notation rather than exponential notation (1e-8) + const makerOrderSize: string = '0.00000001'; // quantums in human = 1e2 * 1e-10 = 1e-8 + const makerPrice: string = '100'; // quote currency / base currency = 1e6 * 1e-8 * 1e-6 / 1e-10 = 1e2 + const totalFilled: string = '0.000000001'; // fillAmount in human = 1e1 * 1e-10 = 1e-9 + await expectOrderInDatabase({ + subaccountId: testConstants.defaultSubaccountId, + clientId: '0', + size: makerOrderSize, + totalFilled, + price: makerPrice, + status: OrderStatus.OPEN, // orderSize > totalFilled so status is open + clobPairId: defaultClobPairId, + side: makerOrderProto.side === IndexerOrder_Side.SIDE_BUY ? OrderSide.BUY : OrderSide.SELL, + orderFlags: makerOrderProto.orderId!.orderFlags.toString(), + timeInForce: TimeInForce.GTT, + reduceOnly: false, + goodTilBlock: protocolTranslations.getGoodTilBlock(makerOrderProto)?.toString(), + goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(makerOrderProto), + clientMetadata: makerOrderProto.clientMetadata.toString(), + updatedAt: defaultDateTime.toISO(), + updatedAtHeight: defaultHeight.toString(), + }); - // This size should be in fixed-point notation rather than exponential notation (1e-5) - const quoteAmount: string = '0.0000001'; // quote amount is price * fillAmount = 1e2 * 1e-9 = 1e-7 - await expectFillInDatabase({ - subaccountId: testConstants.defaultSubaccountId, - clientId: '0', - liquidity: Liquidity.MAKER, - size: totalFilled, - price: makerPrice, - quoteAmount, - eventId, - transactionHash: defaultTxHash, - createdAt: defaultDateTime.toISO(), - createdAtHeight: defaultHeight, - type: FillType.LIQUIDATION, - clobPairId: defaultClobPairId, - side: protocolTranslations.protocolOrderSideToOrderSide(makerOrderProto.side), - orderFlags: ORDER_FLAG_SHORT_TERM.toString(), - clientMetadata: makerOrderProto.clientMetadata.toString(), - fee: defaultMakerFee, - }); - await expectFillInDatabase({ - subaccountId: testConstants.defaultSubaccountId2, - clientId: '0', - liquidity: Liquidity.TAKER, - size: totalFilled, - price: makerPrice, - quoteAmount, - eventId, - transactionHash: defaultTxHash, - createdAt: defaultDateTime.toISO(), - createdAtHeight: defaultHeight, - type: FillType.LIQUIDATED, - clobPairId: defaultClobPairId, - side: liquidationOrderToOrderSide(liquidationOrder), - orderFlags: ORDER_FLAG_SHORT_TERM.toString(), - clientMetadata: null, - fee: defaultTakerFee, - hasOrderId: false, - }); + const eventId: Buffer = TendermintEventTable.createEventId( + defaultHeight, + transactionIndex, + eventIndex, + ); - await Promise.all([ - 
expectDefaultOrderFillAndPositionSubaccountKafkaMessages( - producerSendMock, + // This size should be in fixed-point notation rather than exponential notation (1e-5) + const quoteAmount: string = '0.0000001'; // quote amount is price * fillAmount = 1e2 * 1e-9 = 1e-7 + await expectFillInDatabase({ + subaccountId: testConstants.defaultSubaccountId, + clientId: '0', + liquidity: Liquidity.MAKER, + size: totalFilled, + price: makerPrice, + quoteAmount, eventId, - ORDER_FLAG_SHORT_TERM, - ), - expectDefaultTradeKafkaMessageFromTakerFillId( - producerSendMock, + transactionHash: defaultTxHash, + createdAt: defaultDateTime.toISO(), + createdAtHeight: defaultHeight, + type: FillType.LIQUIDATION, + clobPairId: defaultClobPairId, + side: protocolTranslations.protocolOrderSideToOrderSide(makerOrderProto.side), + orderFlags: ORDER_FLAG_SHORT_TERM.toString(), + clientMetadata: makerOrderProto.clientMetadata.toString(), + fee: defaultMakerFee, + }); + await expectFillInDatabase({ + subaccountId: testConstants.defaultSubaccountId2, + clientId: '0', + liquidity: Liquidity.TAKER, + size: totalFilled, + price: makerPrice, + quoteAmount, eventId, - ), - expectCandlesUpdated(), - ]); - }); + transactionHash: defaultTxHash, + createdAt: defaultDateTime.toISO(), + createdAtHeight: defaultHeight, + type: FillType.LIQUIDATED, + clobPairId: defaultClobPairId, + side: liquidationOrderToOrderSide(liquidationOrder), + orderFlags: ORDER_FLAG_SHORT_TERM.toString(), + clientMetadata: null, + fee: defaultTakerFee, + hasOrderId: false, + }); + + await Promise.all([ + expectDefaultOrderFillAndPositionSubaccountKafkaMessages( + producerSendMock, + eventId, + ORDER_FLAG_SHORT_TERM, + ), + expectDefaultTradeKafkaMessageFromTakerFillId( + producerSendMock, + eventId, + ), + expectCandlesUpdated(), + expectStateFilledQuantums( + OrderTable.orderIdToUuid(makerOrderProto.orderId!), + orderFillEvent.totalFilledMaker.toString(), + ), + ]); + }); it('LiquidationOrderFillEvent fails liquidationOrder validation', async () => { const makerQuantums: number = 10_000_000; diff --git a/indexer/services/ender/__tests__/handlers/order-fills/order-handler.test.ts b/indexer/services/ender/__tests__/handlers/order-fills/order-handler.test.ts index 2ee036061c..e8f49aeab4 100644 --- a/indexer/services/ender/__tests__/handlers/order-fills/order-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/order-fills/order-handler.test.ts @@ -22,6 +22,7 @@ import { FillTable, FillType, Liquidity, + OrderFromDatabase, OrderSide, OrderStatus, OrderTable, @@ -74,6 +75,7 @@ import Long from 'long'; import { createPostgresFunctions } from '../../../src/helpers/postgres/postgres-functions'; import config from '../../../src/config'; import { redisClient } from '../../../src/helpers/redis/redis-controller'; +import { expectStateFilledQuantums } from '../../helpers/redis-helpers'; const defaultClobPairId: string = testConstants.defaultPerpetualMarket.clobPairId; const defaultMakerFeeQuantum: number = 1_000_000; @@ -280,7 +282,7 @@ describe('OrderHandler', () => { goodTilOneof: takerGoodTilOneof, clobPairId: defaultClobPairId, orderFlags: ORDER_FLAG_SHORT_TERM.toString(), - timeInForce: IndexerOrder_TimeInForce.TIME_IN_FORCE_IOC, + timeInForce: IndexerOrder_TimeInForce.TIME_IN_FORCE_UNSPECIFIED, reduceOnly: true, clientMetadata: 0, }); @@ -354,7 +356,7 @@ describe('OrderHandler', () => { clobPairId: defaultClobPairId, side: protocolTranslations.protocolOrderSideToOrderSide(takerOrderProto.side), orderFlags: takerOrderProto.orderId!.orderFlags.toString(), 
- timeInForce: TimeInForce.IOC, + timeInForce: TimeInForce.GTT, reduceOnly: true, goodTilBlock: protocolTranslations.getGoodTilBlock(takerOrderProto)?.toString(), goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(takerOrderProto), @@ -478,6 +480,14 @@ describe('OrderHandler', () => { }, ), expectCandlesUpdated(), + expectStateFilledQuantums( + OrderTable.orderIdToUuid(makerOrderProto.orderId!), + orderFillEvent.totalFilledMaker.toString(), + ), + expectStateFilledQuantums( + OrderTable.orderIdToUuid(takerOrderProto.orderId!), + orderFillEvent.totalFilledTaker.toString(), + ), ]); if (!useSqlFunction) { @@ -608,8 +618,8 @@ describe('OrderHandler', () => { goodTilBlockTime: existingGoodTilBlockTime, orderFlags: ORDER_FLAG_SHORT_TERM.toString(), clientMetadata: '0', - updatedAt: defaultDateTime.toISO(), - updatedAtHeight: defaultHeight.toString(), + updatedAt: DateTime.fromMillis(0).toISO(), + updatedAtHeight: '0', }), // taker order OrderTable.create({ @@ -628,8 +638,8 @@ describe('OrderHandler', () => { goodTilBlockTime: existingGoodTilBlockTime, orderFlags: ORDER_FLAG_LONG_TERM.toString(), clientMetadata: '0', - updatedAt: defaultDateTime.toISO(), - updatedAtHeight: defaultHeight.toString(), + updatedAt: DateTime.fromMillis(0).toISO(), + updatedAtHeight: '0', }), ]); @@ -707,7 +717,7 @@ describe('OrderHandler', () => { totalFilled: totalMakerOrderFilled, price, status: isOrderCanceled - ? OrderStatus.BEST_EFFORT_CANCELED + ? OrderStatus.CANCELED : OrderStatus.OPEN, // orderSize > totalFilled so status is open clobPairId: defaultClobPairId, side: protocolTranslations.protocolOrderSideToOrderSide(makerOrderProto.side), @@ -832,6 +842,14 @@ describe('OrderHandler', () => { eventId, ), expectCandlesUpdated(), + expectStateFilledQuantums( + OrderTable.orderIdToUuid(makerOrderProto.orderId!), + orderFillEvent.totalFilledMaker.toString(), + ), + expectStateFilledQuantums( + OrderTable.orderIdToUuid(takerOrderProto.orderId!), + orderFillEvent.totalFilledTaker.toString(), + ), ]); if (!useSqlFunction) { @@ -869,7 +887,7 @@ describe('OrderHandler', () => { }, clobPairId: defaultClobPairId, orderFlags: ORDER_FLAG_SHORT_TERM.toString(), - timeInForce: IndexerOrder_TimeInForce.TIME_IN_FORCE_FILL_OR_KILL, + timeInForce: IndexerOrder_TimeInForce.TIME_IN_FORCE_UNSPECIFIED, reduceOnly: false, clientMetadata: 0, }); @@ -954,7 +972,7 @@ describe('OrderHandler', () => { clobPairId: defaultClobPairId, side: protocolTranslations.protocolOrderSideToOrderSide(makerOrderProto.side), orderFlags: makerOrderProto.orderId!.orderFlags.toString(), - timeInForce: TimeInForce.FOK, + timeInForce: TimeInForce.GTT, reduceOnly: false, goodTilBlock: protocolTranslations.getGoodTilBlock(makerOrderProto)?.toString(), goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(makerOrderProto), @@ -1041,6 +1059,14 @@ describe('OrderHandler', () => { eventId, ), expectCandlesUpdated(), + expectStateFilledQuantums( + OrderTable.orderIdToUuid(makerOrderProto.orderId!), + orderFillEvent.totalFilledMaker.toString(), + ), + expectStateFilledQuantums( + OrderTable.orderIdToUuid(takerOrderProto.orderId!), + orderFillEvent.totalFilledTaker.toString(), + ), ]); }); @@ -1074,7 +1100,7 @@ describe('OrderHandler', () => { }, clobPairId: testConstants.defaultPerpetualMarket3.clobPairId, orderFlags: ORDER_FLAG_SHORT_TERM.toString(), - timeInForce: IndexerOrder_TimeInForce.TIME_IN_FORCE_FILL_OR_KILL, + timeInForce: IndexerOrder_TimeInForce.TIME_IN_FORCE_UNSPECIFIED, reduceOnly: false, clientMetadata: 0, }); @@ -1165,7 +1191,7 @@ 
describe('OrderHandler', () => { clobPairId: testConstants.defaultPerpetualMarket3.clobPairId, side: protocolTranslations.protocolOrderSideToOrderSide(makerOrderProto.side), orderFlags: makerOrderProto.orderId!.orderFlags.toString(), - timeInForce: TimeInForce.FOK, + timeInForce: TimeInForce.GTT, reduceOnly: false, goodTilBlock: protocolTranslations.getGoodTilBlock(makerOrderProto)?.toString(), goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(makerOrderProto), @@ -1252,6 +1278,14 @@ describe('OrderHandler', () => { eventId, ), expectCandlesUpdated(), + expectStateFilledQuantums( + OrderTable.orderIdToUuid(makerOrderProto.orderId!), + orderFillEvent.totalFilledMaker.toString(), + ), + expectStateFilledQuantums( + OrderTable.orderIdToUuid(takerOrderProto.orderId!), + orderFillEvent.totalFilledTaker.toString(), + ), ]); }); @@ -1359,6 +1393,258 @@ describe('OrderHandler', () => { await expectNoCandles(); }); + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])('correctly sets status for short term IOC orders (%s)', async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; + const eventIndex: number = 0; + const makerQuantums: number = 100; + const makerSubticks: number = 1_000_000; + + const makerOrderProto: IndexerOrder = createOrder({ + subaccountId: defaultSubaccountId, + clientId: 0, + side: IndexerOrder_Side.SIDE_BUY, + quantums: makerQuantums, + subticks: makerSubticks, + goodTilOneof: { + goodTilBlock: 10, + }, + clobPairId: testConstants.defaultPerpetualMarket3.clobPairId, + orderFlags: ORDER_FLAG_SHORT_TERM.toString(), + timeInForce: IndexerOrder_TimeInForce.TIME_IN_FORCE_IOC, + reduceOnly: false, + clientMetadata: 0, + }); + + const takerSubticks: number = 150_000; + const takerQuantums: number = 10; + const takerOrderProto: IndexerOrder = createOrder({ + subaccountId: defaultSubaccountId2, + clientId: 0, + side: IndexerOrder_Side.SIDE_SELL, + quantums: takerQuantums, + subticks: takerSubticks, + goodTilOneof: { + goodTilBlock: 10, + }, + clobPairId: testConstants.defaultPerpetualMarket3.clobPairId, + orderFlags: ORDER_FLAG_SHORT_TERM.toString(), + timeInForce: IndexerOrder_TimeInForce.TIME_IN_FORCE_IOC, + reduceOnly: true, + clientMetadata: 0, + }); + + const fillAmount: number = takerQuantums; + const orderFillEvent: OrderFillEventV1 = createOrderFillEvent( + makerOrderProto, + takerOrderProto, + fillAmount, + fillAmount, + fillAmount, + ); + const kafkaMessage: KafkaMessage = createKafkaMessageFromOrderFillEvent({ + orderFillEvent, + transactionIndex, + eventIndex, + height: parseInt(defaultHeight, 10), + time: defaultTime, + txHash: defaultTxHash, + }); + + await Promise.all([ + // initial position for subaccount 1 + PerpetualPositionTable.create({ + ...defaultPerpetualPosition, + perpetualId: testConstants.defaultPerpetualMarket3.id, + }), + // initial position for subaccount 2 + PerpetualPositionTable.create({ + ...defaultPerpetualPosition, + subaccountId: testConstants.defaultSubaccountId2, + perpetualId: testConstants.defaultPerpetualMarket3.id, + }), + ]); + + await onMessage(kafkaMessage); + + const makerOrderId: string = OrderTable.orderIdToUuid(makerOrderProto.orderId!); + const takerOrderId: string = OrderTable.orderIdToUuid(takerOrderProto.orderId!); + + const [makerOrder, takerOrder]: [ + OrderFromDatabase | undefined, + OrderFromDatabase | undefined + ] = await Promise.all([ + OrderTable.findById(makerOrderId), + 
OrderTable.findById(takerOrderId),
+    ]);
+
+    expect(makerOrder).toBeDefined();
+    expect(takerOrder).toBeDefined();
+
+    // maker order is partially filled, so the short-term IOC order is canceled
+    expect(makerOrder!.status).toEqual(OrderStatus.CANCELED);
+    // taker order is fully filled
+    expect(takerOrder!.status).toEqual(OrderStatus.FILLED);
+  });
+
+  it.each([
+    [
+      'limit',
+      'via knex',
+      false,
+      IndexerOrder_TimeInForce.TIME_IN_FORCE_UNSPECIFIED,
+    ],
+    [
+      'limit',
+      'via SQL function',
+      true,
+      IndexerOrder_TimeInForce.TIME_IN_FORCE_UNSPECIFIED,
+    ],
+    [
+      'post-only best effort canceled',
+      'via knex',
+      false,
+      IndexerOrder_TimeInForce.TIME_IN_FORCE_POST_ONLY,
+    ],
+    [
+      'post-only best effort canceled',
+      'via SQL function',
+      true,
+      IndexerOrder_TimeInForce.TIME_IN_FORCE_POST_ONLY,
+    ],
+    [
+      'post-only canceled',
+      'via knex',
+      false,
+      IndexerOrder_TimeInForce.TIME_IN_FORCE_POST_ONLY,
+      OrderStatus.CANCELED,
+    ],
+    [
+      'post-only canceled',
+      'via SQL function',
+      true,
+      IndexerOrder_TimeInForce.TIME_IN_FORCE_POST_ONLY,
+      OrderStatus.CANCELED,
+    ],
+  ])('correctly sets status for short term %s orders (%s)', async (
+    _orderType: string,
+    _name: string,
+    useSqlFunction: boolean,
+    timeInForce: IndexerOrder_TimeInForce,
+    // either BEST_EFFORT_CANCELED or CANCELED
+    status: OrderStatus = OrderStatus.BEST_EFFORT_CANCELED,
+  ) => {
+    config.USE_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction;
+    const transactionIndex: number = 0;
+    const eventIndex: number = 0;
+    const makerQuantums: number = 100;
+    const makerSubticks: number = 1_000_000;
+
+    const makerOrderProto: IndexerOrder = createOrder({
+      subaccountId: defaultSubaccountId,
+      clientId: 0,
+      side: IndexerOrder_Side.SIDE_BUY,
+      quantums: makerQuantums,
+      subticks: makerSubticks,
+      goodTilOneof: {
+        goodTilBlock: 10,
+      },
+      clobPairId: testConstants.defaultPerpetualMarket3.clobPairId,
+      orderFlags: ORDER_FLAG_SHORT_TERM.toString(),
+      timeInForce,
+      reduceOnly: false,
+      clientMetadata: 0,
+    });
+
+    const takerSubticks: number = 150_000;
+    const takerQuantums: number = 100;
+    const takerOrderProto: IndexerOrder = createOrder({
+      subaccountId: defaultSubaccountId2,
+      clientId: 0,
+      side: IndexerOrder_Side.SIDE_SELL,
+      quantums: takerQuantums,
+      subticks: takerSubticks,
+      goodTilOneof: {
+        goodTilBlock: 10,
+      },
+      clobPairId: testConstants.defaultPerpetualMarket3.clobPairId,
+      orderFlags: ORDER_FLAG_SHORT_TERM.toString(),
+      timeInForce,
+      reduceOnly: true,
+      clientMetadata: 0,
+    });
+
+    const makerOrderId: string = OrderTable.orderIdToUuid(makerOrderProto.orderId!);
+    if (status === OrderStatus.BEST_EFFORT_CANCELED) {
+      await CanceledOrdersCache.addBestEffortCanceledOrderId(makerOrderId, Date.now(), redisClient);
+    } else { // Status is only ever CANCELED or BEST_EFFORT_CANCELED
+      await CanceledOrdersCache.addCanceledOrderId(makerOrderId, Date.now(), redisClient);
+    }
+
+    const fillAmount: number = 10;
+    const orderFillEvent: OrderFillEventV1 = createOrderFillEvent(
+      makerOrderProto,
+      takerOrderProto,
+      fillAmount,
+      fillAmount,
+      fillAmount,
+    );
+    const kafkaMessage: KafkaMessage = createKafkaMessageFromOrderFillEvent({
+      orderFillEvent,
+      transactionIndex,
+      eventIndex,
+      height: parseInt(defaultHeight, 10),
+      time: defaultTime,
+      txHash: defaultTxHash,
+    });
+
+    await Promise.all([
+      // initial position for subaccount 1
+      PerpetualPositionTable.create({
+        ...defaultPerpetualPosition,
+        perpetualId: testConstants.defaultPerpetualMarket3.id,
+      }),
+      // initial position for subaccount 2
+      PerpetualPositionTable.create({
+        ...defaultPerpetualPosition,
+        
subaccountId: testConstants.defaultSubaccountId2, + perpetualId: testConstants.defaultPerpetualMarket3.id, + }), + ]); + + await onMessage(kafkaMessage); + + const takerOrderId: string = OrderTable.orderIdToUuid(takerOrderProto.orderId!); + + const [makerOrder, takerOrder]: [ + OrderFromDatabase | undefined, + OrderFromDatabase | undefined + ] = await Promise.all([ + OrderTable.findById(makerOrderId), + OrderTable.findById(takerOrderId), + ]); + + expect(makerOrder).toBeDefined(); + expect(takerOrder).toBeDefined(); + + // maker order is partially filled, and in CanceledOrdersCache + expect(makerOrder!.status).toEqual(status); + // taker order is partially filled, and not in CanceledOrdersCache + expect(takerOrder!.status).toEqual(OrderStatus.OPEN); + }); + async function expectDefaultOrderAndFillSubaccountKafkaMessages( producerSendMock: jest.SpyInstance, eventId: Buffer, diff --git a/indexer/services/ender/__tests__/handlers/perpetual-market-handler.test.ts b/indexer/services/ender/__tests__/handlers/perpetual-market-handler.test.ts index 67bb3cef7c..02235fd182 100644 --- a/indexer/services/ender/__tests__/handlers/perpetual-market-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/perpetual-market-handler.test.ts @@ -40,6 +40,7 @@ import { } from '../helpers/constants'; import { updateBlockCache } from '../../src/caches/block-cache'; import { createPostgresFunctions } from '../../src/helpers/postgres/postgres-functions'; +import config from '../../src/config'; describe('perpetualMarketHandler', () => { beforeAll(async () => { @@ -65,6 +66,7 @@ describe('perpetualMarketHandler', () => { afterEach(async () => { await dbHelpers.clearData(); + perpetualMarketRefresher.clear(); jest.clearAllMocks(); liquidityTierRefresher.clear(); }); @@ -103,71 +105,118 @@ describe('perpetualMarketHandler', () => { }); }); - it('fails when market doesnt exist for perpetual market', async () => { - const transactionIndex: number = 0; - const kafkaMessage: KafkaMessage = createKafkaMessageFromPerpetualMarketEvent({ - perpetualMarketEvent: defaultPerpetualMarketCreateEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'fails when market doesnt exist for perpetual market (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_PERPETUAL_MARKET_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; + const kafkaMessage: KafkaMessage = createKafkaMessageFromPerpetualMarketEvent({ + perpetualMarketEvent: defaultPerpetualMarketCreateEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); + + await expect(onMessage(kafkaMessage)).rejects.toThrowError(); }); - await expect(onMessage(kafkaMessage)).rejects.toThrowError(); - }); + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'fails when liquidity tier doesnt exist for perpetual market (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_PERPETUAL_MARKET_HANDLER_SQL_FUNCTION = useSqlFunction; + await MarketTable.create(testConstants.defaultMarket); + await marketRefresher.updateMarkets(); + const transactionIndex: number = 0; + const kafkaMessage: KafkaMessage = createKafkaMessageFromPerpetualMarketEvent({ + perpetualMarketEvent: defaultPerpetualMarketCreateEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + 
}); - it('fails when liquidity tier doesnt exist for perpetual market', async () => { - await MarketTable.create(testConstants.defaultMarket); - await marketRefresher.updateMarkets(); - const transactionIndex: number = 0; - const kafkaMessage: KafkaMessage = createKafkaMessageFromPerpetualMarketEvent({ - perpetualMarketEvent: defaultPerpetualMarketCreateEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, + await expect(onMessage(kafkaMessage)).rejects.toThrowError(); }); - await expect(onMessage(kafkaMessage)).rejects.toThrowError(); - }); - - it('creates new perpetual market', async () => { - await Promise.all([ - MarketTable.create(testConstants.defaultMarket), - LiquidityTiersTable.create(testConstants.defaultLiquidityTier), - ]); - await liquidityTierRefresher.updateLiquidityTiers(); - await marketRefresher.updateMarkets(); + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'creates new perpetual market (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_PERPETUAL_MARKET_HANDLER_SQL_FUNCTION = useSqlFunction; + await Promise.all([ + MarketTable.create(testConstants.defaultMarket), + LiquidityTiersTable.create(testConstants.defaultLiquidityTier), + ]); + await liquidityTierRefresher.updateLiquidityTiers(); + await marketRefresher.updateMarkets(); - const transactionIndex: number = 0; + const transactionIndex: number = 0; - const perpetualMarketEvent: PerpetualMarketCreateEventV1 = defaultPerpetualMarketCreateEvent; - const kafkaMessage: KafkaMessage = createKafkaMessageFromPerpetualMarketEvent({ - perpetualMarketEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); - // Confirm there is no existing perpetualMarket. - await expectNoExistingPerpetualMarkets(); + const perpetualMarketEvent: PerpetualMarketCreateEventV1 = defaultPerpetualMarketCreateEvent; + const kafkaMessage: KafkaMessage = createKafkaMessageFromPerpetualMarketEvent({ + perpetualMarketEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); + // Confirm there is no existing perpetualMarket. 
+ await expectNoExistingPerpetualMarkets(); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); - const newPerpetualMarkets: PerpetualMarketFromDatabase[] = await PerpetualMarketTable.findAll( - {}, - [], { - orderBy: [[PerpetualMarketColumns.id, Ordering.ASC]], - }); - expect(newPerpetualMarkets.length).toEqual(1); - expectPerpetualMarketMatchesEvent(perpetualMarketEvent, newPerpetualMarkets[0]); - expectTimingStats(); - const perpetualMarket: PerpetualMarketFromDatabase | undefined = perpetualMarketRefresher.getPerpetualMarketFromId('0'); - expect(perpetualMarket).toBeDefined(); - expectPerpetualMarket(perpetualMarket!, perpetualMarketEvent); - expectPerpetualMarketKafkaMessage(producerSendMock, [perpetualMarket!]); - }); + const newPerpetualMarkets: PerpetualMarketFromDatabase[] = await PerpetualMarketTable.findAll( + {}, + [], { + orderBy: [[PerpetualMarketColumns.id, Ordering.ASC]], + }); + expect(newPerpetualMarkets.length).toEqual(1); + expectPerpetualMarketMatchesEvent(perpetualMarketEvent, newPerpetualMarkets[0]); + if (!useSqlFunction) { + expectTimingStats(); + } + const perpetualMarket: PerpetualMarketFromDatabase | undefined = perpetualMarketRefresher.getPerpetualMarketFromId('0'); + expect(perpetualMarket).toBeDefined(); + expectPerpetualMarket(perpetualMarket!, perpetualMarketEvent); + expectPerpetualMarketKafkaMessage(producerSendMock, [perpetualMarket!]); + }); }); function expectTimingStats() { diff --git a/indexer/services/ender/__tests__/handlers/stateful-order/conditional-order-placement-handler.test.ts b/indexer/services/ender/__tests__/handlers/stateful-order/conditional-order-placement-handler.test.ts index 3c74bafe73..c995bee8a0 100644 --- a/indexer/services/ender/__tests__/handlers/stateful-order/conditional-order-placement-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/stateful-order/conditional-order-placement-handler.test.ts @@ -45,6 +45,7 @@ import Long from 'long'; import { producer } from '@dydxprotocol-indexer/kafka'; import { ConditionalOrderPlacementHandler } from '../../../src/handlers/stateful-order/conditional-order-placement-handler'; import { createPostgresFunctions } from '../../../src/helpers/postgres/postgres-functions'; +import config from '../../../src/config'; describe('conditionalOrderPlacementHandler', () => { beforeAll(async () => { @@ -125,7 +126,14 @@ describe('conditionalOrderPlacementHandler', () => { }); }); - it('successfully places order', async () => { + it.each([ + ['via knex', false], + ['via SQL function', true], + ])('successfully places order (%s)', async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; const kafkaMessage: KafkaMessage = createKafkaMessageFromStatefulOrderEvent( defaultStatefulOrderEvent, ); @@ -154,7 +162,9 @@ describe('conditionalOrderPlacementHandler', () => { updatedAt: defaultDateTime.toISO(), updatedAtHeight: defaultHeight.toString(), }); - expectTimingStats(); + if (!useSqlFunction) { + expectTimingStats(); + } expectOrderSubaccountKafkaMessage( producerSendMock, defaultOrder.orderId!.subaccountId!, @@ -162,7 +172,14 @@ describe('conditionalOrderPlacementHandler', () => { ); }); - it('successfully upserts order', async () => { + it.each([ + ['via knex', false], + ['via SQL function', true], + ])('successfully upserts order (%s)', async ( + _name: string, + 
useSqlFunction: boolean, + ) => { + config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; const subaccountId: string = SubaccountTable.subaccountIdToUuid( defaultOrder.orderId!.subaccountId!, ); @@ -215,7 +232,9 @@ describe('conditionalOrderPlacementHandler', () => { updatedAt: defaultDateTime.toISO(), updatedAtHeight: defaultHeight.toString(), }); - expectTimingStats(); + if (!useSqlFunction) { + expectTimingStats(); + } expectOrderSubaccountKafkaMessage( producerSendMock, defaultOrder.orderId!.subaccountId!, diff --git a/indexer/services/ender/__tests__/handlers/stateful-order/conditional-order-triggered-handler.test.ts b/indexer/services/ender/__tests__/handlers/stateful-order/conditional-order-triggered-handler.test.ts index 9c5701b636..395301a856 100644 --- a/indexer/services/ender/__tests__/handlers/stateful-order/conditional-order-triggered-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/stateful-order/conditional-order-triggered-handler.test.ts @@ -39,8 +39,9 @@ import { ORDER_FLAG_CONDITIONAL } from '@dydxprotocol-indexer/v4-proto-parser'; import { ConditionalOrderTriggeredHandler } from '../../../src/handlers/stateful-order/conditional-order-triggered-handler'; import { defaultPerpetualMarket } from '@dydxprotocol-indexer/postgres/build/__tests__/helpers/constants'; import { createPostgresFunctions } from '../../../src/helpers/postgres/postgres-functions'; +import config from '../../../src/config'; -describe('statefulOrderRemovalHandler', () => { +describe('conditionalOrderTriggeredHandler', () => { beforeAll(async () => { await dbHelpers.migrate(); await createPostgresFunctions(); @@ -110,7 +111,14 @@ describe('statefulOrderRemovalHandler', () => { }); }); - it('successfully triggers order and sends to vulcan', async () => { + it.each([ + ['via knex', false], + ['via SQL function', true], + ])('successfully triggers order and sends to vulcan (%s)', async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; await OrderTable.create({ ...testConstants.defaultOrderGoodTilBlockTime, orderFlags: conditionalOrderId.orderFlags.toString(), @@ -147,16 +155,25 @@ describe('statefulOrderRemovalHandler', () => { orderId: conditionalOrderId, offchainUpdate: expectedOffchainUpdate, }); - expectTimingStats(); + if (!useSqlFunction) { + expectTimingStats(); + } }); - it('throws error when attempting to trigger an order that does not exist', async () => { + it.each([ + ['via knex', false], + ['via SQL function', true], + ])('throws error when attempting to trigger an order that does not exist (%s)', async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; const kafkaMessage: KafkaMessage = createKafkaMessageFromStatefulOrderEvent( defaultStatefulOrderEvent, ); await expect(onMessage(kafkaMessage)).rejects.toThrowError( - new Error(`Unable to update order status with orderId: ${orderId}`), + `Unable to update order status with orderId: ${orderId}`, ); }); }); diff --git a/indexer/services/ender/__tests__/handlers/stateful-order/stateful-order-placement-handler.test.ts b/indexer/services/ender/__tests__/handlers/stateful-order/stateful-order-placement-handler.test.ts index cea37d7762..62cdce5817 100644 --- a/indexer/services/ender/__tests__/handlers/stateful-order/stateful-order-placement-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/stateful-order/stateful-order-placement-handler.test.ts @@ -45,6 +45,7 @@ import { 
STATEFUL_ORDER_ORDER_FILL_EVENT_TYPE } from '../../../src/constants'; import { producer } from '@dydxprotocol-indexer/kafka'; import { ORDER_FLAG_LONG_TERM } from '@dydxprotocol-indexer/v4-proto-parser'; import { createPostgresFunctions } from '../../../src/helpers/postgres/postgres-functions'; +import config from '../../../src/config'; describe('statefulOrderPlacementHandler', () => { beforeAll(async () => { @@ -138,12 +139,16 @@ describe('statefulOrderPlacementHandler', () => { it.each([ // TODO(IND-334): Remove after deprecating StatefulOrderPlacementEvent - ['stateful order placement', defaultStatefulOrderEvent], - ['stateful long term order placement', defaultStatefulOrderLongTermEvent], + ['stateful order placement (via knex)', defaultStatefulOrderEvent, false], + ['stateful order placement (via SQL function)', defaultStatefulOrderEvent, true], + ['stateful long term order placement (via knex)', defaultStatefulOrderLongTermEvent, false], + ['stateful long term order placement (via SQL function)', defaultStatefulOrderLongTermEvent, true], ])('successfully places order with %s', async ( _name: string, statefulOrderEvent: StatefulOrderEventV1, + useSqlFunction: boolean, ) => { + config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; const kafkaMessage: KafkaMessage = createKafkaMessageFromStatefulOrderEvent( statefulOrderEvent, ); @@ -172,7 +177,9 @@ describe('statefulOrderPlacementHandler', () => { updatedAt: defaultDateTime.toISO(), updatedAtHeight: defaultHeight.toString(), }); - expectTimingStats(); + if (!useSqlFunction) { + expectTimingStats(); + } const expectedOffchainUpdate: OffChainUpdateV1 = { orderPlace: { @@ -189,12 +196,16 @@ describe('statefulOrderPlacementHandler', () => { it.each([ // TODO(IND-334): Remove after deprecating StatefulOrderPlacementEvent - ['stateful order placement', defaultStatefulOrderEvent], - ['stateful long term order placement', defaultStatefulOrderLongTermEvent], + ['stateful order placement (via knex)', defaultStatefulOrderEvent, false], + ['stateful order placement (via SQL function)', defaultStatefulOrderEvent, true], + ['stateful long term order placement (via knex)', defaultStatefulOrderLongTermEvent, false], + ['stateful long term order placement (via SQL function)', defaultStatefulOrderLongTermEvent, true], ])('successfully upserts order with %s', async ( _name: string, statefulOrderEvent: StatefulOrderEventV1, + useSqlFunction: boolean, ) => { + config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; const subaccountId: string = SubaccountTable.subaccountIdToUuid( defaultOrder.orderId!.subaccountId!, ); @@ -247,7 +258,9 @@ describe('statefulOrderPlacementHandler', () => { updatedAt: defaultDateTime.toISO(), updatedAtHeight: defaultHeight.toString(), }); - expectTimingStats(); + if (!useSqlFunction) { + expectTimingStats(); + } // TODO[IND-20]: Add tests for vulcan messages }); }); diff --git a/indexer/services/ender/__tests__/handlers/stateful-order/stateful-order-removal-handler.test.ts b/indexer/services/ender/__tests__/handlers/stateful-order/stateful-order-removal-handler.test.ts index da1c3a4782..c47f935af1 100644 --- a/indexer/services/ender/__tests__/handlers/stateful-order/stateful-order-removal-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/stateful-order/stateful-order-removal-handler.test.ts @@ -35,6 +35,7 @@ import { stats, STATS_FUNCTION_NAME } from '@dydxprotocol-indexer/base'; import { STATEFUL_ORDER_ORDER_FILL_EVENT_TYPE } from '../../../src/constants'; import { producer } from 
'@dydxprotocol-indexer/kafka'; import { createPostgresFunctions } from '../../../src/helpers/postgres/postgres-functions'; +import config from '../../../src/config'; describe('statefulOrderRemovalHandler', () => { beforeAll(async () => { @@ -104,7 +105,14 @@ describe('statefulOrderRemovalHandler', () => { }); }); - it('successfully cancels and removes order', async () => { + it.each([ + ['via knex', false], + ['via SQL function', true], + ])('successfully cancels and removes order (%s)', async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; await OrderTable.create({ ...testConstants.defaultOrder, clientId: '0', @@ -121,7 +129,9 @@ describe('statefulOrderRemovalHandler', () => { updatedAt: defaultDateTime.toISO(), updatedAtHeight: defaultHeight.toString(), })); - expectTimingStats(); + if (!useSqlFunction) { + expectTimingStats(); + } const expectedOffchainUpdate: OffChainUpdateV1 = { orderRemove: { @@ -137,13 +147,20 @@ describe('statefulOrderRemovalHandler', () => { }); }); - it('throws error when attempting to cancel an order that does not exist', async () => { + it.each([ + ['via knex', false], + ['via SQL function', true], + ])('throws error when attempting to cancel an order that does not exist (%s)', async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; const kafkaMessage: KafkaMessage = createKafkaMessageFromStatefulOrderEvent( defaultStatefulOrderEvent, ); await expect(onMessage(kafkaMessage)).rejects.toThrowError( - new Error(`Unable to update order status with orderId: ${orderId}`), + `Unable to update order status with orderId: ${orderId}`, ); }); }); diff --git a/indexer/services/ender/__tests__/handlers/transfer-handler.test.ts b/indexer/services/ender/__tests__/handlers/transfer-handler.test.ts index 43d397000c..fe05ed7ff9 100644 --- a/indexer/services/ender/__tests__/handlers/transfer-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/transfer-handler.test.ts @@ -52,6 +52,7 @@ import { } from '../helpers/constants'; import { updateBlockCache } from '../../src/caches/block-cache'; import { createPostgresFunctions } from '../../src/helpers/postgres/postgres-functions'; +import config from '../../src/config'; describe('transferHandler', () => { beforeAll(async () => { @@ -134,336 +135,452 @@ describe('transferHandler', () => { }); }); - it('fails when TransferEvent does not contain sender subaccountId', async () => { - const transactionIndex: number = 0; - const transferEvent: TransferEventV1 = TransferEventV1.fromPartial({ - recipient: { - subaccountId: { - owner: '', - number: 0, + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'fails when TransferEvent does not contain sender subaccountId (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_TRANSFER_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; + const transferEvent: TransferEventV1 = TransferEventV1.fromPartial({ + recipient: { + subaccountId: { + owner: '', + number: 0, + }, }, - }, - assetId: 0, - amount: 100, - }); - const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ - transferEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); - - const loggerCrit = jest.spyOn(logger, 'crit'); - const loggerError = jest.spyOn(logger, 'error'); - await expect(onMessage(kafkaMessage)).rejects.toThrowError( 
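The stateful-order and transfer handler tests in this diff all migrate to the same `it.each` shape: a `[description, useSqlFunction]` tuple drives two runs of each case, with the config flag set at the top of the test body. A minimal sketch of that pattern (the flag, `config`, and `onMessage` are from this PR; the event construction is elided):

```typescript
import config from '../../src/config';
import { onMessage } from '../../src/lib/on-message';

it.each([
  ['via knex', false],
  ['via SQL function', true],
])('handles the event (%s)', async (_name: string, useSqlFunction: boolean) => {
  // Toggle the code path under test before invoking the handler.
  config.USE_TRANSFER_HANDLER_SQL_FUNCTION = useSqlFunction;
  // ...construct kafkaMessage exactly as the surrounding tests do, then:
  // await onMessage(kafkaMessage);
});
```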
- new ParseMessageError( - 'TransferEvent must have either a sender subaccount id or sender wallet address', - ), - ); + assetId: 0, + amount: 100, + }); + const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ + transferEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); - expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ - at: 'TransferValidator#logAndThrowParseMessageError', - message: 'TransferEvent must have either a sender subaccount id or sender wallet address', - })); - expect(loggerCrit).toHaveBeenCalledWith(expect.objectContaining({ - at: 'onMessage#onMessage', - message: 'Error: Unable to parse message, this must be due to a bug in V4 node', - })); - }); + const loggerCrit = jest.spyOn(logger, 'crit'); + const loggerError = jest.spyOn(logger, 'error'); + await expect(onMessage(kafkaMessage)).rejects.toThrowError( + new ParseMessageError( + 'TransferEvent must have either a sender subaccount id or sender wallet address', + ), + ); - it('fails when TransferEvent does not contain recipient subaccountId', async () => { - const transactionIndex: number = 0; - const transferEvent: TransferEventV1 = TransferEventV1.fromPartial({ - sender: { - subaccountId: { - owner: '', - number: 0, - }, - }, - assetId: 0, - amount: 100, - }); - const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ - transferEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, + expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ + at: 'TransferValidator#logAndThrowParseMessageError', + message: 'TransferEvent must have either a sender subaccount id or sender wallet address', + })); + expect(loggerCrit).toHaveBeenCalledWith(expect.objectContaining({ + at: 'onMessage#onMessage', + message: 'Error: Unable to parse message, this must be due to a bug in V4 node', + })); }); - const loggerCrit = jest.spyOn(logger, 'crit'); - const loggerError = jest.spyOn(logger, 'error'); - await expect(onMessage(kafkaMessage)).rejects.toThrowError( - new ParseMessageError( - 'TransferEvent must have either a recipient subaccount id or recipient wallet address', - ), - ); - - expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ - at: 'TransferValidator#logAndThrowParseMessageError', - message: 'TransferEvent must have either a recipient subaccount id or recipient wallet address', - })); - expect(loggerCrit).toHaveBeenCalledWith(expect.objectContaining({ - at: 'onMessage#onMessage', - message: 'Error: Unable to parse message, this must be due to a bug in V4 node', - })); - }); + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'fails when TransferEvent does not contain recipient subaccountId (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_TRANSFER_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; + const transferEvent: TransferEventV1 = TransferEventV1.fromPartial({ + sender: { + subaccountId: { + owner: '', + number: 0, + }, + }, + assetId: 0, + amount: 100, + }); + const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ + transferEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); - it('creates new transfer for existing subaccounts', async () => { - const transactionIndex: number = 0; + const loggerCrit = jest.spyOn(logger, 'crit'); + const loggerError = jest.spyOn(logger, 'error'); + await 
expect(onMessage(kafkaMessage)).rejects.toThrowError( + new ParseMessageError( + 'TransferEvent must have either a recipient subaccount id or recipient wallet address', + ), + ); - const transferEvent: TransferEventV1 = defaultTransferEvent; - const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ - transferEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, + expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ + at: 'TransferValidator#logAndThrowParseMessageError', + message: 'TransferEvent must have either a recipient subaccount id or recipient wallet address', + })); + expect(loggerCrit).toHaveBeenCalledWith(expect.objectContaining({ + at: 'onMessage#onMessage', + message: 'Error: Unable to parse message, this must be due to a bug in V4 node', + })); }); - // Create the subaccounts - await Promise.all([ - SubaccountTable.upsert(defaultSenderSubaccount), - SubaccountTable.upsert(defaultRecipientSubaccount), - ]); - - // Confirm there are subaccounts - const subaccountIds: string[] = [defaultSenderSubaccountId, defaultRecipientSubaccountId]; - _.each(subaccountIds, async (subaccountId) => { - const existingSubaccount: SubaccountFromDatabase | undefined = await SubaccountTable.findById( - subaccountId, - ); - expect(existingSubaccount).toBeDefined(); - }); + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'creates new transfer for existing subaccounts (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_TRANSFER_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; - // Confirm there is no existing transfer to or from the recipient/sender subaccounts - await expectNoExistingTransfers([defaultRecipientSubaccountId, defaultSenderSubaccountId]); + const transferEvent: TransferEventV1 = defaultTransferEvent; + const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ + transferEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); + // Create the subaccounts + await Promise.all([ + SubaccountTable.upsert(defaultSenderSubaccount), + SubaccountTable.upsert(defaultRecipientSubaccount), + ]); + + // Confirm there are subaccounts + const subaccountIds: string[] = [defaultSenderSubaccountId, defaultRecipientSubaccountId]; + _.each(subaccountIds, async (subaccountId) => { + const existingSubaccount: + SubaccountFromDatabase | undefined = await SubaccountTable.findById( + subaccountId, + ); + expect(existingSubaccount).toBeDefined(); + }); - const newTransfer: TransferFromDatabase = await expectAndReturnNewTransfer({ - recipientSubaccountId: defaultRecipientSubaccountId, - senderSubaccountId: defaultSenderSubaccountId, - }); + // Confirm there is no existing transfer to or from the recipient/sender subaccounts + await expectNoExistingTransfers([defaultRecipientSubaccountId, defaultSenderSubaccountId]); - expectTransferMatchesEvent(transferEvent, newTransfer, asset); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); - await expectTransfersSubaccountKafkaMessage( - producerSendMock, - transferEvent, - newTransfer, - asset, - ); - expectTimingStats(); - }); + const newTransfer: TransferFromDatabase = await expectAndReturnNewTransfer({ + recipientSubaccountId: defaultRecipientSubaccountId, + 
senderSubaccountId: defaultSenderSubaccountId, + }); - it('creates new deposit for existing subaccount', async () => { - const transactionIndex: number = 0; + expectTransferMatchesEvent(transferEvent, newTransfer, asset); - const depositEvent: TransferEventV1 = defaultDepositEvent; - const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ - transferEvent: depositEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, + await expectTransfersSubaccountKafkaMessage( + producerSendMock, + transferEvent, + newTransfer, + asset, + ); + if (!useSqlFunction) { + expectTimingStats(); + } }); - // Create the subaccounts - await Promise.all([ - SubaccountTable.upsert(defaultRecipientSubaccount), - ]); + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'creates new deposit for existing subaccount (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_TRANSFER_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; - // Confirm there is a recipient subaccount - const existingSubaccount: SubaccountFromDatabase | undefined = await SubaccountTable.findById( - defaultRecipientSubaccountId, - ); - expect(existingSubaccount).toBeDefined(); + const depositEvent: TransferEventV1 = defaultDepositEvent; + const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ + transferEvent: depositEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); - // Confirm there is no existing transfer to or from the recipient subaccount - await expectNoExistingTransfers([defaultRecipientSubaccountId]); + // Create the subaccounts + await Promise.all([ + SubaccountTable.upsert(defaultRecipientSubaccount), + ]); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); + // Confirm there is a recipient subaccount + const existingSubaccount: SubaccountFromDatabase | undefined = await SubaccountTable.findById( + defaultRecipientSubaccountId, + ); + expect(existingSubaccount).toBeDefined(); - const newTransfer: TransferFromDatabase = await expectAndReturnNewTransfer( - { - recipientSubaccountId: defaultRecipientSubaccountId, - }, - ); + // Confirm there is no existing transfer to or from the recipient subaccount + await expectNoExistingTransfers([defaultRecipientSubaccountId]); - expectTransferMatchesEvent(depositEvent, newTransfer, asset); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); - await expectTransfersSubaccountKafkaMessage( - producerSendMock, - depositEvent, - newTransfer, - asset, - ); - // Confirm the wallet was created - const wallet: WalletFromDatabase | undefined = await WalletTable.findById( - defaultWalletAddress, - ); - expect(wallet).toBeDefined(); - expectTimingStats(); - }); + const newTransfer: TransferFromDatabase = await expectAndReturnNewTransfer( + { + recipientSubaccountId: defaultRecipientSubaccountId, + }, + ); - it('creates new deposit for previously non-existent subaccount', async () => { - const transactionIndex: number = 0; + expectTransferMatchesEvent(depositEvent, newTransfer, asset); - const depositEvent: TransferEventV1 = defaultDepositEvent; - const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ - transferEvent: depositEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, + await expectTransfersSubaccountKafkaMessage( + 
producerSendMock, + depositEvent, + newTransfer, + asset, + ); + // Confirm the wallet was created + const wallet: WalletFromDatabase | undefined = await WalletTable.findById( + defaultWalletAddress, + ); + expect(wallet).toBeDefined(); + if (!useSqlFunction) { + expectTimingStats(); + } }); - // Confirm there is no recipient subaccount - const existingSubaccount: SubaccountFromDatabase | undefined = await SubaccountTable.findById( - defaultRecipientSubaccountId, - ); - expect(existingSubaccount).toBeUndefined(); + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'creates new deposit for previously non-existent subaccount (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_TRANSFER_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; - // Confirm there is no existing transfer to or from the recipient subaccount - await expectNoExistingTransfers([defaultRecipientSubaccountId]); + const depositEvent: TransferEventV1 = defaultDepositEvent; + const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ + transferEvent: depositEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); + // Confirm there is no recipient subaccount + const existingSubaccount: SubaccountFromDatabase | undefined = await SubaccountTable.findById( + defaultRecipientSubaccountId, + ); + expect(existingSubaccount).toBeUndefined(); - const newTransfer: TransferFromDatabase = await expectAndReturnNewTransfer( - { - recipientSubaccountId: defaultRecipientSubaccountId, - }, - ); + // Confirm there is no existing transfer to or from the recipient subaccount + await expectNoExistingTransfers([defaultRecipientSubaccountId]); - expectTransferMatchesEvent(depositEvent, newTransfer, asset); - await expectTransfersSubaccountKafkaMessage( - producerSendMock, - depositEvent, - newTransfer, - asset, - ); - // Confirm the wallet was created - const wallet: WalletFromDatabase | undefined = await WalletTable.findById( - defaultWalletAddress, - ); - const newRecipientSubaccount: SubaccountFromDatabase | undefined = await - SubaccountTable.findById( - defaultRecipientSubaccountId, - ); - expect(newRecipientSubaccount).toBeDefined(); - expect(wallet).toBeDefined(); - expectTimingStats(); - }); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); - it('creates new withdrawal for existing subaccount', async () => { - const transactionIndex: number = 0; + const newTransfer: TransferFromDatabase = await expectAndReturnNewTransfer( + { + recipientSubaccountId: defaultRecipientSubaccountId, + }, + ); - const withdrawalEvent: TransferEventV1 = defaultWithdrawalEvent; - const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ - transferEvent: withdrawalEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, + expectTransferMatchesEvent(depositEvent, newTransfer, asset); + await expectTransfersSubaccountKafkaMessage( + producerSendMock, + depositEvent, + newTransfer, + asset, + ); + // Confirm the wallet was created + const wallet: WalletFromDatabase | undefined = await WalletTable.findById( + defaultWalletAddress, + ); + const newRecipientSubaccount: SubaccountFromDatabase | undefined = await + SubaccountTable.findById( + defaultRecipientSubaccountId, + ); + 
expect(newRecipientSubaccount).toBeDefined(); + expect(wallet).toBeDefined(); + if (!useSqlFunction) { + expectTimingStats(); + } }); - // Create the subaccounts - await Promise.all([ - SubaccountTable.upsert(defaultSenderSubaccount), - ]); + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'creates new withdrawal for existing subaccount (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_TRANSFER_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; - // Confirm there is a sender subaccount - const existingSubaccount: SubaccountFromDatabase | undefined = await SubaccountTable.findById( - defaultSenderSubaccountId, - ); - expect(existingSubaccount).toBeDefined(); + const withdrawalEvent: TransferEventV1 = defaultWithdrawalEvent; + const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ + transferEvent: withdrawalEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); - // Confirm there is no existing transfer to or from the sender subaccount - await expectNoExistingTransfers([defaultSenderSubaccountId]); + // Create the subaccounts + await Promise.all([ + SubaccountTable.upsert(defaultSenderSubaccount), + ]); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); + // Confirm there is a sender subaccount + const existingSubaccount: SubaccountFromDatabase | undefined = await SubaccountTable.findById( + defaultSenderSubaccountId, + ); + expect(existingSubaccount).toBeDefined(); - const newTransfer: TransferFromDatabase = await expectAndReturnNewTransfer( - { - senderSubaccountId: defaultSenderSubaccountId, - }, - ); + // Confirm there is no existing transfer to or from the sender subaccount + await expectNoExistingTransfers([defaultSenderSubaccountId]); - expectTransferMatchesEvent(withdrawalEvent, newTransfer, asset); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); - await expectTransfersSubaccountKafkaMessage( - producerSendMock, - withdrawalEvent, - newTransfer, - asset, - ); - // Confirm the wallet was created - const wallet: WalletFromDatabase | undefined = await WalletTable.findById( - defaultWalletAddress, - ); - expect(wallet).toBeDefined(); - expectTimingStats(); - }); + const newTransfer: TransferFromDatabase = await expectAndReturnNewTransfer( + { + senderSubaccountId: defaultSenderSubaccountId, + }, + ); - it('creates new transfer and the recipient subaccount', async () => { - const transactionIndex: number = 0; + expectTransferMatchesEvent(withdrawalEvent, newTransfer, asset); - const transferEvent: TransferEventV1 = defaultTransferEvent; - const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ - transferEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, + await expectTransfersSubaccountKafkaMessage( + producerSendMock, + withdrawalEvent, + newTransfer, + asset, + ); + // Confirm the wallet was created + const wallet: WalletFromDatabase | undefined = await WalletTable.findById( + defaultWalletAddress, + ); + expect(wallet).toBeDefined(); + if (!useSqlFunction) { + expectTimingStats(); + } }); - await SubaccountTable.upsert(defaultSenderSubaccount); + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'creates new transfer and the recipient subaccount (%s)', + async ( + _name: string, + useSqlFunction: boolean, 
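Every converted test wraps `expectTimingStats()` in `if (!useSqlFunction)`, because the per-step timing stats are only emitted by `runFuncWithTimingStatAndErrorLogging` on the knex path; the SQL-function path issues a single raw query. Since the guard repeats in every test, it could be folded into a small helper, sketched here as a hypothetical addition (not part of the PR):

```typescript
// Hypothetical helper: assert per-step timing stats only on the knex path.
// Assumes the file-local expectTimingStats() helper used by these tests.
function expectTimingStatsIfKnex(useSqlFunction: boolean): void {
  if (!useSqlFunction) {
    expectTimingStats();
  }
}
```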
+ ) => { + config.USE_TRANSFER_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; - // Confirm there is 1 subaccount - const existingSenderSubaccount: SubaccountFromDatabase | undefined = await - SubaccountTable.findById( - defaultSenderSubaccountId, - ); - expect(existingSenderSubaccount).toBeDefined(); - const existingRecipientSubaccount: SubaccountFromDatabase | undefined = await - SubaccountTable.findById( - defaultRecipientSubaccountId, - ); - expect(existingRecipientSubaccount).toBeUndefined(); + const transferEvent: TransferEventV1 = defaultTransferEvent; + const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ + transferEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); - // Confirm there is no existing transfers - await expectNoExistingTransfers([defaultRecipientSubaccountId, defaultSenderSubaccountId]); + await SubaccountTable.upsert(defaultSenderSubaccount); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); + // Confirm there is 1 subaccount + const existingSenderSubaccount: SubaccountFromDatabase | undefined = await + SubaccountTable.findById( + defaultSenderSubaccountId, + ); + expect(existingSenderSubaccount).toBeDefined(); + const existingRecipientSubaccount: SubaccountFromDatabase | undefined = await + SubaccountTable.findById( + defaultRecipientSubaccountId, + ); + expect(existingRecipientSubaccount).toBeUndefined(); - const newTransfer: TransferFromDatabase = await expectAndReturnNewTransfer( - { - recipientSubaccountId: defaultRecipientSubaccountId, - senderSubaccountId: defaultSenderSubaccountId, - }); + // Confirm there is no existing transfers + await expectNoExistingTransfers([defaultRecipientSubaccountId, defaultSenderSubaccountId]); - expectTransferMatchesEvent(transferEvent, newTransfer, asset); - const newRecipientSubaccount: SubaccountFromDatabase | undefined = await - SubaccountTable.findById( - defaultRecipientSubaccountId, - ); - expect(newRecipientSubaccount).toBeDefined(); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); - await expectTransfersSubaccountKafkaMessage( - producerSendMock, - transferEvent, - newTransfer, - asset, - ); - expectTimingStats(); - }); + const newTransfer: TransferFromDatabase = await expectAndReturnNewTransfer( + { + recipientSubaccountId: defaultRecipientSubaccountId, + senderSubaccountId: defaultSenderSubaccountId, + }); + + expectTransferMatchesEvent(transferEvent, newTransfer, asset); + const newRecipientSubaccount: SubaccountFromDatabase | undefined = await + SubaccountTable.findById( + defaultRecipientSubaccountId, + ); + expect(newRecipientSubaccount).toBeDefined(); + + await expectTransfersSubaccountKafkaMessage( + producerSendMock, + transferEvent, + newTransfer, + asset, + ); + if (!useSqlFunction) { + expectTimingStats(); + } + }); }); function createKafkaMessageFromTransferEvent({ diff --git a/indexer/services/ender/__tests__/handlers/update-clob-pair-handler.test.ts b/indexer/services/ender/__tests__/handlers/update-clob-pair-handler.test.ts index f0041ac215..23b0fbd255 100644 --- a/indexer/services/ender/__tests__/handlers/update-clob-pair-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/update-clob-pair-handler.test.ts @@ -33,6 +33,7 @@ import { createKafkaMessage, producer } from '@dydxprotocol-indexer/kafka'; import { KafkaMessage } from 'kafkajs'; import { onMessage } from 
'../../src/lib/on-message'; import { createPostgresFunctions } from '../../src/helpers/postgres/postgres-functions'; +import config from '../../src/config'; describe('update-clob-pair-handler', () => { beforeAll(async () => { @@ -91,35 +92,54 @@ describe('update-clob-pair-handler', () => { }); }); - it('updates an existing perpetual market', async () => { - const transactionIndex: number = 0; - const kafkaMessage: KafkaMessage = createKafkaMessageFromUpdateClobPairEvent({ - updatePerpetualEvent: defaultUpdateClobPairEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'updates an existing perpetual market (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_UPDATE_CLOB_PAIR_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; + const kafkaMessage: KafkaMessage = createKafkaMessageFromUpdateClobPairEvent({ + updatePerpetualEvent: defaultUpdateClobPairEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); - const perpetualMarketId: string = perpetualMarketRefresher.getPerpetualMarketFromClobPairId( - defaultUpdateClobPairEvent.clobPairId.toString(), - )!.id; - const perpetualMarket: - PerpetualMarketFromDatabase | undefined = await PerpetualMarketTable.findById( - perpetualMarketId, - ); - expect(perpetualMarket).toEqual(expect.objectContaining({ - clobPairId: defaultUpdateClobPairEvent.clobPairId.toString(), - status: protocolTranslations.clobStatusToMarketStatus(defaultUpdateClobPairEvent.status), - quantumConversionExponent: defaultUpdateClobPairEvent.quantumConversionExponent, - subticksPerTick: defaultUpdateClobPairEvent.subticksPerTick, - stepBaseQuantums: defaultUpdateClobPairEvent.stepBaseQuantums.toNumber(), - })); - expectTimingStats(); - expectPerpetualMarketKafkaMessage(producerSendMock, [perpetualMarket!]); - }); + const perpetualMarketId: string = perpetualMarketRefresher.getPerpetualMarketFromClobPairId( + defaultUpdateClobPairEvent.clobPairId.toString(), + )!.id; + const perpetualMarket: + PerpetualMarketFromDatabase | undefined = await PerpetualMarketTable.findById( + perpetualMarketId, + ); + expect(perpetualMarket).toEqual(expect.objectContaining({ + clobPairId: defaultUpdateClobPairEvent.clobPairId.toString(), + status: protocolTranslations.clobStatusToMarketStatus(defaultUpdateClobPairEvent.status), + quantumConversionExponent: defaultUpdateClobPairEvent.quantumConversionExponent, + subticksPerTick: defaultUpdateClobPairEvent.subticksPerTick, + stepBaseQuantums: defaultUpdateClobPairEvent.stepBaseQuantums.toNumber(), + })); + expect(perpetualMarket).toEqual( + perpetualMarketRefresher.getPerpetualMarketFromId(perpetualMarketId)); + if (!useSqlFunction) { + expectTimingStats(); + } + expectPerpetualMarketKafkaMessage(producerSendMock, [perpetualMarket!]); + }); }); function expectTimingStats() { diff --git a/indexer/services/ender/__tests__/handlers/update-perpetual-handler.test.ts b/indexer/services/ender/__tests__/handlers/update-perpetual-handler.test.ts index af6f99c904..dcc3103023 100644 --- a/indexer/services/ender/__tests__/handlers/update-perpetual-handler.test.ts +++ 
b/indexer/services/ender/__tests__/handlers/update-perpetual-handler.test.ts @@ -32,6 +32,7 @@ import { createKafkaMessage, producer } from '@dydxprotocol-indexer/kafka'; import { KafkaMessage } from 'kafkajs'; import { onMessage } from '../../src/lib/on-message'; import { createPostgresFunctions } from '../../src/helpers/postgres/postgres-functions'; +import config from '../../src/config'; describe('update-perpetual-handler', () => { beforeAll(async () => { @@ -90,32 +91,52 @@ describe('update-perpetual-handler', () => { }); }); - it('updates an existing perpetual market', async () => { - const transactionIndex: number = 0; - const kafkaMessage: KafkaMessage = createKafkaMessageFromUpdatePerpetualEvent({ - updatePerpetualEvent: defaultUpdatePerpetualEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'updates an existing perpetual market (%s)', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_UPDATE_PERPETUAL_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; + const kafkaMessage: KafkaMessage = createKafkaMessageFromUpdatePerpetualEvent({ + updatePerpetualEvent: defaultUpdatePerpetualEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); - const perpetualMarket: - PerpetualMarketFromDatabase | undefined = await PerpetualMarketTable.findById( - defaultUpdatePerpetualEvent.id.toString(), - ); - expect(perpetualMarket).toEqual(expect.objectContaining({ - id: defaultUpdatePerpetualEvent.id.toString(), - ticker: defaultUpdatePerpetualEvent.ticker, - marketId: defaultUpdatePerpetualEvent.marketId, - atomicResolution: defaultUpdatePerpetualEvent.atomicResolution, - liquidityTierId: defaultUpdatePerpetualEvent.liquidityTier, - })); - expectTimingStats(); - expectPerpetualMarketKafkaMessage(producerSendMock, [perpetualMarket!]); - }); + const perpetualMarket: + PerpetualMarketFromDatabase | undefined = await PerpetualMarketTable.findById( + defaultUpdatePerpetualEvent.id.toString(), + ); + expect(perpetualMarket).toEqual(expect.objectContaining({ + id: defaultUpdatePerpetualEvent.id.toString(), + ticker: defaultUpdatePerpetualEvent.ticker, + marketId: defaultUpdatePerpetualEvent.marketId, + atomicResolution: defaultUpdatePerpetualEvent.atomicResolution, + liquidityTierId: defaultUpdatePerpetualEvent.liquidityTier, + })); + expect(perpetualMarket).toEqual( + perpetualMarketRefresher.getPerpetualMarketFromId( + defaultUpdatePerpetualEvent.id.toString())); + if (!useSqlFunction) { + expectTimingStats(); + } + expectPerpetualMarketKafkaMessage(producerSendMock, [perpetualMarket!]); + }); }); function expectTimingStats() { diff --git a/indexer/services/ender/__tests__/helpers/redis-helpers.ts b/indexer/services/ender/__tests__/helpers/redis-helpers.ts index 7002d8b79c..3af8f0c1f0 100644 --- a/indexer/services/ender/__tests__/helpers/redis-helpers.ts +++ b/indexer/services/ender/__tests__/helpers/redis-helpers.ts @@ -1,4 +1,7 @@ -import { NextFundingCache } from '@dydxprotocol-indexer/redis'; +import { + NextFundingCache, + StateFilledQuantumsCache, +} from '@dydxprotocol-indexer/redis'; import Big from 'big.js'; import { redisClient } from 
'../../src/helpers/redis/redis-controller';
@@ -13,3 +16,16 @@ export async function expectNextFundingRate(
   );
   expect(rates[ticker]).toEqual(rate);
 }
+
+export async function expectStateFilledQuantums(
+  orderUuid: string,
+  quantums: string,
+): Promise<void> {
+  const stateFilledQuantums: string | undefined = await StateFilledQuantumsCache
+    .getStateFilledQuantums(
+      orderUuid,
+      redisClient,
+    );
+  expect(stateFilledQuantums).toBeDefined();
+  expect(stateFilledQuantums).toEqual(quantums);
+}
diff --git a/indexer/services/ender/__tests__/lib/sync-handlers.test.ts b/indexer/services/ender/__tests__/lib/sync-handlers.test.ts
index 4967a7b28d..f68f6a0f97 100644
--- a/indexer/services/ender/__tests__/lib/sync-handlers.test.ts
+++ b/indexer/services/ender/__tests__/lib/sync-handlers.test.ts
@@ -30,6 +30,7 @@ import {
   Transaction,
 } from '@dydxprotocol-indexer/postgres';
 import { KafkaPublisher } from '../../src/lib/kafka-publisher';
+import { createPostgresFunctions } from '../../src/helpers/postgres/postgres-functions';
 
 const defaultMarketEventBinary: Uint8Array = Uint8Array.from(MarketEventV1.encode(
   defaultMarketCreate,
@@ -63,6 +64,10 @@ describe('syncHandler', () => {
   );
 
   describe('addHandler/process', () => {
+    beforeAll(async () => {
+      await createPostgresFunctions();
+    });
+
     beforeEach(async () => {
       await BlockTable.create({
         blockHeight: '1',
@@ -86,6 +91,10 @@ describe('syncHandler', () => {
       await dbHelpers.clearData();
     });
 
+    afterAll(async () => {
+      await dbHelpers.teardown();
+    });
+
     it('successfully adds handler', async () => {
       const synchHandlers: SyncHandlers = new SyncHandlers();
       const txId: number = await Transaction.start();
diff --git a/indexer/services/ender/__tests__/scripts/scripts.test.ts b/indexer/services/ender/__tests__/scripts/scripts.test.ts
index 17fbe45943..7be6875a34 100644
--- a/indexer/services/ender/__tests__/scripts/scripts.test.ts
+++ b/indexer/services/ender/__tests__/scripts/scripts.test.ts
@@ -7,10 +7,11 @@ import {
   IndexerTendermintEvent_BlockEvent,
   AssetCreateEventV1,
   SubaccountUpdateEventV1,
-  MarketEventV1,
+  MarketEventV1, IndexerOrder_ConditionType,
 } from '@dydxprotocol-indexer/v4-protos';
 import {
   BUFFER_ENCODING_UTF_8,
+  CLOB_STATUS_TO_MARKET_STATUS,
   dbHelpers,
   AssetPositionTable,
   PerpetualPositionTable,
@@ -19,6 +20,8 @@ import {
   PositionSide,
   TendermintEventTable,
   FillTable,
+  FundingIndexUpdatesTable,
+  OraclePriceTable,
   OrderTable,
   protocolTranslations,
   SubaccountTable,
@@ -27,6 +30,7 @@ import {
   uuid,
   TransactionTable,
   TransactionFromDatabase,
+  TransferTable,
   BlockTable,
   TendermintEventFromDatabase,
 } from '@dydxprotocol-indexer/postgres';
@@ -171,6 +175,16 @@ describe('SQL Function Tests', () => {
     expect(result).toEqual(protocolTranslations.protocolOrderTIFToTIF(value));
   });
 
+  it.each([
+    ['LIMIT', IndexerOrder_ConditionType.UNRECOGNIZED],
+    ['LIMIT', IndexerOrder_ConditionType.CONDITION_TYPE_UNSPECIFIED],
+    ['TAKE_PROFIT', IndexerOrder_ConditionType.CONDITION_TYPE_TAKE_PROFIT],
+    ['STOP_LIMIT', IndexerOrder_ConditionType.CONDITION_TYPE_STOP_LOSS],
+  ])('dydx_protocol_condition_type_to_order_type (%s)', async (_name: string, value: IndexerOrder_ConditionType) => {
+    const result = await getSingleRawQueryResultRow(`SELECT dydx_protocol_condition_type_to_order_type('${value}') AS result`);
+    expect(result).toEqual(protocolTranslations.protocolConditionTypeToOrderType(value));
+  });
+
   it.each([
     '0', '1', '-1', '10000000000000000000000000000', '-20000000000000000000000000000',
   ])('dydx_from_serializable_int (%s)', async (value: string) => {
@@
-253,6 +267,16 @@ describe('SQL Function Tests', () => { expect(result).toEqual(FillTable.uuid(eventId, liquidity)); }); + it.each([ + [1, 2, 3, 4], + [5, 6, 7, 8], + ])('dydx_uuid_from_funding_index_update_parts (%s, %s, %s, %s)', async (blockHeight: number, transactionIndex: number, eventIndex: number, perpetualId: number) => { + const eventId = TendermintEventTable.createEventId(`${blockHeight}`, transactionIndex, eventIndex); + const result = await getSingleRawQueryResultRow( + `SELECT dydx_uuid_from_funding_index_update_parts('${blockHeight}', '\\x${eventId.toString('hex')}'::bytea, '${perpetualId}') AS result`); + expect(result).toEqual(FundingIndexUpdatesTable.uuid(`${blockHeight}`, eventId, `${perpetualId}`)); + }); + it.each([ { subaccountId: { @@ -290,7 +314,7 @@ describe('SQL Function Tests', () => { 5, 6, ], - ])('dydx_uuid_from_perpetual_position_parts (%s)', async (subaccountId: IndexerSubaccountId, blockHeight: number, transactionIndex: number, eventIndex: number) => { + ])('dydx_uuid_from_perpetual_position_parts (%s, %s, %s, %s)', async (subaccountId: IndexerSubaccountId, blockHeight: number, transactionIndex: number, eventIndex: number) => { const subaccountUuid = SubaccountTable.subaccountIdToUuid(subaccountId); const eventId = TendermintEventTable.createEventId(`${blockHeight}`, transactionIndex, eventIndex); const result = await getSingleRawQueryResultRow( @@ -311,6 +335,66 @@ describe('SQL Function Tests', () => { expect(result).toEqual(SubaccountTable.subaccountIdToUuid(subaccountId)); }); + it.each([ + [ + { + owner: testConstants.defaultSubaccount.address, + number: testConstants.defaultSubaccount.subaccountNumber, + }, + { + owner: testConstants.defaultSubaccount2.address, + number: testConstants.defaultSubaccount2.subaccountNumber, + }, + undefined, + undefined, + ], + [ + { + owner: testConstants.defaultSubaccount2.address, + number: testConstants.defaultSubaccount2.subaccountNumber, + }, + undefined, + 'senderWallet', + undefined, + ], + [ + { + owner: testConstants.defaultSubaccount.address, + number: testConstants.defaultSubaccount.subaccountNumber, + }, + undefined, + undefined, + 'recipientWallet', + ], + [ + undefined, + undefined, + 'senderWallet', + 'recipientWallet', + ], + ])('dydx_uuid_from_transfer_parts (%s, %s, %s, %s)', async ( + senderSubaccountId: IndexerSubaccountId | undefined, + recipientSubaccountId: IndexerSubaccountId | undefined, + senderWalletAddress: string | undefined, + recipientWalletAddress: string | undefined) => { + const eventId: Buffer = TendermintEventTable.createEventId('1', 2, 3); + const assetId: string = '0'; + const senderSubaccountUuid: string | undefined = senderSubaccountId + ? SubaccountTable.subaccountIdToUuid(senderSubaccountId) : undefined; + const recipientSubaccountUuid: string | undefined = recipientSubaccountId + ? SubaccountTable.subaccountIdToUuid(recipientSubaccountId) : undefined; + const result = await getSingleRawQueryResultRow( + `SELECT dydx_uuid_from_transfer_parts('\\x${eventId.toString('hex')}'::bytea, '${assetId}', ${senderSubaccountUuid ? `'${senderSubaccountUuid}'` : 'NULL'}, ${recipientSubaccountUuid ? `'${recipientSubaccountUuid}'` : 'NULL'}, ${senderWalletAddress ? `'${senderWalletAddress}'` : 'NULL'}, ${recipientWalletAddress ? 
`'${recipientWalletAddress}'` : 'NULL'}) AS result`); + expect(result).toEqual(TransferTable.uuid( + eventId, + assetId, + senderSubaccountUuid, + recipientSubaccountUuid, + senderWalletAddress, + recipientWalletAddress, + )); + }); + it.each([ { event: { transactionIndex: 123 }, @@ -350,18 +434,40 @@ describe('SQL Function Tests', () => { } }); - it('dydx_uuid_from_transaction_parts (%s)', async () => { - const transactionParts = { - blockHeight: '123456', - transactionIndex: 123, - }; + it.each([ + [ + '123456', + 123, + ], + ])('dydx_uuid_from_transaction_parts (%s, %s)', async (blockHeight: string, transactionIndex: number) => { + const result = await getSingleRawQueryResultRow( + `SELECT dydx_uuid_from_transaction_parts('${blockHeight}', '${transactionIndex}') AS result`); + expect(result).toEqual( + TransactionTable.uuid(blockHeight, transactionIndex), + ); + }); + + it.each([ + [ + 123, + '123456', + ], + ])('dydx_uuid_from_oracle_price_parts (%s, %s)', async (marketId: number, blockHeight: string) => { const result = await getSingleRawQueryResultRow( - `SELECT dydx_uuid_from_transaction_parts('${transactionParts.blockHeight}', '${transactionParts.transactionIndex}') AS result`); + `SELECT dydx_uuid_from_oracle_price_parts('${marketId}', '${blockHeight}') AS result`); expect(result).toEqual( - TransactionTable.uuid(transactionParts.blockHeight, transactionParts.transactionIndex), + OraclePriceTable.uuid(marketId, blockHeight), ); }); + it('dydx_clob_pair_status_to_market_status should convert all statuses', async () => { + for (const [key, value] of Object.entries(CLOB_STATUS_TO_MARKET_STATUS)) { + const result = await getSingleRawQueryResultRow( + `SELECT dydx_clob_pair_status_to_market_status('${key}') AS result`); + expect(result).toEqual(value); + } + }); + it('dydx_create_transaction.sql should insert a transaction and return correct jsonb', async () => { const transactionHash: string = 'txnhash'; const blockHeight: string = '1'; @@ -478,7 +584,7 @@ describe('SQL Function Tests', () => { }); async function getSingleRawQueryResultRow(query: string): Promise { - const queryResult = await storeHelpers.rawQuery(query, {}).catch((error) => { + const queryResult = await storeHelpers.rawQuery(query, {}).catch((error: Error) => { throw error; }); return queryResult.rows[0].result; diff --git a/indexer/services/ender/src/config.ts b/indexer/services/ender/src/config.ts index 3f6909c192..e129dace57 100644 --- a/indexer/services/ender/src/config.ts +++ b/indexer/services/ender/src/config.ts @@ -23,12 +23,48 @@ export const configSchema = { SEND_WEBSOCKET_MESSAGES: parseBoolean({ default: true, }), + USE_ASSET_CREATE_HANDLER_SQL_FUNCTION: parseBoolean({ + default: true, + }), + USE_FUNDING_HANDLER_SQL_FUNCTION: parseBoolean({ + default: true, + }), + USE_LIQUIDATION_HANDLER_SQL_FUNCTION: parseBoolean({ + default: true, + }), + USE_LIQUIDITY_TIER_HANDLER_SQL_FUNCTION: parseBoolean({ + default: true, + }), + USE_MARKET_CREATE_HANDLER_SQL_FUNCTION: parseBoolean({ + default: true, + }), + USE_MARKET_MODIFY_HANDLER_SQL_FUNCTION: parseBoolean({ + default: true, + }), + USE_MARKET_PRICE_UPDATE_HANDLER_SQL_FUNCTION: parseBoolean({ + default: true, + }), USE_ORDER_HANDLER_SQL_FUNCTION: parseBoolean({ default: true, }), + USE_PERPETUAL_MARKET_HANDLER_SQL_FUNCTION: parseBoolean({ + default: true, + }), + USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION: parseBoolean({ + default: true, + }), USE_SUBACCOUNT_UPDATE_SQL_FUNCTION: parseBoolean({ default: true, }), + USE_TRANSFER_HANDLER_SQL_FUNCTION: 
parseBoolean({
+    default: true,
+  }),
+  USE_UPDATE_CLOB_PAIR_HANDLER_SQL_FUNCTION: parseBoolean({
+    default: true,
+  }),
+  USE_UPDATE_PERPETUAL_HANDLER_SQL_FUNCTION: parseBoolean({
+    default: true,
+  }),
   USE_SQL_FUNCTION_TO_CREATE_INITIAL_ROWS: parseBoolean({
     default: true,
   }),
diff --git a/indexer/services/ender/src/handlers/abstract-stateful-order-handler.ts b/indexer/services/ender/src/handlers/abstract-stateful-order-handler.ts
index b535042a45..2f1ea7dcfd 100644
--- a/indexer/services/ender/src/handlers/abstract-stateful-order-handler.ts
+++ b/indexer/services/ender/src/handlers/abstract-stateful-order-handler.ts
@@ -1,11 +1,29 @@
 import { logger } from '@dydxprotocol-indexer/base';
 import {
-  OrderFromDatabase, OrderStatus, OrderTable, OrderUpdateObject, OrderCreateObject, SubaccountTable,
-  OrderSide, OrderType, protocolTranslations,
+  OrderFromDatabase,
+  OrderStatus,
+  OrderTable,
+  OrderUpdateObject,
+  OrderCreateObject,
+  SubaccountTable,
+  OrderSide,
+  OrderType,
+  protocolTranslations,
   PerpetualMarketFromDatabase,
+  storeHelpers,
+  OrderModel,
+  PerpetualMarketModel,
+  SubaccountFromDatabase,
 } from '@dydxprotocol-indexer/postgres';
-import { IndexerOrderId, IndexerOrder, IndexerOrder_Side } from '@dydxprotocol-indexer/v4-protos';
+import SubaccountModel from '@dydxprotocol-indexer/postgres/build/src/models/subaccount-model';
+import {
+  IndexerOrderId,
+  IndexerOrder,
+  IndexerOrder_Side,
+  StatefulOrderEventV1,
+} from '@dydxprotocol-indexer/v4-protos';
 import { DateTime } from 'luxon';
+import * as pg from 'pg';
 
 import { STATEFUL_ORDER_ORDER_FILL_EVENT_TYPE } from '../constants';
 import { getPrice, getSize } from '../lib/helper';
@@ -21,6 +39,37 @@ export abstract class AbstractStatefulOrderHandler<T> extends Handler<T> {
     ];
   }
 
+  protected async handleEventViaSqlFunction():
+  Promise<[OrderFromDatabase,
+    PerpetualMarketFromDatabase,
+    SubaccountFromDatabase | undefined]> {
+    const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes;
+    const result: pg.QueryResult = await storeHelpers.rawQuery(
+      `SELECT dydx_stateful_order_handler(
+        ${this.block.height},
+        '${this.block.time?.toISOString()}',
+        '${JSON.stringify(StatefulOrderEventV1.decode(eventDataBinary))}'
+      ) AS result;`,
+      { txId: this.txId },
+    ).catch((error: Error) => {
+      logger.error({
+        at: 'AbstractStatefulOrderHandler#handleEventViaSqlFunction',
+        message: 'Failed to handle StatefulOrderEventV1',
+        error,
+      });
+      throw error;
+    });
+
+    return [
+      OrderModel.fromJson(result.rows[0].result.order) as OrderFromDatabase,
+      PerpetualMarketModel.fromJson(
+        result.rows[0].result.perpetual_market) as PerpetualMarketFromDatabase,
+      result.rows[0].result.subaccount
+        ? SubaccountModel.fromJson(result.rows[0].result.subaccount) as SubaccountFromDatabase
+        : undefined,
+    ];
+  }
+
   protected async updateOrderStatus(
     orderIdProto: IndexerOrderId,
     status: OrderStatus,
diff --git a/indexer/services/ender/src/handlers/asset-handler.ts b/indexer/services/ender/src/handlers/asset-handler.ts
index 39bcb6e478..1287952cc7 100644
--- a/indexer/services/ender/src/handlers/asset-handler.ts
+++ b/indexer/services/ender/src/handlers/asset-handler.ts
@@ -1,8 +1,16 @@
+import { logger } from '@dydxprotocol-indexer/base';
 import {
-  AssetFromDatabase, AssetTable, assetRefresher, marketRefresher,
+  AssetFromDatabase,
+  AssetModel,
+  AssetTable,
+  assetRefresher,
+  marketRefresher,
+  storeHelpers,
 } from '@dydxprotocol-indexer/postgres';
 import { AssetCreateEventV1 } from '@dydxprotocol-indexer/v4-protos';
+import * as pg from 'pg';
+
+import config from '../config';
 import { ConsolidatedKafkaEvent } from '../lib/types';
 import { Handler } from './handler';
 
@@ -15,6 +23,36 @@ export class AssetCreationHandler extends Handler<AssetCreateEventV1> {
 
   // eslint-disable-next-line @typescript-eslint/require-await
   public async internalHandle(): Promise<ConsolidatedKafkaEvent[]> {
+    if (config.USE_ASSET_CREATE_HANDLER_SQL_FUNCTION) {
+      return this.handleViaSqlFunction();
+    }
+    return this.handleViaKnex();
+  }
+
+  private async handleViaSqlFunction(): Promise<ConsolidatedKafkaEvent[]> {
+    const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes;
+    const result: pg.QueryResult = await storeHelpers.rawQuery(
+      `SELECT dydx_asset_create_handler(
+        '${JSON.stringify(AssetCreateEventV1.decode(eventDataBinary))}'
+      ) AS result;`,
+      { txId: this.txId },
+    ).catch((error: Error) => {
+      logger.error({
+        at: 'AssetCreationHandler#handleViaSqlFunction',
+        message: 'Failed to handle AssetCreateEventV1',
+        error,
+      });
+
+      throw error;
+    });
+
+    const asset: AssetFromDatabase = AssetModel.fromJson(
+      result.rows[0].result.asset) as AssetFromDatabase;
+    assetRefresher.addAsset(asset);
+    return [];
+  }
+
+  private async handleViaKnex(): Promise<ConsolidatedKafkaEvent[]> {
     await this.runFuncWithTimingStatAndErrorLogging(
       this.createAsset(),
       this.generateTimingStatsOptions('create_asset'),
diff --git a/indexer/services/ender/src/handlers/funding-handler.ts b/indexer/services/ender/src/handlers/funding-handler.ts
index 676cfd9976..28baf619ec 100644
--- a/indexer/services/ender/src/handlers/funding-handler.ts
+++ b/indexer/services/ender/src/handlers/funding-handler.ts
@@ -7,14 +7,22 @@ import {
   FundingIndexUpdatesCreateObject,
   FundingIndexUpdatesFromDatabase,
   protocolTranslations,
+  storeHelpers,
+  PerpetualMarketModel,
 } from '@dydxprotocol-indexer/postgres';
 import { NextFundingCache } from '@dydxprotocol-indexer/redis';
 import { bytesToBigInt } from '@dydxprotocol-indexer/v4-proto-parser';
-import { FundingEventV1_Type, FundingUpdateV1 } from '@dydxprotocol-indexer/v4-protos';
+import {
+  FundingEventV1,
+  FundingEventV1_Type,
+  FundingUpdateV1,
+} from '@dydxprotocol-indexer/v4-protos';
 import Big from 'big.js';
 import _ from 'lodash';
+import * as pg from 'pg';
 
 import { getPrice } from '../caches/price-cache';
+import config from '../config';
 import { redisClient } from '../helpers/redis/redis-controller';
 import { indexerTendermintEventToTransactionIndex } from '../lib/helper';
 import { ConsolidatedKafkaEvent, FundingEventMessage } from '../lib/types';
@@ -48,6 +56,95 @@ export class FundingHandler extends Handler<FundingEventMessage> {
 
   // eslint-disable-next-line @typescript-eslint/require-await
   public async internalHandle(): Promise<ConsolidatedKafkaEvent[]> {
+    if (config.USE_FUNDING_HANDLER_SQL_FUNCTION) {
+      return this.handleViaSqlFunction();
+    }
+    return this.handleViaKnex();
+  }
+
+  private async handleViaSqlFunction(): Promise<ConsolidatedKafkaEvent[]> {
+    const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes;
+    const transactionIndex: number = indexerTendermintEventToTransactionIndex(
+      this.indexerTendermintEvent,
+    );
+    const result: pg.QueryResult = await storeHelpers.rawQuery(
+      `SELECT dydx_funding_handler(
+        ${this.block.height},
+        '${this.block.time?.toISOString()}',
+        '${JSON.stringify(FundingEventV1.decode(eventDataBinary))}',
+        ${this.indexerTendermintEvent.eventIndex},
+        ${transactionIndex}
+      ) AS result;`,
+      { txId: this.txId },
+    ).catch((error: Error) => {
+      logger.error({
+        at: 'FundingHandler#handleViaSqlFunction',
+        message: 'Failed to handle FundingEventV1',
+        error,
+      });
+
+      throw error;
+    });
+
+    const perpetualMarkets:
+    Map<string, PerpetualMarketFromDatabase> = new Map();
+    for (const [key, perpetualMarket] of Object.entries(result.rows[0].result.perpetual_markets)) {
+      perpetualMarkets.set(
+        key,
+        PerpetualMarketModel.fromJson(perpetualMarket as object) as PerpetualMarketFromDatabase,
+      );
+    }
+
+    const promises: Promise<unknown>[] = new Array<Promise<unknown>>(this.event.updates.length);
+
+    for (let i: number = 0; i < this.event.updates.length; i++) {
+      const update: FundingUpdateV1 = this.event.updates[i];
+      if (result.rows[0].result.errors[i] != null) {
+        logger.error({
+          at: 'FundingHandler#handleFundingSample',
+          message: result.rows[0].result.errors[i],
+          update,
+        });
+        continue;
+      }
+
+      const perpetualMarket:
+      PerpetualMarketFromDatabase | undefined = perpetualMarkets.get(update.perpetualId.toString());
+      if (perpetualMarket === undefined) {
+        logger.error({
+          at: 'FundingHandler#handleFundingSample',
+          message: 'Received FundingUpdate with unknown perpetualId.',
+          update,
+        });
+        continue;
+      }
+
+      switch (this.event.type) {
+        case FundingEventV1_Type.TYPE_PREMIUM_SAMPLE:
+          promises[i] = NextFundingCache.addFundingSample(
+            perpetualMarket.ticker,
+            new Big(protocolTranslations.funding8HourValuePpmTo1HourRate(update.fundingValuePpm)),
+            redisClient,
+          );
+          break;
+        case FundingEventV1_Type.TYPE_FUNDING_RATE_AND_INDEX:
+          // clear the cache for the predicted next funding rate
+          promises[i] = NextFundingCache.clearFundingSamples(perpetualMarket.ticker, redisClient);
+          break;
+        default:
+          logger.error({
+            at: 'FundingHandler#handle',
+            message: 'Received unknown FundingEvent type.',
+            event: this.event,
+          });
+      }
+    }
+
+    await Promise.all(promises);
+    return [];
+  }
+
+  private async handleViaKnex(): Promise<ConsolidatedKafkaEvent[]> {
     logger.info({
       at: 'FundingHandler#handle',
       message: 'Received FundingEvent.',
@@ -77,7 +174,7 @@
     return [];
   }
 
-  public async handleFundingSample(samples: FundingUpdateV1[]): Promise<void> {
+  private async handleFundingSample(samples: FundingUpdateV1[]): Promise<void> {
     await Promise.all(
       _.map(samples, (sample: FundingUpdateV1) => {
         const perpetualMarket:
@@ -101,7 +198,7 @@
     );
   }
 
-  public async handleFundingRate(updates: FundingUpdateV1[]): Promise<void> {
+  private async handleFundingRate(updates: FundingUpdateV1[]): Promise<void> {
     // clear the cache for the predicted next funding rate
     await Promise.all(
       _.map(updates, (update: FundingUpdateV1) => {
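`AssetCreationHandler` and `FundingHandler` above, and `LiquidityTierHandler` below, share one invocation shape: decode the event bytes back into the protobuf message, pass it as JSON to the matching `dydx_*_handler` Postgres function through `storeHelpers.rawQuery`, and rehydrate the returned JSONB through the corresponding model's `fromJson`. A sketch of that shared shape as a hypothetical consolidated helper (not in the PR; it assumes each function returns a single `result` column, as the handlers above do):

```typescript
import { logger } from '@dydxprotocol-indexer/base';
import { storeHelpers } from '@dydxprotocol-indexer/postgres';
import * as pg from 'pg';

// Hypothetical consolidation of the per-handler pattern: `sqlFunction` would
// be e.g. 'dydx_liquidity_tier_handler', and `eventJson` the JSON.stringify'd
// decoded protobuf event.
async function invokeHandlerSqlFunction(
  sqlFunction: string,
  eventJson: string,
  txId: number | undefined,
): Promise<pg.QueryResult> {
  return storeHelpers.rawQuery(
    `SELECT ${sqlFunction}('${eventJson}') AS result;`,
    { txId },
  ).catch((error: Error) => {
    // Log and rethrow, mirroring the error handling in the handlers above.
    logger.error({
      at: 'invokeHandlerSqlFunction',
      message: `Failed to invoke ${sqlFunction}`,
      error,
    });
    throw error;
  });
}
```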
@@ -1,15 +1,20 @@ +import { logger } from '@dydxprotocol-indexer/base'; import { LiquidityTiersCreateObject, LiquidityTiersFromDatabase, + LiquidityTiersModel, LiquidityTiersTable, PerpetualMarketFromDatabase, liquidityTierRefresher, perpetualMarketRefresher, protocolTranslations, + storeHelpers, } from '@dydxprotocol-indexer/postgres'; import { LiquidityTierUpsertEventV1 } from '@dydxprotocol-indexer/v4-protos'; import _ from 'lodash'; +import * as pg from 'pg'; +import config from '../config'; import { QUOTE_CURRENCY_ATOMIC_RESOLUTION } from '../constants'; import { generatePerpetualMarketMessage } from '../helpers/kafka-helper'; import { ConsolidatedKafkaEvent } from '../lib/types'; @@ -24,6 +29,36 @@ export class LiquidityTierHandler extends Handler { // eslint-disable-next-line @typescript-eslint/require-await public async internalHandle(): Promise { + if (config.USE_LIQUIDITY_TIER_HANDLER_SQL_FUNCTION) { + return this.handleViaSqlFunction(); + } + return this.handleViaKnex(); + } + + private async handleViaSqlFunction(): Promise { + const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes; + const result: pg.QueryResult = await storeHelpers.rawQuery( + `SELECT dydx_liquidity_tier_handler( + '${JSON.stringify(LiquidityTierUpsertEventV1.decode(eventDataBinary))}' + ) AS result;`, + { txId: this.txId }, + ).catch((error: Error) => { + logger.error({ + at: 'LiquidityTierHandler#handleViaSqlFunction', + message: 'Failed to handle LiquidityTierUpsertEventV1', + error, + }); + + throw error; + }); + + const liquidityTier: LiquidityTiersFromDatabase = LiquidityTiersModel.fromJson( + result.rows[0].result.liquidity_tier) as LiquidityTiersFromDatabase; + liquidityTierRefresher.upsertLiquidityTier(liquidityTier); + return this.generateWebsocketEventsForLiquidityTier(liquidityTier); + } + + private async handleViaKnex(): Promise { const liquidityTier: LiquidityTiersFromDatabase = await this.runFuncWithTimingStatAndErrorLogging( this.upsertLiquidityTier(), diff --git a/indexer/services/ender/src/handlers/markets/market-create-handler.ts b/indexer/services/ender/src/handlers/markets/market-create-handler.ts index 4b33da3223..8c56eb1880 100644 --- a/indexer/services/ender/src/handlers/markets/market-create-handler.ts +++ b/indexer/services/ender/src/handlers/markets/market-create-handler.ts @@ -1,7 +1,15 @@ import { logger } from '@dydxprotocol-indexer/base'; -import { MarketFromDatabase, MarketTable, marketRefresher } from '@dydxprotocol-indexer/postgres'; +import { + MarketFromDatabase, + MarketModel, + MarketTable, + marketRefresher, + storeHelpers, +} from '@dydxprotocol-indexer/postgres'; import { MarketEventV1 } from '@dydxprotocol-indexer/v4-protos'; +import * as pg from 'pg'; +import config from '../../config'; import { ConsolidatedKafkaEvent, MarketCreateEventMessage } from '../../lib/types'; import { Handler } from '../handler'; @@ -13,13 +21,20 @@ export class MarketCreateHandler extends Handler { return [`${this.eventType}_${this.event.marketId}`]; } - // eslint-disable-next-line @typescript-eslint/require-await public async internalHandle(): Promise { logger.info({ at: 'MarketCreateHandler#handle', message: 'Received MarketEvent with MarketCreate.', event: this.event, }); + if (config.USE_MARKET_CREATE_HANDLER_SQL_FUNCTION) { + return this.handleViaSqlFunction(); + } + return this.handleViaKnexQueries(); + } + + // eslint-disable-next-line @typescript-eslint/require-await + public async handleViaKnexQueries(): Promise { // MarketHandler already makes sure the event has 
'marketCreate' as the oneofKind. const marketCreate: MarketCreateEventMessage = this.event as MarketCreateEventMessage; @@ -39,6 +54,37 @@ export class MarketCreateHandler extends Handler { return []; } + private async handleViaSqlFunction(): Promise { + const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes; + const result: pg.QueryResult = await storeHelpers.rawQuery( + `SELECT dydx_market_create_handler( + '${JSON.stringify(MarketEventV1.decode(eventDataBinary))}' + ) AS result;`, + { txId: this.txId }, + ).catch((error: Error) => { + logger.error({ + at: 'MarketCreateHandler#handleViaSqlFunction', + message: 'Failed to handle MarketEventV1', + error, + }); + + if (error.message.includes('Market in MarketCreate already exists')) { + const marketCreate: MarketCreateEventMessage = this.event as MarketCreateEventMessage; + this.logAndThrowParseMessageError( + 'Market in MarketCreate already exists', + { marketCreate }, + ); + } + + throw error; + }); + + const market: MarketFromDatabase = MarketModel.fromJson( + result.rows[0].result.market) as MarketFromDatabase; + marketRefresher.updateMarket(market); + return []; + } + private async createMarket(marketCreate: MarketCreateEventMessage): Promise { await MarketTable.create({ id: marketCreate.marketId, diff --git a/indexer/services/ender/src/handlers/markets/market-modify-handler.ts b/indexer/services/ender/src/handlers/markets/market-modify-handler.ts index cbc5803f5d..147cd4b651 100644 --- a/indexer/services/ender/src/handlers/markets/market-modify-handler.ts +++ b/indexer/services/ender/src/handlers/markets/market-modify-handler.ts @@ -1,9 +1,11 @@ import { logger } from '@dydxprotocol-indexer/base'; import { - MarketFromDatabase, MarketUpdateObject, MarketTable, marketRefresher, + MarketFromDatabase, MarketUpdateObject, MarketTable, marketRefresher, storeHelpers, MarketModel, } from '@dydxprotocol-indexer/postgres'; import { MarketEventV1 } from '@dydxprotocol-indexer/v4-protos'; +import * as pg from 'pg'; +import config from '../../config'; import { ConsolidatedKafkaEvent, MarketModifyEventMessage } from '../../lib/types'; import { Handler } from '../handler'; @@ -16,6 +18,18 @@ export class MarketModifyHandler extends Handler { } public async internalHandle(): Promise { + logger.info({ + at: 'MarketModifyHandler#handle', + message: 'Received MarketEvent with MarketModify.', + event: this.event, + }); + if (config.USE_MARKET_MODIFY_HANDLER_SQL_FUNCTION) { + return this.handleViaSqlFunction(); + } + return this.handleViaKnexQueries(); + } + + private async handleViaKnexQueries(): Promise { logger.info({ at: 'MarketModifyHandler#handle', message: 'Received MarketEvent with MarketModify.', @@ -32,6 +46,42 @@ export class MarketModifyHandler extends Handler { return []; } + private async handleViaSqlFunction(): Promise { + const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes; + const result: pg.QueryResult = await storeHelpers.rawQuery( + `SELECT dydx_market_modify_handler( + '${JSON.stringify(MarketEventV1.decode(eventDataBinary))}' + ) AS result;`, + { txId: this.txId }, + ).catch((error: Error) => { + logger.error({ + at: 'MarketModifyHandler#handleViaSqlFunction', + message: 'Failed to handle MarketEventV1', + error, + }); + + const castedMarketModifyMessage: + MarketModifyEventMessage = this.event as MarketModifyEventMessage; + + if (error.message.includes('Market in MarketModify doesn\'t exist')) { + this.logAndThrowParseMessageError( + 'Market in MarketModify doesn\'t exist', + {
castedMarketModifyMessage }, + ); + } + + this.logAndThrowParseMessageError( + 'Failed to update market in markets table', + { castedMarketModifyMessage }, + ); + }); + + const market: MarketFromDatabase = MarketModel.fromJson( + result.rows[0].result.market) as MarketFromDatabase; + marketRefresher.updateMarket(market); + return []; + } + protected async updateMarketFromEvent( castedMarketModifyMessage: MarketModifyEventMessage, ): Promise { diff --git a/indexer/services/ender/src/handlers/markets/market-price-update-handler.ts b/indexer/services/ender/src/handlers/markets/market-price-update-handler.ts index 7a28e2c8a4..4efdbfc6b7 100644 --- a/indexer/services/ender/src/handlers/markets/market-price-update-handler.ts +++ b/indexer/services/ender/src/handlers/markets/market-price-update-handler.ts @@ -5,13 +5,16 @@ import { MarketTable, OraclePriceCreateObject, OraclePriceFromDatabase, + OraclePriceModel, OraclePriceTable, protocolTranslations, - MarketMessageContents, + MarketMessageContents, storeHelpers, MarketModel, marketRefresher, } from '@dydxprotocol-indexer/postgres'; import { MarketEventV1 } from '@dydxprotocol-indexer/v4-protos'; +import * as pg from 'pg'; import { updatePriceCacheWithPrice } from '../../caches/price-cache'; +import config from '../../config'; import { generateOraclePriceContents } from '../../helpers/kafka-helper'; import { ConsolidatedKafkaEvent, @@ -38,6 +41,13 @@ export class MarketPriceUpdateHandler extends Handler { message: 'Received MarketEvent with MarketPriceUpdate.', event: this.event, }); + if (config.USE_MARKET_PRICE_UPDATE_HANDLER_SQL_FUNCTION) { + return this.handleViaSqlFunction(); + } + return this.handleViaKnexQueries(); + } + + private async handleViaKnexQueries(): Promise { // MarketHandler already makes sure the event has 'priceUpdate' as the oneofKind.
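Every handler converted in this diff follows the same dual-path shape: internalHandle() gates on a config flag, and the SQL fast path serializes the decoded protobuf event with JSON.stringify, hands it to a dydx_*_handler Postgres function through storeHelpers.rawQuery, and rehydrates the returned jsonb through the matching model's fromJson(). A minimal TypeScript sketch of that shared shape, assuming a placeholder function name dydx_example_handler and an already-decoded event (illustrative only, not part of this diff):

import * as pg from 'pg';
import { storeHelpers } from '@dydxprotocol-indexer/postgres';

// Sketch of the SQL fast path shared by the handlers in this diff.
// `dydx_example_handler` is a placeholder for any dydx_*_handler function;
// `decodedEvent` is the protobuf event after EventV1.decode(dataBytes).
async function callHandlerSqlFunction(
  decodedEvent: object,
  txId?: number,
): Promise<{ [entityName: string]: object }> {
  const result: pg.QueryResult = await storeHelpers.rawQuery(
    // The decoded event is embedded as a JSON string literal, mirroring the handlers above.
    `SELECT dydx_example_handler('${JSON.stringify(decodedEvent)}') AS result;`,
    { txId },
  );
  // Each handler function returns one row whose `result` column is a jsonb object
  // keyed by entity name (e.g. { market: {...} }), ready for Model.fromJson().
  return result.rows[0].result;
}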
const castedMarketPriceUpdateMessage: MarketPriceUpdateEventMessage = this.event as MarketPriceUpdateEventMessage; @@ -54,6 +64,49 @@ export class MarketPriceUpdateHandler extends Handler { ]; } + private async handleViaSqlFunction(): Promise { + const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes; + const result: pg.QueryResult = await storeHelpers.rawQuery( + `SELECT dydx_market_price_update_handler( + ${this.block.height}, + '${this.block.time?.toISOString()}', + '${JSON.stringify(MarketEventV1.decode(eventDataBinary))}' + ) AS result;`, + { txId: this.txId }, + ).catch((error: Error) => { + logger.error({ + at: 'MarketPriceUpdateHandler#handleViaSqlFunction', + message: 'Failed to handle MarketEventV1', + error, + }); + + if (error.message.includes('MarketPriceUpdateEvent contains a non-existent market id')) { + const castedMarketPriceUpdateMessage: + MarketPriceUpdateEventMessage = this.event as MarketPriceUpdateEventMessage; + this.logAndThrowParseMessageError( + 'MarketPriceUpdateEvent contains a non-existent market id', + { castedMarketPriceUpdateMessage }, + ); + } + + throw error; + }); + + const market: MarketFromDatabase = MarketModel.fromJson( + result.rows[0].result.market) as MarketFromDatabase; + const oraclePrice: OraclePriceFromDatabase = OraclePriceModel.fromJson( + result.rows[0].result.oracle_price) as OraclePriceFromDatabase; + + marketRefresher.updateMarket(market); + updatePriceCacheWithPrice(oraclePrice); + + return [ + this.generateKafkaEvent( + oraclePrice, market.pair, + ), + ]; + } + protected async updateMarketFromEvent( castedMarketPriceUpdateMessage: MarketPriceUpdateEventMessage, humanPrice: string, diff --git a/indexer/services/ender/src/handlers/order-fills/abstract-order-fill-handler.ts b/indexer/services/ender/src/handlers/order-fills/abstract-order-fill-handler.ts index 1d42fe4da0..52814bc5bf 100644 --- a/indexer/services/ender/src/handlers/order-fills/abstract-order-fill-handler.ts +++ b/indexer/services/ender/src/handlers/order-fills/abstract-order-fill-handler.ts @@ -22,11 +22,13 @@ import { SubaccountMessageContents, SubaccountTable, TendermintEventTable, + TimeInForce, TradeMessageContents, UpdatedPerpetualPositionSubaccountKafkaObject, USDC_ASSET_ID, } from '@dydxprotocol-indexer/postgres'; -import { getOrderIdHash } from '@dydxprotocol-indexer/v4-proto-parser'; +import { CanceledOrderStatus } from '@dydxprotocol-indexer/redis'; +import { getOrderIdHash, ORDER_FLAG_LONG_TERM } from '@dydxprotocol-indexer/v4-proto-parser'; import { IndexerOrder, IndexerOrder_Side, @@ -289,11 +291,16 @@ export abstract class AbstractOrderFillHandler extends Handler { return updatedPerpetualPosition; } + /** + * Upsert an order based on the event processed by the handler. + * @param canceledOrderStatus - Status of the order in the CanceledOrdersCache, always + * NOT_CANCELED for liquidation orders + */ protected upsertOrderFromEvent( perpetualMarket: PerpetualMarketFromDatabase, order: IndexerOrder, totalFilledFromProto: Long, - isCanceled: boolean, + canceledOrderStatus: CanceledOrderStatus, ): Promise { const size: string = getSize(order, perpetualMarket); const price: string = getPrice(order, perpetualMarket); @@ -301,6 +308,14 @@ export abstract class AbstractOrderFillHandler extends Handler { totalFilledFromProto.toString(10), perpetualMarket.atomicResolution, ); + const timeInForce: TimeInForce = protocolTranslations.protocolOrderTIFToTIF(order.timeInForce); + const status: OrderStatus = this.getOrderStatus( + canceledOrderStatus, +
size, + totalFilled, + order.orderId!.orderFlags, + timeInForce, + ); const orderToCreate: OrderCreateObject = { subaccountId: SubaccountTable.subaccountIdToUuid(order.orderId!.subaccountId!), @@ -311,8 +326,8 @@ totalFilled, price, type: OrderType.LIMIT, // TODO: Add additional order types once we support - status: this.getOrderStatus(isCanceled, size, totalFilled), - timeInForce: protocolTranslations.protocolOrderTIFToTIF(order.timeInForce), + status, + timeInForce, reduceOnly: order.reduceOnly, orderFlags: order.orderId!.orderFlags.toString(), goodTilBlock: protocolTranslations.getGoodTilBlock(order)?.toString(), @@ -325,16 +340,44 @@ return OrderTable.upsert(orderToCreate, { txId: this.txId }); } + /** + * The obvious case is if totalFilled >= size, then the order status should always be `FILLED`. + * The difficult case is if totalFilled < size after a fill, then we need to keep the following + * cases in mind: + * 1. Stateful Orders - All cancellations are on-chain events, so they will be `OPEN`. The + * CanceledOrdersCache does not store any stateful orders and we never send + * BEST_EFFORT_CANCELED notifications for stateful orders. + * 2. Short-term FOK - FOK orders can never be `OPEN`, since they don't rest on the orderbook, so + * totalFilled cannot be < size. By the end of the block, the order will be filled, so we mark + * it as `FILLED`. + * 3. Short-term IOC - Protocol guarantees that an IOC order will only ever be filled in a single + * block, so status should be `CANCELED`. + * 4. Short-term Limit & Post-only - If the order is in the CanceledOrdersCache, then it should be + * set to the corresponding CanceledOrderStatus, otherwise `OPEN`. + * @param canceledOrderStatus - the status of the order in the CanceledOrdersCache; always + * NOT_CANCELED for liquidation orders + */ protected getOrderStatus( - isCanceled: boolean, + canceledOrderStatus: CanceledOrderStatus, size: string, totalFilled: string, + orderFlags: number, + timeInForce: TimeInForce, ): OrderStatus { - if (isCanceled) { - return OrderStatus.BEST_EFFORT_CANCELED; - } - if (Big(size).lte(totalFilled)) { + if (Big(totalFilled).gte(size)) { return OrderStatus.FILLED; + } else if (orderFlags === ORDER_FLAG_LONG_TERM) { // 1. Stateful Order + return OrderStatus.OPEN; + } else if (timeInForce === TimeInForce.FOK) { // 2. Short-term FOK + return OrderStatus.FILLED; + } else if (timeInForce === TimeInForce.IOC) { // 3. Short-term IOC + return OrderStatus.CANCELED; + } + // 4.
Short-term Limit & Post-only + if (canceledOrderStatus === CanceledOrderStatus.BEST_EFFORT_CANCELED) { + return OrderStatus.BEST_EFFORT_CANCELED; + } else if (canceledOrderStatus === CanceledOrderStatus.CANCELED) { + return OrderStatus.CANCELED; } return OrderStatus.OPEN; } diff --git a/indexer/services/ender/src/handlers/order-fills/liquidation-handler.ts b/indexer/services/ender/src/handlers/order-fills/liquidation-handler.ts index 8ee0073793..203dac7ee8 100644 --- a/indexer/services/ender/src/handlers/order-fills/liquidation-handler.ts +++ b/indexer/services/ender/src/handlers/order-fills/liquidation-handler.ts @@ -1,24 +1,37 @@ import { logger } from '@dydxprotocol-indexer/base'; import { FillFromDatabase, + FillModel, Liquidity, OrderFromDatabase, + OrderModel, OrderTable, PerpetualMarketFromDatabase, + PerpetualMarketModel, perpetualMarketRefresher, PerpetualPositionFromDatabase, + PerpetualPositionModel, + storeHelpers, SubaccountTable, - OrderStatus, + USDC_ASSET_ID, + OrderStatus, FillType, } from '@dydxprotocol-indexer/postgres'; +import { CanceledOrderStatus, StateFilledQuantumsCache } from '@dydxprotocol-indexer/redis'; import { isStatefulOrder } from '@dydxprotocol-indexer/v4-proto-parser'; import { - LiquidationOrderV1, IndexerOrderId, + LiquidationOrderV1, IndexerOrderId, OrderFillEventV1, } from '@dydxprotocol-indexer/v4-protos'; import Long from 'long'; +import * as pg from 'pg'; +import config from '../../config'; import { STATEFUL_ORDER_ORDER_FILL_EVENT_TYPE, SUBACCOUNT_ORDER_FILL_EVENT_TYPE } from '../../constants'; import { convertPerpetualPosition } from '../../helpers/kafka-helper'; -import { orderFillWithLiquidityToOrderFillEventWithLiquidation } from '../../helpers/translation-helper'; +import { redisClient } from '../../helpers/redis/redis-controller'; +import { + orderFillWithLiquidityToOrderFillEventWithLiquidation, +} from '../../helpers/translation-helper'; +import { indexerTendermintEventToTransactionIndex } from '../../lib/helper'; import { OrderFillWithLiquidity } from '../../lib/translated-types'; import { ConsolidatedKafkaEvent, @@ -73,8 +86,106 @@ export class LiquidationHandler extends AbstractOrderFillHandler { + const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes; + const transactionIndex: number = indexerTendermintEventToTransactionIndex( + this.indexerTendermintEvent, + ); + + const castedLiquidationFillEventMessage: + OrderFillEventWithLiquidation = orderFillWithLiquidityToOrderFillEventWithLiquidation( + this.event, + ); + const field: string = this.event.liquidity === Liquidity.MAKER + ? 'makerOrder' : 'liquidationOrder'; + const fillType: string = this.event.liquidity === Liquidity.MAKER + ? 
FillType.LIQUIDATION : FillType.LIQUIDATED; + + const result: pg.QueryResult = await storeHelpers.rawQuery( + `SELECT dydx_liquidation_fill_handler_per_order( + '${field}', + ${this.block.height}, + '${this.block.time?.toISOString()}', + '${JSON.stringify(OrderFillEventV1.decode(eventDataBinary))}', + ${this.indexerTendermintEvent.eventIndex}, + ${transactionIndex}, + '${this.block.txHashes[transactionIndex]}', + '${this.event.liquidity}', + '${fillType}', + '${USDC_ASSET_ID}' + ) AS result;`, + { txId: this.txId }, + ).catch((error: Error) => { + logger.error({ + at: 'liquidationHandler#handleViaSqlFunction', + message: 'Failed to handle OrderFillEventV1', + error, + }); + throw error; + }); + + const fill: FillFromDatabase = FillModel.fromJson( + result.rows[0].result.fill) as FillFromDatabase; + const perpetualMarket: PerpetualMarketFromDatabase = PerpetualMarketModel.fromJson( + result.rows[0].result.perpetual_market) as PerpetualMarketFromDatabase; + const position: PerpetualPositionFromDatabase = PerpetualPositionModel.fromJson( + result.rows[0].result.perpetual_position) as PerpetualPositionFromDatabase; + + if (this.event.liquidity === Liquidity.MAKER) { + // Must be done in this order, because fills refer to an order + // We do not create a taker order for liquidations. + const makerOrder: OrderFromDatabase = OrderModel.fromJson( + result.rows[0].result.order) as OrderFromDatabase; + + // Update the cache tracking the state-filled amount per order for use in vulcan + await StateFilledQuantumsCache.updateStateFilledQuantums( + makerOrder!.id, + this.getTotalFilled(castedLiquidationFillEventMessage).toString(), + redisClient, + ); + + const kafkaEvents: ConsolidatedKafkaEvent[] = [ + this.generateConsolidatedKafkaEvent( + castedLiquidationFillEventMessage.makerOrder.orderId!.subaccountId!, + makerOrder, + convertPerpetualPosition(position), + fill, + perpetualMarket, + ), + // Update vulcan with the total filled amount of the maker order. + this.getOrderUpdateKafkaEvent( + castedLiquidationFillEventMessage.makerOrder!.orderId!, + castedLiquidationFillEventMessage.totalFilledMaker, + ), + ]; + + // If the order is stateful and fully-filled, send an order removal to vulcan. We only do this + // for stateful orders as we are guaranteed a stateful order cannot be replaced until the next + // block. 
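The maker-order path above ultimately stores a status computed by the new getOrderStatus precedence (full fill, then stateful, then FOK, then IOC, then the canceled-orders cache), the same precedence dydx_get_order_status.sql encodes. A self-contained TypeScript rendering of that decision table, with the enum shapes simplified to string unions for illustration (not the handler's actual code):

import Big from 'big.js';

// Simplified stand-ins for the indexer's OrderStatus, CanceledOrderStatus and
// TimeInForce enums; ORDER_FLAG_LONG_TERM = 64 matches dydx_get_order_status.sql.
type OrderStatus = 'OPEN' | 'FILLED' | 'CANCELED' | 'BEST_EFFORT_CANCELED';
type CanceledOrderStatus = 'NOT_CANCELED' | 'CANCELED' | 'BEST_EFFORT_CANCELED';
type TimeInForce = 'GTT' | 'FOK' | 'IOC' | 'POST_ONLY';
const ORDER_FLAG_LONG_TERM: number = 64;

function orderStatusSketch(
  canceledOrderStatus: CanceledOrderStatus,
  size: string,
  totalFilled: string,
  orderFlags: number,
  timeInForce: TimeInForce,
): OrderStatus {
  if (Big(totalFilled).gte(size)) return 'FILLED';          // fully filled
  if (orderFlags === ORDER_FLAG_LONG_TERM) return 'OPEN';   // 1. stateful order
  if (timeInForce === 'FOK') return 'FILLED';               // 2. short-term FOK
  if (timeInForce === 'IOC') return 'CANCELED';             // 3. short-term IOC
  if (canceledOrderStatus === 'BEST_EFFORT_CANCELED') {     // 4. canceled-orders cache
    return 'BEST_EFFORT_CANCELED';
  }
  if (canceledOrderStatus === 'CANCELED') return 'CANCELED';
  return 'OPEN';
}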
+ if (makerOrder?.status === OrderStatus.FILLED && isStatefulOrder(makerOrder?.orderFlags)) { + kafkaEvents.push( + this.getOrderRemoveKafkaEvent(castedLiquidationFillEventMessage.makerOrder!.orderId!), + ); + } + return kafkaEvents; + } else { + return [ + this.generateConsolidatedKafkaEvent( + castedLiquidationFillEventMessage.liquidationOrder.liquidated!, + undefined, + convertPerpetualPosition(position), + fill, + perpetualMarket, + ), + this.generateTradeKafkaEventFromTakerOrderFill( + fill, + ), + ]; + } + } + // eslint-disable-next-line @typescript-eslint/require-await - public async internalHandle(): Promise { + public async handleViaKnexQueries(): Promise { const castedLiquidationFillEventMessage: OrderFillEventWithLiquidation = orderFillWithLiquidityToOrderFillEventWithLiquidation( this.event, @@ -107,7 +218,7 @@ export class LiquidationHandler extends AbstractOrderFillHandler { + if (config.USE_LIQUIDATION_HANDLER_SQL_FUNCTION) { + return this.handleViaSqlFunction(); + } + return this.handleViaKnexQueries(); + } } diff --git a/indexer/services/ender/src/handlers/order-fills/order-handler.ts b/indexer/services/ender/src/handlers/order-fills/order-handler.ts index edd649ee90..7e2cb4ef94 100644 --- a/indexer/services/ender/src/handlers/order-fills/order-handler.ts +++ b/indexer/services/ender/src/handlers/order-fills/order-handler.ts @@ -16,7 +16,7 @@ import { USDC_ASSET_ID, OrderStatus, } from '@dydxprotocol-indexer/postgres'; -import { CanceledOrdersCache } from '@dydxprotocol-indexer/redis'; +import { CanceledOrderStatus, CanceledOrdersCache, StateFilledQuantumsCache } from '@dydxprotocol-indexer/redis'; import { isStatefulOrder } from '@dydxprotocol-indexer/v4-proto-parser'; import { OrderFillEventV1, IndexerOrderId, IndexerSubaccountId, IndexerOrder, @@ -75,8 +75,11 @@ export class OrderHandler extends AbstractOrderFillHandler { + ).catch((error: Error) => { logger.error({ at: 'orderHandler#handleViaSqlFunction', message: 'Failed to handle OrderFillEventV1', @@ -134,6 +137,13 @@ export class OrderHandler extends AbstractOrderFillHandler { + if (config.USE_PERPETUAL_MARKET_HANDLER_SQL_FUNCTION) { + return this.handleViaSqlFunction(); + } + return this.handleViaKnex(); + } + + // eslint-disable-next-line @typescript-eslint/require-await + private async handleViaSqlFunction(): Promise { + const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes; + const result: pg.QueryResult = await storeHelpers.rawQuery( + `SELECT dydx_perpetual_market_handler( + '${JSON.stringify(PerpetualMarketCreateEventV1.decode(eventDataBinary))}' + ) AS result;`, + { txId: this.txId }, + ).catch((error: Error) => { + logger.error({ + at: 'PerpetualMarketCreationHandler#handleViaSqlFunction', + message: 'Failed to handle PerpetualMarketCreateEventV1', + error, + }); + + throw error; + }); + + const perpetualMarket: PerpetualMarketFromDatabase = PerpetualMarketModel.fromJson( + result.rows[0].result.perpetual_market) as PerpetualMarketFromDatabase; + + perpetualMarketRefresher.upsertPerpetualMarket(perpetualMarket); + return [ + this.generateConsolidatedMarketKafkaEvent( + JSON.stringify(generatePerpetualMarketMessage([perpetualMarket])), + ), + ]; + } + + // eslint-disable-next-line @typescript-eslint/require-await + private async handleViaKnex(): Promise { const perpetualMarket: PerpetualMarketFromDatabase = await this.runFuncWithTimingStatAndErrorLogging( this.createPerpetualMarket(), diff --git a/indexer/services/ender/src/handlers/stateful-order/conditional-order-placement-handler.ts 
b/indexer/services/ender/src/handlers/stateful-order/conditional-order-placement-handler.ts index fe4d6410c8..ba5acdbde6 100644 --- a/indexer/services/ender/src/handlers/stateful-order/conditional-order-placement-handler.ts +++ b/indexer/services/ender/src/handlers/stateful-order/conditional-order-placement-handler.ts @@ -5,7 +5,7 @@ import { OrderTable, PerpetualMarketFromDatabase, perpetualMarketRefresher, - protocolTranslations, + protocolTranslations, SubaccountFromDatabase, SubaccountMessageContents, } from '@dydxprotocol-indexer/postgres'; import { @@ -14,6 +14,7 @@ import { StatefulOrderEventV1, } from '@dydxprotocol-indexer/v4-protos'; +import config from '../../config'; import { generateOrderSubaccountMessage } from '../../helpers/kafka-helper'; import { getTriggerPrice } from '../../lib/helper'; import { ConsolidatedKafkaEvent } from '../../lib/types'; @@ -32,6 +33,24 @@ export class ConditionalOrderPlacementHandler extends // eslint-disable-next-line @typescript-eslint/require-await public async internalHandle(): Promise { + if (config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION) { + return this.handleViaSqlFunction(); + } + return this.handleViaKnex(); + } + + private async handleViaSqlFunction(): Promise { + const result: + [OrderFromDatabase, + PerpetualMarketFromDatabase, + SubaccountFromDatabase | undefined] = await this.handleEventViaSqlFunction(); + + const subaccountId: + IndexerSubaccountId = this.event.conditionalOrderPlacement!.order!.orderId!.subaccountId!; + return this.createKafkaEvents(subaccountId, result[0], result[1]); + } + + private async handleViaKnex(): Promise { const order: IndexerOrder = this.event.conditionalOrderPlacement!.order!; const subaccountId: IndexerSubaccountId = order.orderId!.subaccountId!; const clobPairId: string = order.orderId!.clobPairId.toString(); @@ -58,6 +77,14 @@ export class ConditionalOrderPlacementHandler extends this.generateTimingStatsOptions('upsert_order'), ); + return this.createKafkaEvents(subaccountId, conditionalOrder, perpetualMarket); + } + + private createKafkaEvents( + subaccountId: IndexerSubaccountId, + conditionalOrder: OrderFromDatabase, + perpetualMarket: PerpetualMarketFromDatabase): ConsolidatedKafkaEvent[] { + // Since the order isn't placed on the book, no message is sent to vulcan // ender needs to send the websocket message indicating the conditional order was placed const message: SubaccountMessageContents = { diff --git a/indexer/services/ender/src/handlers/stateful-order/conditional-order-triggered-handler.ts b/indexer/services/ender/src/handlers/stateful-order/conditional-order-triggered-handler.ts index b193565d69..5bc85e8885 100644 --- a/indexer/services/ender/src/handlers/stateful-order/conditional-order-triggered-handler.ts +++ b/indexer/services/ender/src/handlers/stateful-order/conditional-order-triggered-handler.ts @@ -6,6 +6,7 @@ import { PerpetualMarketFromDatabase, orderTranslations, perpetualMarketRefresher, + SubaccountFromDatabase, } from '@dydxprotocol-indexer/postgres'; import { getOrderIdHash } from '@dydxprotocol-indexer/v4-proto-parser'; import { @@ -16,6 +17,7 @@ import { StatefulOrderEventV1, } from '@dydxprotocol-indexer/v4-protos'; +import config from '../../config'; import { ConsolidatedKafkaEvent } from '../../lib/types'; import { AbstractStatefulOrderHandler } from '../abstract-stateful-order-handler'; @@ -32,6 +34,24 @@ export class ConditionalOrderTriggeredHandler extends // eslint-disable-next-line @typescript-eslint/require-await public async internalHandle(): Promise { + if 
(config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION) { + return this.handleViaSqlFunction(); + } + return this.handleViaKnex(); + } + + private async handleViaSqlFunction(): Promise { + const result: + [OrderFromDatabase, + PerpetualMarketFromDatabase, + SubaccountFromDatabase | undefined] = await this.handleEventViaSqlFunction(); + + const order: IndexerOrder = orderTranslations.convertToIndexerOrderWithSubaccount( + result[0], result[1], result[2]!); + return this.createKafkaEvents(order); + } + + private async handleViaKnex(): Promise { const orderIdProto: IndexerOrderId = this.event.conditionalOrderTriggered!.triggeredOrderId!; const orderFromDatabase: OrderFromDatabase = await this.runFuncWithTimingStatAndErrorLogging( this.updateOrderStatus(orderIdProto, OrderStatus.OPEN), @@ -56,6 +76,10 @@ export class ConditionalOrderTriggeredHandler extends orderFromDatabase, perpetualMarket, ); + return this.createKafkaEvents(order); + } + + private createKafkaEvents(order: IndexerOrder): ConsolidatedKafkaEvent[] { const offChainUpdate: OffChainUpdateV1 = OffChainUpdateV1.fromPartial({ orderPlace: { order, @@ -65,7 +89,7 @@ export class ConditionalOrderTriggeredHandler extends return [ this.generateConsolidatedVulcanKafkaEvent( - getOrderIdHash(orderIdProto), + getOrderIdHash(order.orderId!), offChainUpdate, ), ]; diff --git a/indexer/services/ender/src/handlers/stateful-order/stateful-order-placement-handler.ts b/indexer/services/ender/src/handlers/stateful-order/stateful-order-placement-handler.ts index f2066ead71..e941dee2f9 100644 --- a/indexer/services/ender/src/handlers/stateful-order/stateful-order-placement-handler.ts +++ b/indexer/services/ender/src/handlers/stateful-order/stateful-order-placement-handler.ts @@ -14,6 +14,7 @@ import { StatefulOrderEventV1, } from '@dydxprotocol-indexer/v4-protos'; +import config from '../../config'; import { ConsolidatedKafkaEvent } from '../../lib/types'; import { AbstractStatefulOrderHandler } from '../abstract-stateful-order-handler'; @@ -34,7 +35,28 @@ export class StatefulOrderPlacementHandler extends return this.getParallelizationIdsFromOrderId(orderId); } + // eslint-disable-next-line @typescript-eslint/require-await public async internalHandle(): Promise { + if (config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION) { + return this.handleViaSqlFunction(); + } + return this.handleViaKnex(); + } + + private async handleViaSqlFunction(): Promise { + await this.handleEventViaSqlFunction(); + + let order: IndexerOrder; + // TODO(IND-334): Remove after deprecating StatefulOrderPlacementEvent + if (this.event.orderPlace !== undefined) { + order = this.event.orderPlace!.order!; + } else { + order = this.event.longTermOrderPlacement!.order!; + } + return this.createKafkaEvents(order); + } + + private async handleViaKnex(): Promise { let order: IndexerOrder; // TODO(IND-334): Remove after deprecating StatefulOrderPlacementEvent if (this.event.orderPlace !== undefined) { @@ -60,6 +82,10 @@ export class StatefulOrderPlacementHandler extends this.generateTimingStatsOptions('upsert_order'), ); + return this.createKafkaEvents(order); + } + + private createKafkaEvents(order: IndexerOrder): ConsolidatedKafkaEvent[] { const kafakEvents: ConsolidatedKafkaEvent[] = []; const offChainUpdate: OffChainUpdateV1 = OffChainUpdateV1.fromPartial({ diff --git a/indexer/services/ender/src/handlers/stateful-order/stateful-order-removal-handler.ts b/indexer/services/ender/src/handlers/stateful-order/stateful-order-removal-handler.ts index df669f5a34..a835e74cba 100644 --- 
a/indexer/services/ender/src/handlers/stateful-order/stateful-order-removal-handler.ts +++ b/indexer/services/ender/src/handlers/stateful-order/stateful-order-removal-handler.ts @@ -10,6 +10,7 @@ import { StatefulOrderEventV1, } from '@dydxprotocol-indexer/v4-protos'; +import config from '../../config'; import { ConsolidatedKafkaEvent } from '../../lib/types'; import { AbstractStatefulOrderHandler } from '../abstract-stateful-order-handler'; @@ -23,13 +24,31 @@ export class StatefulOrderRemovalHandler extends return this.getParallelizationIdsFromOrderId(orderId); } + // eslint-disable-next-line @typescript-eslint/require-await public async internalHandle(): Promise { + if (config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION) { + return this.handleViaSqlFunction(); + } + return this.handleViaKnex(); + } + + private async handleViaSqlFunction(): Promise { + const orderIdProto: IndexerOrderId = this.event.orderRemoval!.removedOrderId!; + await this.handleEventViaSqlFunction(); + return this.createKafkaEvents(orderIdProto); + } + + private async handleViaKnex(): Promise { const orderIdProto: IndexerOrderId = this.event.orderRemoval!.removedOrderId!; await this.runFuncWithTimingStatAndErrorLogging( this.updateOrderStatus(orderIdProto, OrderStatus.CANCELED), this.generateTimingStatsOptions('cancel_order'), ); + return this.createKafkaEvents(orderIdProto); + } + + private createKafkaEvents(orderIdProto: IndexerOrderId): ConsolidatedKafkaEvent[] { const offChainUpdate: OffChainUpdateV1 = OffChainUpdateV1.fromPartial({ orderRemove: { removedOrderId: orderIdProto, diff --git a/indexer/services/ender/src/handlers/subaccount-update-handler.ts b/indexer/services/ender/src/handlers/subaccount-update-handler.ts index 7cb13406b1..279c3546cf 100644 --- a/indexer/services/ender/src/handlers/subaccount-update-handler.ts +++ b/indexer/services/ender/src/handlers/subaccount-update-handler.ts @@ -75,7 +75,7 @@ export class SubaccountUpdateHandler extends Handler { ${this.indexerTendermintEvent.eventIndex}, ${transactionIndex}) AS result;`, { txId: this.txId }, - ).catch((error) => { + ).catch((error: Error) => { logger.error({ at: 'subaccountUpdateHandler#handleViaSqlFunction', message: 'Failed to handle SubaccountUpdateEventV1', diff --git a/indexer/services/ender/src/handlers/transfer-handler.ts b/indexer/services/ender/src/handlers/transfer-handler.ts index d4d8fa6644..b3b3118517 100644 --- a/indexer/services/ender/src/handlers/transfer-handler.ts +++ b/indexer/services/ender/src/handlers/transfer-handler.ts @@ -1,17 +1,23 @@ +import { logger } from '@dydxprotocol-indexer/base'; import { AssetFromDatabase, + AssetModel, assetRefresher, protocolTranslations, + storeHelpers, SubaccountMessageContents, SubaccountTable, TendermintEventTable, TransferCreateObject, TransferFromDatabase, + TransferModel, TransferTable, WalletTable, } from '@dydxprotocol-indexer/postgres'; import { TransferEventV1 } from '@dydxprotocol-indexer/v4-protos'; +import * as pg from 'pg'; +import config from '../config'; import { generateTransferContents } from '../helpers/kafka-helper'; import { indexerTendermintEventToTransactionIndex } from '../lib/helper'; import { ConsolidatedKafkaEvent, TransferEventType } from '../lib/types'; @@ -25,8 +31,50 @@ export class TransferHandler extends Handler { return []; } - public async internalHandle( - ): Promise { + // eslint-disable-next-line @typescript-eslint/require-await + public async internalHandle(): Promise { + if (config.USE_TRANSFER_HANDLER_SQL_FUNCTION) { + return 
this.handleViaSqlFunction(); + } + return this.handleViaKnex(); + } + + private async handleViaSqlFunction(): Promise { + const transactionIndex: number = indexerTendermintEventToTransactionIndex( + this.indexerTendermintEvent, + ); + const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes; + const result: pg.QueryResult = await storeHelpers.rawQuery( + `SELECT dydx_transfer_handler( + ${this.block.height}, + '${this.block.time?.toISOString()}', + '${JSON.stringify(TransferEventV1.decode(eventDataBinary))}', + ${this.indexerTendermintEvent.eventIndex}, + ${transactionIndex}, + '${this.block.txHashes[transactionIndex]}' + ) AS result;`, + { txId: this.txId }, + ).catch((error: Error) => { + logger.error({ + at: 'TransferHandler#handleViaSqlFunction', + message: 'Failed to handle TransferEventV1', + error, + }); + + throw error; + }); + + const asset: AssetFromDatabase = AssetModel.fromJson( + result.rows[0].result.asset) as AssetFromDatabase; + const transfer: TransferFromDatabase = TransferModel.fromJson( + result.rows[0].result.transfer) as TransferFromDatabase; + return this.generateKafkaEvents( + transfer, + asset, + ); + } + + private async handleViaKnex(): Promise { await this.runFuncWithTimingStatAndErrorLogging( Promise.all([ this.upsertRecipientSubaccount(), diff --git a/indexer/services/ender/src/handlers/update-clob-pair-handler.ts b/indexer/services/ender/src/handlers/update-clob-pair-handler.ts index 95c28a7d7b..90662029f0 100644 --- a/indexer/services/ender/src/handlers/update-clob-pair-handler.ts +++ b/indexer/services/ender/src/handlers/update-clob-pair-handler.ts @@ -1,10 +1,18 @@ import assert from 'assert'; +import { logger } from '@dydxprotocol-indexer/base'; import { - PerpetualMarketFromDatabase, PerpetualMarketTable, perpetualMarketRefresher, protocolTranslations, + PerpetualMarketFromDatabase, + PerpetualMarketModel, + PerpetualMarketTable, + perpetualMarketRefresher, + protocolTranslations, + storeHelpers, } from '@dydxprotocol-indexer/postgres'; import { UpdateClobPairEventV1 } from '@dydxprotocol-indexer/v4-protos'; +import * as pg from 'pg'; +import config from '../config'; import { generatePerpetualMarketMessage } from '../helpers/kafka-helper'; import { ConsolidatedKafkaEvent } from '../lib/types'; import { Handler } from './handler'; @@ -18,6 +26,42 @@ export class UpdateClobPairHandler extends Handler { // eslint-disable-next-line @typescript-eslint/require-await public async internalHandle(): Promise { + if (config.USE_UPDATE_CLOB_PAIR_HANDLER_SQL_FUNCTION) { + return this.handleViaSqlFunction(); + } + return this.handleViaKnex(); + } + + private async handleViaSqlFunction(): Promise { + const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes; + const result: pg.QueryResult = await storeHelpers.rawQuery( + `SELECT dydx_update_clob_pair_handler( + '${JSON.stringify(UpdateClobPairEventV1.decode(eventDataBinary))}' + ) AS result;`, + { txId: this.txId }, + ).catch((error: Error) => { + logger.error({ + at: 'UpdateClobPairHandler#handleViaSqlFunction', + message: 'Failed to handle UpdateClobPairEventV1', + error, + }); + + throw error; + }); + + const perpetualMarket: PerpetualMarketFromDatabase = PerpetualMarketModel.fromJson( + result.rows[0].result.perpetual_market) as PerpetualMarketFromDatabase; + + perpetualMarketRefresher.upsertPerpetualMarket(perpetualMarket); + + return [ + this.generateConsolidatedMarketKafkaEvent( + JSON.stringify(generatePerpetualMarketMessage([perpetualMarket])), + ), + ]; + } + + private async 
handleViaKnex(): Promise { const perpetualMarket: PerpetualMarketFromDatabase = await this.runFuncWithTimingStatAndErrorLogging( this.updateClobPair(), @@ -47,7 +91,7 @@ export class UpdateClobPairHandler extends Handler { if (perpetualMarket === undefined) { this.logAndThrowParseMessageError( - 'Could not find perpetual market with corresponding updatePerpetualEvent.id', + 'Could not find perpetual market with corresponding clobPairId', { event: this.event }, ); // This assert should never be hit because a ParseMessageError should be thrown above. diff --git a/indexer/services/ender/src/handlers/update-perpetual-handler.ts b/indexer/services/ender/src/handlers/update-perpetual-handler.ts index 2c921f7e02..d7787fff28 100644 --- a/indexer/services/ender/src/handlers/update-perpetual-handler.ts +++ b/indexer/services/ender/src/handlers/update-perpetual-handler.ts @@ -1,10 +1,17 @@ import assert from 'assert'; +import { logger } from '@dydxprotocol-indexer/base'; import { - PerpetualMarketFromDatabase, PerpetualMarketTable, perpetualMarketRefresher, + PerpetualMarketFromDatabase, + PerpetualMarketTable, + perpetualMarketRefresher, + storeHelpers, + PerpetualMarketModel, } from '@dydxprotocol-indexer/postgres'; import { UpdatePerpetualEventV1 } from '@dydxprotocol-indexer/v4-protos'; +import * as pg from 'pg'; +import config from '../config'; import { generatePerpetualMarketMessage } from '../helpers/kafka-helper'; import { ConsolidatedKafkaEvent } from '../lib/types'; import { Handler } from './handler'; @@ -18,6 +25,42 @@ export class UpdatePerpetualHandler extends Handler { // eslint-disable-next-line @typescript-eslint/require-await public async internalHandle(): Promise { + if (config.USE_UPDATE_PERPETUAL_HANDLER_SQL_FUNCTION) { + return this.handleViaSqlFunction(); + } + return this.handleViaKnex(); + } + + private async handleViaSqlFunction(): Promise { + const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes; + const result: pg.QueryResult = await storeHelpers.rawQuery( + `SELECT dydx_update_perpetual_handler( + '${JSON.stringify(UpdatePerpetualEventV1.decode(eventDataBinary))}' + ) AS result;`, + { txId: this.txId }, + ).catch((error: Error) => { + logger.error({ + at: 'UpdatePerpetualHandler#handleViaSqlFunction', + message: 'Failed to handle UpdatePerpetualEventV1', + error, + }); + + throw error; + }); + + const perpetualMarket: PerpetualMarketFromDatabase = PerpetualMarketModel.fromJson( + result.rows[0].result.perpetual_market) as PerpetualMarketFromDatabase; + + await perpetualMarketRefresher.upsertPerpetualMarket(perpetualMarket); + + return [ + this.generateConsolidatedMarketKafkaEvent( + JSON.stringify(generatePerpetualMarketMessage([perpetualMarket])), + ), + ]; + } + + private async handleViaKnex(): Promise { const perpetualMarket: PerpetualMarketFromDatabase = await this.runFuncWithTimingStatAndErrorLogging( this.updatePerpetual(), diff --git a/indexer/services/ender/src/helpers/postgres/postgres-functions.ts b/indexer/services/ender/src/helpers/postgres/postgres-functions.ts index 913af3a438..a32840e76b 100644 --- a/indexer/services/ender/src/helpers/postgres/postgres-functions.ts +++ b/indexer/services/ender/src/helpers/postgres/postgres-functions.ts @@ -29,29 +29,46 @@ function newScript(name: string, scriptPath: string): PostgresFunction { const scripts: string[] = [ 'create_extension_pg_stat_statements.sql', 'create_extension_uuid_ossp.sql', + 'dydx_asset_create_handler.sql', + 'dydx_clob_pair_status_to_market_status.sql', + 
'dydx_market_create_handler.sql', + 'dydx_market_modify_handler.sql', + 'dydx_market_price_update_handler.sql', 'dydx_event_id_from_parts.sql', 'dydx_event_to_transaction_index.sql', 'dydx_from_jsonlib_long.sql', 'dydx_from_protocol_order_side.sql', 'dydx_from_protocol_time_in_force.sql', 'dydx_from_serializable_int.sql', + 'dydx_funding_handler.sql', 'dydx_get_fee_from_liquidity.sql', 'dydx_get_order_status.sql', 'dydx_get_total_filled_from_liquidity.sql', 'dydx_get_weighted_average.sql', + 'dydx_liquidation_fill_handler_per_order.sql', + 'dydx_liquidity_tier_handler.sql', 'dydx_order_fill_handler_per_order.sql', + 'dydx_perpetual_market_handler.sql', 'dydx_perpetual_position_and_order_side_matching.sql', + 'dydx_protocol_condition_type_to_order_type.sql', + 'dydx_stateful_order_handler.sql', 'dydx_subaccount_update_handler.sql', + 'dydx_transfer_handler.sql', 'dydx_trim_scale.sql', + 'dydx_update_clob_pair_handler.sql', + 'dydx_update_perpetual_handler.sql', 'dydx_uuid.sql', 'dydx_uuid_from_asset_position_parts.sql', 'dydx_uuid_from_fill_event_parts.sql', + 'dydx_uuid_from_funding_index_update_parts.sql', + 'dydx_uuid_from_oracle_price_parts.sql', 'dydx_uuid_from_order_id.sql', 'dydx_uuid_from_order_id_parts.sql', 'dydx_uuid_from_perpetual_position_parts.sql', 'dydx_uuid_from_subaccount_id.sql', 'dydx_uuid_from_subaccount_id_parts.sql', 'dydx_uuid_from_transaction_parts.sql', + 'dydx_uuid_from_transfer_parts.sql', 'dydx_create_transaction.sql', 'dydx_create_initial_rows_for_tendermint_block.sql', 'dydx_create_tendermint_event.sql', @@ -62,7 +79,7 @@ export async function createPostgresFunctions(): Promise { await Promise.all([ dbHelpers.createModelToJsonFunctions(), ...scripts.map((script: string) => storeHelpers.rawQuery(newScript(script, `../../scripts/${script}`).script, {}) - .catch((error) => { + .catch((error: Error) => { logger.error({ at: 'dbHelpers#createModelToJsonFunctions', message: `Failed to create or replace function contained in ${script}`, diff --git a/indexer/services/ender/src/lib/on-message.ts b/indexer/services/ender/src/lib/on-message.ts index bb49082581..5b9722b0f1 100644 --- a/indexer/services/ender/src/lib/on-message.ts +++ b/indexer/services/ender/src/lib/on-message.ts @@ -277,7 +277,7 @@ async function createInitialRowsViaSqlFunction( await storeHelpers.rawQuery( queryString, { txId }, - ).catch((error) => { + ).catch((error: Error) => { logger.error({ at: 'on-message#createInitialRowsViaSqlFunction', message: 'Failed to create initial rows', diff --git a/indexer/services/ender/src/scripts/dydx_asset_create_handler.sql b/indexer/services/ender/src/scripts/dydx_asset_create_handler.sql new file mode 100644 index 0000000000..0e3a9dce8a --- /dev/null +++ b/indexer/services/ender/src/scripts/dydx_asset_create_handler.sql @@ -0,0 +1,34 @@ +/** + Parameters: + - event_data: The 'data' field of the IndexerTendermintEvent (https://github.com/dydxprotocol/v4-proto/blob/8d35c86/dydxprotocol/indexer/indexer_manager/event.proto#L25) + converted to JSON format. Conversion to JSON is expected to be done by JSON.stringify. + Returns: JSON object containing fields: + - asset: The created asset in asset-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/asset-model.ts). 
+*/ +CREATE OR REPLACE FUNCTION dydx_asset_create_handler(event_data jsonb) RETURNS jsonb AS $$ +DECLARE + market_record_id integer; + asset_record assets%ROWTYPE; +BEGIN + asset_record."id" = event_data->>'id'; + asset_record."atomicResolution" = (event_data->'atomicResolution')::integer; + asset_record."symbol" = event_data->>'symbol'; + + asset_record."hasMarket" = (event_data->'hasMarket')::bool; + if asset_record."hasMarket" THEN + market_record_id = (event_data->'marketId')::integer; + SELECT "id" INTO asset_record."marketId" FROM markets WHERE "id" = market_record_id; + + IF NOT FOUND THEN + RAISE EXCEPTION 'Unable to find market with id: %', market_record_id; + END IF; + END IF; + + INSERT INTO assets VALUES (asset_record.*); + + RETURN jsonb_build_object( + 'asset', + dydx_to_jsonb(asset_record) + ); +END; +$$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/indexer/services/ender/src/scripts/dydx_clob_pair_status_to_market_status.sql b/indexer/services/ender/src/scripts/dydx_clob_pair_status_to_market_status.sql new file mode 100644 index 0000000000..6d69b9dd7f --- /dev/null +++ b/indexer/services/ender/src/scripts/dydx_clob_pair_status_to_market_status.sql @@ -0,0 +1,21 @@ +/** + Returns the market status (https://github.com/dydxprotocol/v4-chain/blob/ea4f6895a73627aaa9bc5e21eed1ba51313b1ce4/indexer/packages/postgres/src/types/perpetual-market-types.ts#L60) + from the clob pair status (https://github.com/dydxprotocol/v4-chain/blob/ea4f6895a73627aaa9bc5e21eed1ba51313b1ce4/proto/dydxprotocol/indexer/protocol/v1/clob.proto#L157). + The conversion is equivalent to https://github.com/dydxprotocol/v4-chain/blob/ea4f6895a73627aaa9bc5e21eed1ba51313b1ce4/indexer/packages/postgres/src/lib/protocol-translations.ts#L351. + + Parameters: + - status: the ClobPairStatus (https://github.com/dydxprotocol/v4-chain/blob/ea4f6895a73627aaa9bc5e21eed1ba51313b1ce4/proto/dydxprotocol/indexer/protocol/v1/clob.proto#L157) +*/ +CREATE OR REPLACE FUNCTION dydx_clob_pair_status_to_market_status(status jsonb) + RETURNS text AS $$ +BEGIN + CASE status + WHEN '1'::jsonb THEN RETURN 'ACTIVE'; /** CLOB_PAIR_STATUS_ACTIVE */ + WHEN '2'::jsonb THEN RETURN 'PAUSED'; /** CLOB_PAIR_STATUS_PAUSED */ + WHEN '3'::jsonb THEN RETURN 'CANCEL_ONLY'; /** CLOB_PAIR_STATUS_CANCEL_ONLY */ + WHEN '4'::jsonb THEN RETURN 'POST_ONLY'; /** CLOB_PAIR_STATUS_POST_ONLY */ + WHEN '5'::jsonb THEN RETURN 'INITIALIZING'; /** CLOB_PAIR_STATUS_INITIALIZING */ + ELSE RAISE EXCEPTION 'Invalid clob pair status: %', status; + END CASE; +END; +$$ LANGUAGE plpgsql IMMUTABLE PARALLEL SAFE; \ No newline at end of file diff --git a/indexer/services/ender/src/scripts/dydx_from_protocol_order_side.sql b/indexer/services/ender/src/scripts/dydx_from_protocol_order_side.sql index 300f3f0a92..f9d7571761 100644 --- a/indexer/services/ender/src/scripts/dydx_from_protocol_order_side.sql +++ b/indexer/services/ender/src/scripts/dydx_from_protocol_order_side.sql @@ -5,8 +5,10 @@ CREATE OR REPLACE FUNCTION dydx_from_protocol_order_side(order_side jsonb) RETURNS text AS $$ BEGIN CASE order_side - WHEN '1'::jsonb THEN RETURN 'BUY'; - ELSE RETURN 'SELL'; - END CASE; + WHEN '1'::jsonb THEN + RETURN 'BUY'; + ELSE + RETURN 'SELL'; + END CASE; END; $$ LANGUAGE plpgsql IMMUTABLE PARALLEL SAFE; diff --git a/indexer/services/ender/src/scripts/dydx_funding_handler.sql b/indexer/services/ender/src/scripts/dydx_funding_handler.sql new file mode 100644 index 0000000000..d9c057cbda --- /dev/null +++ b/indexer/services/ender/src/scripts/dydx_funding_handler.sql @@ 
-0,0 +1,94 @@ +/** + Parameters: + - block_height: the height of the block being processed. + - block_time: the time of the block being processed. + - event_data: The 'data' field of the IndexerTendermintEvent (https://github.com/dydxprotocol/v4-proto/blob/8d35c86/dydxprotocol/indexer/indexer_manager/event.proto#L25) + converted to JSON format. Conversion to JSON is expected to be done by JSON.stringify. + - event_index: The 'event_index' of the IndexerTendermintEvent. + - transaction_index: The transaction_index of the IndexerTendermintEvent after the conversion that takes into + account the block_event (https://github.com/dydxprotocol/indexer/blob/cc70982/services/ender/src/lib/helper.ts#L33) + Returns: JSON object containing fields: + - perpetual_markets: A mapping from perpetual market id to the associated perpetual market in perpetual-market-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/perpetual-market-model.ts). + - errors: An array containing an error string (or NULL if no error occurred) for each FundingEventUpdate. +*/ +CREATE OR REPLACE FUNCTION dydx_funding_handler( + block_height int, block_time timestamp, event_data jsonb, event_index int, transaction_index int) RETURNS jsonb AS $$ +DECLARE + PPM_EXPONENT constant numeric = -6; + FUNDING_RATE_FROM_PROTOCOL_IN_HOURS constant numeric = 8; + QUOTE_CURRENCY_ATOMIC_RESOLUTION constant numeric = -6; + + TYPE_PREMIUM_SAMPLE constant jsonb = '1'; + TYPE_FUNDING_RATE_AND_INDEX constant jsonb = '2'; + + perpetual_market_id bigint; + perpetual_market_record perpetual_markets%ROWTYPE; + funding_index_updates_record funding_index_updates%ROWTYPE; + oracle_prices_record oracle_prices%ROWTYPE; + + funding_update jsonb; + perpetual_markets_response jsonb = jsonb_build_object(); + errors_response jsonb[]; + event_id bytea; +BEGIN + FOR funding_update IN SELECT * FROM jsonb_array_elements(event_data->'updates') LOOP + perpetual_market_id = (funding_update->'perpetualId')::bigint; + SELECT * INTO perpetual_market_record FROM perpetual_markets WHERE "id" = perpetual_market_id; + IF NOT FOUND THEN + errors_response = array_append(errors_response, '"Received FundingUpdate with unknown perpetualId."'); + CONTINUE; + END IF; + + perpetual_markets_response = jsonb_set(perpetual_markets_response, ARRAY[(perpetual_market_record."id")::text], dydx_to_jsonb(perpetual_market_record)); + + CASE event_data->'type' + WHEN TYPE_PREMIUM_SAMPLE THEN + /** Here we just need to return the associated perpetual market. */ + WHEN TYPE_FUNDING_RATE_AND_INDEX THEN + /** Returns the latest oracle price <= current block_height.
*/ + SELECT * INTO oracle_prices_record + FROM oracle_prices + WHERE "marketId" = perpetual_market_record."marketId" AND "effectiveAtHeight" <= block_height + ORDER BY "effectiveAtHeight" + DESC LIMIT 1; + IF NOT FOUND THEN + RAISE EXCEPTION 'price not found for marketId %', perpetual_market_record."marketId"; + END IF; + + event_id = dydx_event_id_from_parts(block_height, transaction_index, event_index); + + funding_index_updates_record."id" = dydx_uuid_from_funding_index_update_parts( + block_height, + event_id, + perpetual_market_record."id"); + funding_index_updates_record."perpetualId" = perpetual_market_id; + funding_index_updates_record."eventId" = event_id; + funding_index_updates_record."effectiveAt" = block_time; + funding_index_updates_record."rate" = dydx_trim_scale( + power(10, PPM_EXPONENT) / + FUNDING_RATE_FROM_PROTOCOL_IN_HOURS * + (funding_update->'fundingValuePpm')::numeric); + funding_index_updates_record."oraclePrice" = oracle_prices_record."price"; + funding_index_updates_record."fundingIndex" = dydx_trim_scale( + dydx_from_serializable_int(funding_update->'fundingIndex') * + power(10, + PPM_EXPONENT + QUOTE_CURRENCY_ATOMIC_RESOLUTION - perpetual_market_record."atomicResolution")); + funding_index_updates_record."effectiveAtHeight" = block_height; + + INSERT INTO funding_index_updates VALUES (funding_index_updates_record.*); + ELSE + errors_response = array_append(errors_response, '"Received unknown FundingEvent type."'); + CONTINUE; + END CASE; + + errors_response = array_append(errors_response, NULL); + END LOOP; + + RETURN jsonb_build_object( + 'perpetual_markets', + perpetual_markets_response, + 'errors', + to_jsonb(errors_response) + ); +END; +$$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/indexer/services/ender/src/scripts/dydx_get_fee_from_liquidity.sql b/indexer/services/ender/src/scripts/dydx_get_fee_from_liquidity.sql index 6dc226c3f9..b961433273 100644 --- a/indexer/services/ender/src/scripts/dydx_get_fee_from_liquidity.sql +++ b/indexer/services/ender/src/scripts/dydx_get_fee_from_liquidity.sql @@ -1,7 +1,7 @@ /** Returns the fee given the liquidity side. */ -CREATE OR REPLACE FUNCTION get_fee(fill_liquidity text, event_data jsonb) RETURNS numeric AS $$ +CREATE OR REPLACE FUNCTION dydx_get_fee(fill_liquidity text, event_data jsonb) RETURNS numeric AS $$ BEGIN IF fill_liquidity = 'TAKER' THEN RETURN dydx_from_jsonlib_long(event_data->'takerFee'); diff --git a/indexer/services/ender/src/scripts/dydx_get_order_status.sql b/indexer/services/ender/src/scripts/dydx_get_order_status.sql index 10aed2b394..b53f5b709d 100644 --- a/indexer/services/ender/src/scripts/dydx_get_order_status.sql +++ b/indexer/services/ender/src/scripts/dydx_get_order_status.sql @@ -1,15 +1,38 @@ /** - Returns the order status given the total filled amount, the order size and whether the order was cancelled. -*/ -CREATE OR REPLACE FUNCTION get_order_status(total_filled numeric, size numeric, is_cancelled boolean) + * The obvious case is if totalFilled >= size, then the order status should always be `FILLED`. + * The difficult case is if totalFilled < size after a fill, then we need to keep the following + * cases in mind: + * 1. Stateful Orders - All cancellations are on-chain events, so they will be `OPEN`. The + * CanceledOrdersCache does not store any stateful orders and we never send + * BEST_EFFORT_CANCELED notifications for stateful orders. + * 2. Short-term FOK - FOK orders can never be `OPEN`, since they don't rest on the orderbook, so + * totalFilled cannot be < size.
By the end of the block, the order will be filled, so we mark + * it as `FILLED`. + * 3. Short-term IOC - Protocol guarantees that an IOC order will only ever be filled in a single + * block, so status should be `CANCELED`. + * 4. Short-term Limit & Post-only - If the order is in the CanceledOrdersCache, then it should be + * set to the corresponding CanceledOrderStatus, otherwise `OPEN`. + * @param order_canceled_status - the status of the order in the CanceledOrdersCache; always + * 'NOT_CANCELED' for liquidation orders + */ +CREATE OR REPLACE FUNCTION dydx_get_order_status(total_filled numeric, size numeric, order_canceled_status text, order_flags bigint, time_in_force text) RETURNS text AS $$ DECLARE order_status text; BEGIN - IF is_cancelled = true THEN - order_status = 'BEST_EFFORT_CANCELED'; - ELSIF total_filled >= size THEN - order_status = 'FILLED'; + IF total_filled >= size THEN + RETURN 'FILLED'; + /** Order flag of 64 is a long-term (stateful) order */ + ELSIF order_flags = 64 THEN /** 1. Stateful Order */ + RETURN 'OPEN'; + ELSIF time_in_force = 'FOK' THEN /** 2. Short-term FOK */ + RETURN 'FILLED'; + ELSIF time_in_force = 'IOC' THEN /** 3. Short-term IOC */ + RETURN 'CANCELED'; + ELSIF order_canceled_status = 'BEST_EFFORT_CANCELED' THEN /** 4. Short-term Limit & Post-only */ + RETURN 'BEST_EFFORT_CANCELED'; + ELSIF order_canceled_status = 'CANCELED' THEN + RETURN 'CANCELED'; ELSE order_status = 'OPEN'; END IF; diff --git a/indexer/services/ender/src/scripts/dydx_get_total_filled_from_liquidity.sql b/indexer/services/ender/src/scripts/dydx_get_total_filled_from_liquidity.sql index f01fd26b6a..b85c0c7545 100644 --- a/indexer/services/ender/src/scripts/dydx_get_total_filled_from_liquidity.sql +++ b/indexer/services/ender/src/scripts/dydx_get_total_filled_from_liquidity.sql @@ -1,7 +1,7 @@ /** Returns the order total filled amount given the liquidity side. */ -CREATE OR REPLACE FUNCTION get_total_filled(fill_liquidity text, event_data jsonb) RETURNS numeric AS $$ +CREATE OR REPLACE FUNCTION dydx_get_total_filled(fill_liquidity text, event_data jsonb) RETURNS numeric AS $$ BEGIN IF fill_liquidity = 'TAKER' THEN RETURN dydx_from_jsonlib_long(event_data->'totalFilledTaker'); diff --git a/indexer/services/ender/src/scripts/dydx_liquidation_fill_handler_per_order.sql b/indexer/services/ender/src/scripts/dydx_liquidation_fill_handler_per_order.sql new file mode 100644 index 0000000000..745edda0b0 --- /dev/null +++ b/indexer/services/ender/src/scripts/dydx_liquidation_fill_handler_per_order.sql @@ -0,0 +1,242 @@ +/** + Parameters: + - field: the field storing the order to process. + - block_height: the height of the block being processed. + - block_time: the time of the block being processed. + - event_data: The 'data' field of the IndexerTendermintEvent (https://github.com/dydxprotocol/v4-proto/blob/8d35c86/dydxprotocol/indexer/indexer_manager/event.proto#L25) + converted to JSON format. Conversion to JSON is expected to be done by JSON.stringify. + - event_index: The 'event_index' of the IndexerTendermintEvent. + - transaction_index: The transaction_index of the IndexerTendermintEvent after the conversion that takes into + account the block_event (https://github.com/dydxprotocol/indexer/blob/cc70982/services/ender/src/lib/helper.ts#L33) + - transaction_hash: The transaction hash corresponding to this event from the IndexerTendermintBlock 'tx_hashes'. + - fill_liquidity: The liquidity for the fill record. + - fill_type: The type for the fill record. + - usdc_asset_id: The USDC asset id.
+ Returns: JSON object containing fields: + - order: The updated order in order-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/order-model.ts). + Only returned if field == 'makerOrder'. + - fill: The updated fill in fill-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/fill-model.ts). + - perpetual_market: The perpetual market for the order in perpetual-market-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/perpetual-market-model.ts). + - perpetual_position: The updated perpetual position in perpetual-position-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/perpetual-position-model.ts). +*/ +CREATE OR REPLACE FUNCTION dydx_liquidation_fill_handler_per_order( + field text, block_height int, block_time timestamp, event_data jsonb, event_index int, transaction_index int, + transaction_hash text, fill_liquidity text, fill_type text, usdc_asset_id text) RETURNS jsonb AS $$ +DECLARE + order_ jsonb; + maker_order jsonb; + clob_pair_id bigint; + subaccount_uuid uuid; + perpetual_market_record perpetual_markets%ROWTYPE; + order_record orders%ROWTYPE; + fill_record fills%ROWTYPE; + perpetual_position_record perpetual_positions%ROWTYPE; + asset_record assets%ROWTYPE; + order_uuid uuid; + order_side text; + order_size numeric; + order_price numeric; + order_client_metadata bigint; + fee numeric; + fill_amount numeric; + total_filled numeric; + maker_price numeric; + event_id bytea; +BEGIN + order_ = event_data->field; + maker_order = event_data->'makerOrder'; + + IF field = 'makerOrder' THEN + clob_pair_id = jsonb_extract_path(order_, 'orderId', 'clobPairId')::bigint; + ELSE + clob_pair_id = jsonb_extract_path(order_, 'clobPairId')::bigint; + END IF; + + BEGIN + SELECT * INTO STRICT perpetual_market_record FROM perpetual_markets WHERE "clobPairId" = clob_pair_id; + EXCEPTION + WHEN NO_DATA_FOUND THEN + RAISE EXCEPTION 'Unable to find perpetual market with clobPairId %', clob_pair_id; + WHEN TOO_MANY_ROWS THEN + /** This should never happen and if it ever were to would indicate that the table has malformed data. */ + RAISE EXCEPTION 'Found multiple perpetual markets with clobPairId %', clob_pair_id; + END; + + BEGIN + SELECT * INTO STRICT asset_record FROM assets WHERE "id" = usdc_asset_id; + EXCEPTION + WHEN NO_DATA_FOUND THEN + RAISE EXCEPTION 'Unable to find asset with id %', usdc_asset_id; + END; + + /** + Calculate sizes, prices, and fill amounts. + + TODO(IND-238): Extract out calculation of quantums and subticks to their own SQL functions. 
+ */ + fill_amount = dydx_trim_scale(dydx_from_jsonlib_long(event_data->'fillAmount') * + power(10, perpetual_market_record."atomicResolution")::numeric); + maker_price = dydx_trim_scale(dydx_from_jsonlib_long(maker_order->'subticks') * + power(10, perpetual_market_record."quantumConversionExponent" + + asset_record."atomicResolution" - + perpetual_market_record."atomicResolution")::numeric); + total_filled = dydx_trim_scale(dydx_get_total_filled(fill_liquidity, event_data) * + power(10, perpetual_market_record."atomicResolution")::numeric); + fee = dydx_trim_scale(dydx_get_fee(fill_liquidity, event_data) * + power(10, asset_record."atomicResolution")::numeric); + order_price = dydx_trim_scale(dydx_from_jsonlib_long(order_->'subticks') * + power(10, perpetual_market_record."quantumConversionExponent" + + asset_record."atomicResolution" - + perpetual_market_record."atomicResolution")::numeric); + order_side = dydx_from_protocol_order_side(order_->'side'); + + IF field = 'makerOrder' THEN + order_uuid = dydx_uuid_from_order_id(order_->'orderId'); + subaccount_uuid = dydx_uuid_from_subaccount_id(jsonb_extract_path(order_, 'orderId', 'subaccountId')); + order_client_metadata = (order_->'clientMetadata')::bigint; + ELSE + order_uuid = NULL; + subaccount_uuid = dydx_uuid_from_subaccount_id(jsonb_extract_path(order_, 'liquidated')); + order_client_metadata = NULL; + END IF; + + IF field = 'makerOrder' THEN + order_size = dydx_trim_scale(dydx_from_jsonlib_long(order_->'quantums') * + power(10, perpetual_market_record."atomicResolution")::numeric); + + /** Upsert the order, populating the order_record fields with what will be in the database. */ + SELECT * INTO order_record FROM orders WHERE "id" = order_uuid; + order_record."size" = order_size; + order_record."price" = order_price; + order_record."timeInForce" = dydx_from_protocol_time_in_force(order_->'timeInForce'); + order_record."reduceOnly" = (order_->>'reduceOnly')::boolean; + order_record."orderFlags" = jsonb_extract_path(order_, 'orderId', 'orderFlags')::bigint; + order_record."goodTilBlock" = (order_->'goodTilBlock')::bigint; + order_record."goodTilBlockTime" = to_timestamp((order_->'goodTilBlockTime')::double precision); + order_record."clientMetadata" = order_client_metadata; + order_record."updatedAt" = block_time; + order_record."updatedAtHeight" = block_height; + + IF FOUND THEN + order_record."totalFilled" = total_filled; + order_record."status" = dydx_get_order_status(total_filled, order_record.size, 'NOT_CANCELED', order_record."orderFlags", order_record."timeInForce"); + + UPDATE orders + SET + "size" = order_record."size", + "totalFilled" = order_record."totalFilled", + "price" = order_record."price", + "status" = order_record."status", + "orderFlags" = order_record."orderFlags", + "goodTilBlock" = order_record."goodTilBlock", + "goodTilBlockTime" = order_record."goodTilBlockTime", + "timeInForce" = order_record."timeInForce", + "reduceOnly" = order_record."reduceOnly", + "clientMetadata" = order_record."clientMetadata", + "updatedAt" = order_record."updatedAt", + "updatedAtHeight" = order_record."updatedAtHeight" + WHERE id = order_uuid; + ELSE + order_record."id" = order_uuid; + order_record."subaccountId" = subaccount_uuid; + order_record."clientId" = jsonb_extract_path_text(order_, 'orderId', 'clientId')::bigint; + order_record."clobPairId" = clob_pair_id; + order_record."side" = order_side; + order_record."type" = 'LIMIT'; + + order_record."totalFilled" = fill_amount; + order_record."status" = 
dydx_get_order_status(fill_amount, order_size, 'NOT_CANCELED', order_record."orderFlags", order_record."timeInForce"); + order_record."createdAtHeight" = block_height; + INSERT INTO orders + ("id", "subaccountId", "clientId", "clobPairId", "side", "size", "totalFilled", "price", "type", + "status", "timeInForce", "reduceOnly", "orderFlags", "goodTilBlock", "goodTilBlockTime", "createdAtHeight", + "clientMetadata", "triggerPrice", "updatedAt", "updatedAtHeight") + VALUES (order_record.*); + END IF; + END IF; + + /* Insert the associated fill record for this order_fill event. */ + event_id = dydx_event_id_from_parts( + block_height, transaction_index, event_index); + INSERT INTO fills + ("id", "subaccountId", "side", "liquidity", "type", "clobPairId", "orderId", "size", "price", "quoteAmount", + "eventId", "transactionHash", "createdAt", "createdAtHeight", "clientMetadata", "fee") + VALUES (dydx_uuid_from_fill_event_parts(event_id, fill_liquidity), + subaccount_uuid, + order_side, + fill_liquidity, + fill_type, + clob_pair_id, + order_uuid, + fill_amount, + maker_price, + dydx_trim_scale(fill_amount * maker_price), + event_id, + transaction_hash, + block_time, + block_height, + order_client_metadata, + fee) + RETURNING * INTO fill_record; + + /* Upsert the perpetual_position record for this order_fill event. */ + SELECT * INTO perpetual_position_record FROM perpetual_positions WHERE "subaccountId" = subaccount_uuid + AND "perpetualId" = perpetual_market_record."id" + ORDER BY "createdAtHeight" DESC; + IF NOT FOUND THEN + RAISE EXCEPTION 'Unable to find existing perpetual position, subaccountId: %, perpetualId: %', subaccount_uuid, perpetual_market_record."id"; + END IF; + DECLARE + sum_open numeric = perpetual_position_record."sumOpen"; + entry_price numeric = perpetual_position_record."entryPrice"; + sum_close numeric = perpetual_position_record."sumClose"; + exit_price numeric = perpetual_position_record."exitPrice"; + BEGIN + IF dydx_perpetual_position_and_order_side_matching( + perpetual_position_record."side", order_side) THEN + sum_open = dydx_trim_scale(perpetual_position_record."sumOpen" + fill_amount); + entry_price = dydx_get_weighted_average( + perpetual_position_record."entryPrice", perpetual_position_record."sumOpen", + maker_price, fill_amount); + perpetual_position_record."sumOpen" = sum_open; + perpetual_position_record."entryPrice" = entry_price; + ELSE + sum_close = dydx_trim_scale(perpetual_position_record."sumClose" + fill_amount); + exit_price = dydx_get_weighted_average( + perpetual_position_record."exitPrice", perpetual_position_record."sumClose", + maker_price, fill_amount); + perpetual_position_record."sumClose" = sum_close; + perpetual_position_record."exitPrice" = exit_price; + END IF; + UPDATE perpetual_positions + SET + "sumOpen" = sum_open, + "entryPrice" = entry_price, + "sumClose" = sum_close, + "exitPrice" = exit_price + WHERE "id" = perpetual_position_record.id; + END; + + IF field = 'makerOrder' THEN + RETURN jsonb_build_object( + 'order', + dydx_to_jsonb(order_record), + 'fill', + dydx_to_jsonb(fill_record), + 'perpetual_market', + dydx_to_jsonb(perpetual_market_record), + 'perpetual_position', + dydx_to_jsonb(perpetual_position_record) + ); + ELSE + RETURN jsonb_build_object( + 'fill', + dydx_to_jsonb(fill_record), + 'perpetual_market', + dydx_to_jsonb(perpetual_market_record), + 'perpetual_position', + dydx_to_jsonb(perpetual_position_record) + ); + END IF; +END; +$$ LANGUAGE plpgsql; diff --git 
a/indexer/services/ender/src/scripts/dydx_liquidity_tier_handler.sql b/indexer/services/ender/src/scripts/dydx_liquidity_tier_handler.sql new file mode 100644 index 0000000000..c59b05ac5b --- /dev/null +++ b/indexer/services/ender/src/scripts/dydx_liquidity_tier_handler.sql @@ -0,0 +1,34 @@ +/** + Parameters: + - event_data: The 'data' field of the IndexerTendermintEvent (https://github.com/dydxprotocol/v4-proto/blob/8d35c86/dydxprotocol/indexer/indexer_manager/event.proto#L25) + converted to JSON format. Conversion to JSON is expected to be done by JSON.stringify. + Returns: JSON object containing fields: + - liquidity_tier: The upserted liquidity tier in liquidity-tiers-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/liquidity-tiers-model.ts). +*/ +CREATE OR REPLACE FUNCTION dydx_liquidity_tier_handler(event_data jsonb) RETURNS jsonb AS $$ +DECLARE + liquidity_tier_record liquidity_tiers%ROWTYPE; +BEGIN + liquidity_tier_record."id" = (event_data->'id')::integer; + liquidity_tier_record."name" = event_data->>'name'; + liquidity_tier_record."initialMarginPpm" = (event_data->'initialMarginPpm')::bigint; + liquidity_tier_record."maintenanceFractionPpm" = (event_data->'maintenanceFractionPpm')::bigint; + liquidity_tier_record."basePositionNotional" = dydx_trim_scale(power(10, -6)::numeric * dydx_from_jsonlib_long(event_data->'basePositionNotional')); + + INSERT INTO liquidity_tiers + VALUES (liquidity_tier_record.*) + ON CONFLICT ("id") DO + UPDATE + SET + "name" = liquidity_tier_record."name", + "initialMarginPpm" = liquidity_tier_record."initialMarginPpm", + "maintenanceFractionPpm" = liquidity_tier_record."maintenanceFractionPpm", + "basePositionNotional" = liquidity_tier_record."basePositionNotional" + RETURNING * INTO liquidity_tier_record; + + RETURN jsonb_build_object( + 'liquidity_tier', + dydx_to_jsonb(liquidity_tier_record) + ); +END; +$$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/indexer/services/ender/src/scripts/dydx_market_create_handler.sql b/indexer/services/ender/src/scripts/dydx_market_create_handler.sql new file mode 100644 index 0000000000..db130c4ec5 --- /dev/null +++ b/indexer/services/ender/src/scripts/dydx_market_create_handler.sql @@ -0,0 +1,32 @@ +/** + Parameters: + - event_data: The 'data' field of the IndexerTendermintEvent (https://github.com/dydxprotocol/v4-proto/blob/8d35c86/dydxprotocol/indexer/indexer_manager/event.proto#L25) + converted to JSON format. Conversion to JSON is expected to be done by JSON.stringify. + Returns: JSON object containing fields: + - market: The created market in market-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/market-model.ts). +*/ +CREATE OR REPLACE FUNCTION dydx_market_create_handler(event_data jsonb) RETURNS jsonb AS $$ +DECLARE + market_record_id integer; + market_record markets%ROWTYPE; +BEGIN + market_record_id = (event_data->'marketId')::integer; + SELECT * INTO market_record FROM markets WHERE "id" = market_record_id; + + IF FOUND THEN + RAISE EXCEPTION 'Market in MarketCreate already exists.
Record: %', market_record; + END IF; + + market_record."id" = market_record_id; + market_record."pair" = event_data->'marketCreate'->'base'->>'pair'; + market_record."exponent" = (event_data->'marketCreate'->'exponent')::integer; + market_record."minPriceChangePpm" = (event_data->'marketCreate'->'base'->'minPriceChangePpm')::integer; + + INSERT INTO markets VALUES (market_record.*); + + RETURN jsonb_build_object( + 'market', + dydx_to_jsonb(market_record) + ); +END; +$$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/indexer/services/ender/src/scripts/dydx_market_modify_handler.sql b/indexer/services/ender/src/scripts/dydx_market_modify_handler.sql new file mode 100644 index 0000000000..de6b3af1b5 --- /dev/null +++ b/indexer/services/ender/src/scripts/dydx_market_modify_handler.sql @@ -0,0 +1,34 @@ +/** + Parameters: + - event_data: The 'data' field of the IndexerTendermintEvent (https://github.com/dydxprotocol/v4-proto/blob/8d35c86/dydxprotocol/indexer/indexer_manager/event.proto#L25) + converted to JSON format. Conversion to JSON is expected to be done by JSON.stringify. + Returns: JSON object containing fields: + - market: The updated market in market-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/market-model.ts). +*/ +CREATE OR REPLACE FUNCTION dydx_market_modify_handler(event_data jsonb) RETURNS jsonb AS $$ +DECLARE + market_record_id integer; + market_record markets%ROWTYPE; +BEGIN + market_record_id = (event_data->'marketId')::integer; + SELECT * INTO market_record FROM markets WHERE "id" = market_record_id; + + IF NOT FOUND THEN + RAISE EXCEPTION E'Market in MarketModify doesn\'t exist. Id: %', market_record_id; + END IF; + + market_record."pair" = event_data->'marketModify'->'base'->>'pair'; + market_record."minPriceChangePpm" = (event_data->'marketModify'->'base'->'minPriceChangePpm')::integer; + + UPDATE markets + SET + "pair" = market_record."pair", + "minPriceChangePpm" = market_record."minPriceChangePpm" + WHERE id = market_record."id"; + + RETURN jsonb_build_object( + 'market', + dydx_to_jsonb(market_record) + ); +END; +$$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/indexer/services/ender/src/scripts/dydx_market_price_update_handler.sql b/indexer/services/ender/src/scripts/dydx_market_price_update_handler.sql new file mode 100644 index 0000000000..afbabce7eb --- /dev/null +++ b/indexer/services/ender/src/scripts/dydx_market_price_update_handler.sql @@ -0,0 +1,51 @@ +/** + Parameters: + - block_height: the height of the block being processed. + - block_time: the time of the block being processed. + - event_data: The 'data' field of the IndexerTendermintEvent (https://github.com/dydxprotocol/v4-proto/blob/8d35c86/dydxprotocol/indexer/indexer_manager/event.proto#L25) + converted to JSON format. Conversion to JSON is expected to be done by JSON.stringify. + Returns: JSON object containing fields: + - market: The updated market in market-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/market-model.ts). + - oracle_price: The created oracle price in oracle-price-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/oracle-price-model.ts).
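+ Worked example (values invented for illustration): with priceUpdate.priceWithExponent =
+ 2868819524 and markets.exponent = -5, the stored oracle price is
+     2868819524 * 10^(-5) = 28688.19524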
+*/ +CREATE OR REPLACE FUNCTION dydx_market_price_update_handler(block_height int, block_time timestamp, event_data jsonb) RETURNS jsonb AS $$ +DECLARE + market_record_id integer; + market_record markets%ROWTYPE; + oracle_price numeric; + oracle_price_record oracle_prices%ROWTYPE; +BEGIN + market_record_id = (event_data->'marketId')::integer; + SELECT * INTO market_record FROM markets WHERE "id" = market_record_id; + + IF NOT FOUND THEN + RAISE EXCEPTION 'MarketPriceUpdateEvent contains a non-existent market id. Id: %', market_record_id; + END IF; + + oracle_price = dydx_trim_scale( + dydx_from_jsonlib_long(event_data->'priceUpdate'->'priceWithExponent') * + power(10, market_record.exponent::numeric)); + + market_record."oraclePrice" = oracle_price; + + UPDATE markets + SET + "oraclePrice" = market_record."oraclePrice" + WHERE id = market_record."id"; + + oracle_price_record."id" = dydx_uuid_from_oracle_price_parts(market_record_id, block_height); + oracle_price_record."effectiveAt" = block_time; + oracle_price_record."effectiveAtHeight" = block_height; + oracle_price_record."marketId" = market_record_id; + oracle_price_record."price" = oracle_price; + + INSERT INTO oracle_prices VALUES (oracle_price_record.*); + + RETURN jsonb_build_object( + 'market', + dydx_to_jsonb(market_record), + 'oracle_price', + dydx_to_jsonb(oracle_price_record) + ); +END; +$$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/indexer/services/ender/src/scripts/dydx_order_fill_handler_per_order.sql b/indexer/services/ender/src/scripts/dydx_order_fill_handler_per_order.sql index 20d8248af7..ddb45e55d4 100644 --- a/indexer/services/ender/src/scripts/dydx_order_fill_handler_per_order.sql +++ b/indexer/services/ender/src/scripts/dydx_order_fill_handler_per_order.sql @@ -11,15 +11,17 @@ - transaction_hash: The transaction hash corresponding to this event from the IndexerTendermintBlock 'tx_hashes'. - fill_liquidity: The liquidity for the fill record. - fill_type: The type for the fill record. - - is_cancelled: Whether the order is cancelled. + - usdc_asset_id: The USDC asset id. + - order_canceled_status: Status of order cancelation Returns: JSON object containing fields: - order: The updated order in order-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/order-model.ts). - fill: The updated fill in fill-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/fill-model.ts). - perpetual_market: The perpetual market for the order in perpetual-market-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/perpetual-market-model.ts). + - perpetual_position: The updated perpetual position in perpetual-position-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/perpetual-position-model.ts). 
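+   order_canceled_status is expected to be one of 'NOT_CANCELED', 'BEST_EFFORT_CANCELED', or
+   'CANCELED', the values dydx_get_order_status branches on; the liquidation fill handler
+   always passes 'NOT_CANCELED'.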
*/ CREATE OR REPLACE FUNCTION dydx_order_fill_handler_per_order( field text, block_height int, block_time timestamp, event_data jsonb, event_index int, transaction_index int, - transaction_hash text, fill_liquidity text, fill_type text, usdc_asset_id text, is_cancelled boolean) RETURNS jsonb AS $$ + transaction_hash text, fill_liquidity text, fill_type text, usdc_asset_id text, order_canceled_status text) RETURNS jsonb AS $$ DECLARE order_ jsonb; maker_order jsonb; @@ -34,6 +36,7 @@ DECLARE order_side text; order_size numeric; order_price numeric; + order_client_metadata bigint; fee numeric; fill_amount numeric; total_filled numeric; @@ -77,14 +80,15 @@ BEGIN power(10, perpetual_market_record."quantumConversionExponent" + asset_record."atomicResolution" - perpetual_market_record."atomicResolution")::numeric); - total_filled = dydx_trim_scale(get_total_filled(fill_liquidity, event_data) * + total_filled = dydx_trim_scale(dydx_get_total_filled(fill_liquidity, event_data) * power(10, perpetual_market_record."atomicResolution")::numeric); - fee = dydx_trim_scale(get_fee(fill_liquidity, event_data) * + fee = dydx_trim_scale(dydx_get_fee(fill_liquidity, event_data) * power(10, asset_record."atomicResolution")::numeric); order_uuid = dydx_uuid_from_order_id(order_->'orderId'); subaccount_uuid = dydx_uuid_from_subaccount_id(jsonb_extract_path(order_, 'orderId', 'subaccountId')); order_side = dydx_from_protocol_order_side(order_->'side'); + order_client_metadata = (order_->'clientMetadata')::bigint; /** Upsert the order, populating the order_record fields with what will be in the database. */ SELECT * INTO order_record FROM orders WHERE "id" = order_uuid; @@ -95,11 +99,13 @@ BEGIN order_record."orderFlags" = jsonb_extract_path(order_, 'orderId', 'orderFlags')::bigint; order_record."goodTilBlock" = (order_->'goodTilBlock')::bigint; order_record."goodTilBlockTime" = to_timestamp((order_->'goodTilBlockTime')::double precision); - order_record."clientMetadata" = (order_->'clientMetadata')::bigint; + order_record."clientMetadata" = order_client_metadata; + order_record."updatedAt" = block_time; + order_record."updatedAtHeight" = block_height; IF FOUND THEN order_record."totalFilled" = total_filled; - order_record."status" = get_order_status(total_filled, order_record.size, is_cancelled); + order_record."status" = dydx_get_order_status(total_filled, order_record.size, order_canceled_status, order_record."orderFlags", order_record."timeInForce"); UPDATE orders SET @@ -113,8 +119,8 @@ BEGIN "timeInForce" = order_record."timeInForce", "reduceOnly" = order_record."reduceOnly", "clientMetadata" = order_record."clientMetadata", - "updatedAt" = block_time, - "updatedAtHeight" = block_height + "updatedAt" = order_record."updatedAt", + "updatedAtHeight" = order_record."updatedAtHeight" WHERE id = order_uuid; ELSE order_record."id" = order_uuid; @@ -125,10 +131,8 @@ BEGIN order_record."type" = 'LIMIT'; /* TODO: Add additional order types once we support */ order_record."totalFilled" = fill_amount; - order_record."status" = get_order_status(fill_amount, order_size, is_cancelled); + order_record."status" = dydx_get_order_status(fill_amount, order_size, order_canceled_status, order_record."orderFlags", order_record."timeInForce"); order_record."createdAtHeight" = block_height; - order_record."updatedAt" = block_time; - order_record."updatedAtHeight" = block_height; INSERT INTO orders ("id", "subaccountId", "clientId", "clobPairId", "side", "size", "totalFilled", "price", "type", "status", "timeInForce", "reduceOnly", 
"orderFlags", "goodTilBlock", "goodTilBlockTime", "createdAtHeight", @@ -156,7 +160,7 @@ BEGIN transaction_hash, block_time, block_height, - order_record."clientMetadata", + order_client_metadata, fee) RETURNING * INTO fill_record; diff --git a/indexer/services/ender/src/scripts/dydx_perpetual_market_handler.sql b/indexer/services/ender/src/scripts/dydx_perpetual_market_handler.sql new file mode 100644 index 0000000000..e34d6867f3 --- /dev/null +++ b/indexer/services/ender/src/scripts/dydx_perpetual_market_handler.sql @@ -0,0 +1,36 @@ +/** + Parameters: + - event_data: The 'data' field of the IndexerTendermintEvent (https://github.com/dydxprotocol/v4-proto/blob/8d35c86/dydxprotocol/indexer/indexer_manager/event.proto#L25) + converted to JSON format. Conversion to JSON is expected to be done by JSON.stringify. + Returns: JSON object containing fields: + - perpetual_market: The updated perpetual market in perpetual-market-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/perpetual-market-model.ts). +*/ +CREATE OR REPLACE FUNCTION dydx_perpetual_market_handler(event_data jsonb) RETURNS jsonb AS $$ +DECLARE + perpetual_market_record perpetual_markets%ROWTYPE; +BEGIN + perpetual_market_record."id" = (event_data->'id')::bigint; + perpetual_market_record."clobPairId" = (event_data->'clobPairId')::bigint; + perpetual_market_record."ticker" = event_data->>'ticker'; + perpetual_market_record."marketId" = (event_data->'marketId')::integer; + perpetual_market_record."status" = dydx_clob_pair_status_to_market_status(event_data->'status'); + perpetual_market_record."lastPrice" = 0; + perpetual_market_record."priceChange24H" = 0; + perpetual_market_record."trades24H" = 0; + perpetual_market_record."volume24H" = 0; + perpetual_market_record."nextFundingRate" = 0; + perpetual_market_record."openInterest"= 0; + perpetual_market_record."quantumConversionExponent" = (event_data->'quantumConversionExponent')::integer; + perpetual_market_record."atomicResolution" = (event_data->'atomicResolution')::integer; + perpetual_market_record."subticksPerTick" = (event_data->'subticksPerTick')::integer; + perpetual_market_record."stepBaseQuantums" = dydx_from_jsonlib_long(event_data->'stepBaseQuantums'); + perpetual_market_record."liquidityTierId" = (event_data->'liquidityTier')::integer; + + INSERT INTO perpetual_markets VALUES (perpetual_market_record.*) RETURNING * INTO perpetual_market_record; + + RETURN jsonb_build_object( + 'perpetual_market', + dydx_to_jsonb(perpetual_market_record) + ); +END; +$$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/indexer/services/ender/src/scripts/dydx_protocol_condition_type_to_order_type.sql b/indexer/services/ender/src/scripts/dydx_protocol_condition_type_to_order_type.sql new file mode 100644 index 0000000000..8ca8a7bf99 --- /dev/null +++ b/indexer/services/ender/src/scripts/dydx_protocol_condition_type_to_order_type.sql @@ -0,0 +1,25 @@ +/** + Converts the 'ConditionType' enum from the IndexerOrder protobuf (https://github.com/dydxprotocol/v4-proto/blob/4b721881fdfe99485336e221def03dc5b86eb0a1/dydxprotocol/indexer/protocol/v1/clob.proto#L131) + to the 'OrderType' enum in postgres. 
+ */ +CREATE OR REPLACE FUNCTION dydx_protocol_condition_type_to_order_type(condition_type jsonb) RETURNS text AS $$ +DECLARE + UNRECOGNIZED constant jsonb = '-1'::jsonb; + CONDITION_TYPE_UNSPECIFIED constant jsonb = '0'::jsonb; + CONDITION_TYPE_STOP_LOSS constant jsonb = '1'::jsonb; + CONDITION_TYPE_TAKE_PROFIT constant jsonb = '2'::jsonb; +BEGIN + CASE condition_type + WHEN UNRECOGNIZED THEN + RETURN 'LIMIT'; + WHEN CONDITION_TYPE_UNSPECIFIED THEN + RETURN 'LIMIT'; + WHEN CONDITION_TYPE_STOP_LOSS THEN + RETURN 'STOP_LIMIT'; + WHEN CONDITION_TYPE_TAKE_PROFIT THEN + RETURN 'TAKE_PROFIT'; + ELSE + RAISE EXCEPTION 'Unexpected ConditionType: %', condition_type; + END CASE; +END; +$$ LANGUAGE plpgsql IMMUTABLE PARALLEL SAFE; diff --git a/indexer/services/ender/src/scripts/dydx_stateful_order_handler.sql b/indexer/services/ender/src/scripts/dydx_stateful_order_handler.sql new file mode 100644 index 0000000000..275a2a6185 --- /dev/null +++ b/indexer/services/ender/src/scripts/dydx_stateful_order_handler.sql @@ -0,0 +1,159 @@ +/** + Parameters: + - block_height: the height of the block being processed. + - block_time: the time of the block being processed. + - event_data: The 'data' field of the IndexerTendermintEvent (https://github.com/dydxprotocol/v4-proto/blob/8d35c86/dydxprotocol/indexer/indexer_manager/event.proto#L25) + converted to JSON format. Conversion to JSON is expected to be done by JSON.stringify. + Returns: JSON object containing fields: + - order: The upserted order in order-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/order-model.ts). +*/ +CREATE OR REPLACE FUNCTION dydx_stateful_order_handler( + block_height int, block_time timestamp, event_data jsonb) RETURNS jsonb AS $$ +DECLARE + QUOTE_CURRENCY_ATOMIC_RESOLUTION constant numeric = -6; + + order_ jsonb; + order_id jsonb; + clob_pair_id bigint; + subaccount_id uuid; + perpetual_market_record perpetual_markets%ROWTYPE; + order_record orders%ROWTYPE; + subaccount_record subaccounts%ROWTYPE; +BEGIN + /** TODO(IND-334): Remove after deprecating StatefulOrderPlacementEvent. */ + IF event_data->'orderPlace' IS NOT NULL OR event_data->'longTermOrderPlacement' IS NOT NULL OR event_data->'conditionalOrderPlacement' IS NOT NULL THEN + order_ = COALESCE(event_data->'orderPlace'->'order', event_data->'longTermOrderPlacement'->'order', event_data->'conditionalOrderPlacement'->'order'); + clob_pair_id = (order_->'orderId'->'clobPairId')::bigint; + + BEGIN + SELECT * INTO STRICT perpetual_market_record FROM perpetual_markets WHERE "clobPairId" = clob_pair_id; + EXCEPTION + WHEN NO_DATA_FOUND THEN + RAISE EXCEPTION 'Unable to find perpetual market with clobPairId: %', clob_pair_id; + WHEN TOO_MANY_ROWS THEN + /** This should never happen and if it ever were to would indicate that the table has malformed data. */ + RAISE EXCEPTION 'Found multiple perpetual markets with clobPairId: %', clob_pair_id; + END; + + /** + Calculate sizes, prices, and fill amounts. + + TODO(IND-238): Extract out calculation of quantums and subticks to their own SQL functions.
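+      Worked example under assumed (invented) parameters -- atomicResolution = -10 and
+      quantumConversionExponent = -9, with QUOTE_CURRENCY_ATOMIC_RESOLUTION = -6 as declared
+      above:
+        price = 5e9 subticks * 10^(-9 + (-6) - (-10)) = 5e9 * 10^(-5) = 50,000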
+ */ + order_record."id" = dydx_uuid_from_order_id(order_->'orderId'); + order_record."subaccountId" = dydx_uuid_from_subaccount_id(order_->'orderId'->'subaccountId'); + order_record."clientId" = jsonb_extract_path_text(order_, 'orderId', 'clientId')::bigint; + order_record."clobPairId" = clob_pair_id; + order_record."side" = dydx_from_protocol_order_side(order_->'side'); + order_record."size" = dydx_trim_scale(dydx_from_jsonlib_long(order_->'quantums') * + power(10, perpetual_market_record."atomicResolution")::numeric); + order_record."totalFilled" = 0; + order_record."price" = dydx_trim_scale(dydx_from_jsonlib_long(order_->'subticks') * + power(10, perpetual_market_record."quantumConversionExponent" + + QUOTE_CURRENCY_ATOMIC_RESOLUTION - + perpetual_market_record."atomicResolution")::numeric); + order_record."timeInForce" = dydx_from_protocol_time_in_force(order_->'timeInForce'); + order_record."reduceOnly" = (order_->>'reduceOnly')::boolean; + order_record."orderFlags" = (order_->'orderId'->'orderFlags')::bigint; + order_record."goodTilBlockTime" = to_timestamp((order_->'goodTilBlockTime')::double precision); + order_record."clientMetadata" = (order_->'clientMetadata')::bigint; + order_record."createdAtHeight" = block_height; + order_record."updatedAt" = block_time; + order_record."updatedAtHeight" = block_height; + + CASE + WHEN event_data->'conditionalOrderPlacement' IS NOT NULL THEN + order_record."type" = dydx_protocol_condition_type_to_order_type(order_->'conditionType'); + order_record."status" = 'UNTRIGGERED'; + order_record."triggerPrice" = dydx_trim_scale(dydx_from_jsonlib_long(order_->'conditionalOrderTriggerSubticks') * + power(10, perpetual_market_record."quantumConversionExponent" + + QUOTE_CURRENCY_ATOMIC_RESOLUTION - + perpetual_market_record."atomicResolution")::numeric); + ELSE + order_record."type" = 'LIMIT'; + order_record."status" = 'OPEN'; + END CASE; + + INSERT INTO orders VALUES (order_record.*) ON CONFLICT ("id") DO + UPDATE SET + "subaccountId" = order_record."subaccountId", + "clientId" = order_record."clientId", + "clobPairId" = order_record."clobPairId", + "side" = order_record."side", + "size" = order_record."size", + "totalFilled" = order_record."totalFilled", + "price" = order_record."price", + "timeInForce" = order_record."timeInForce", + "reduceOnly" = order_record."reduceOnly", + "orderFlags" = order_record."orderFlags", + "goodTilBlockTime" = order_record."goodTilBlockTime", + "clientMetadata" = order_record."clientMetadata", + "createdAtHeight" = order_record."createdAtHeight", + "updatedAt" = order_record."updatedAt", + "updatedAtHeight" = order_record."updatedAtHeight", + "type" = order_record."type", + "status" = order_record."status", + "triggerPrice" = order_record."triggerPrice" + RETURNING * INTO order_record; + + RETURN jsonb_build_object( + 'order', + dydx_to_jsonb(order_record), + 'perpetual_market', + dydx_to_jsonb(perpetual_market_record) + ); + ELSIF event_data->'conditionalOrderTriggered' IS NOT NULL OR event_data->'orderRemoval' IS NOT NULL THEN + CASE + WHEN event_data->'conditionalOrderTriggered' IS NOT NULL THEN + order_id = event_data->'conditionalOrderTriggered'->'triggeredOrderId'; + order_record."status" = 'OPEN'; + ELSE + order_id = event_data->'orderRemoval'->'removedOrderId'; + order_record."status" = 'CANCELED'; + END CASE; + + clob_pair_id = (order_id->'clobPairId')::bigint; + BEGIN + SELECT * INTO STRICT perpetual_market_record FROM perpetual_markets WHERE "clobPairId" = clob_pair_id; + EXCEPTION + WHEN NO_DATA_FOUND THEN + 
RAISE EXCEPTION 'Unable to find perpetual market with clobPairId: %', clob_pair_id; + WHEN TOO_MANY_ROWS THEN + /** This should never happen and if it ever were to would indicate that the table has malformed data. */ + RAISE EXCEPTION 'Found multiple perpetual markets with clobPairId: %', clob_pair_id; + END; + + subaccount_id = dydx_uuid_from_subaccount_id(order_id->'subaccountId'); + SELECT * INTO subaccount_record FROM subaccounts WHERE "id" = subaccount_id; + IF NOT FOUND THEN + RAISE EXCEPTION 'Subaccount for order not found: %', order_; + END IF; + + order_record."id" = dydx_uuid_from_order_id(order_id); + order_record."updatedAt" = block_time; + order_record."updatedAtHeight" = block_height; + UPDATE orders + SET + "status" = order_record."status", + "updatedAt" = order_record."updatedAt", + "updatedAtHeight" = order_record."updatedAtHeight" + WHERE "id" = order_record."id" + RETURNING * INTO order_record; + + IF NOT FOUND THEN + RAISE EXCEPTION 'Unable to update order status with orderId: %', dydx_uuid_from_order_id(order_id); + END IF; + + RETURN jsonb_build_object( + 'order', + dydx_to_jsonb(order_record), + 'perpetual_market', + dydx_to_jsonb(perpetual_market_record), + 'subaccount', + dydx_to_jsonb(subaccount_record) + ); + ELSE + RAISE EXCEPTION 'Unknown sub-event type %', event_data; + END IF; +END; +$$ LANGUAGE plpgsql; diff --git a/indexer/services/ender/src/scripts/dydx_transfer_handler.sql b/indexer/services/ender/src/scripts/dydx_transfer_handler.sql new file mode 100644 index 0000000000..3077c74b6c --- /dev/null +++ b/indexer/services/ender/src/scripts/dydx_transfer_handler.sql @@ -0,0 +1,95 @@ +/** + Parameters: + - block_height: the height of the block being processed. + - block_time: the time of the block being processed. + - event_data: The 'data' field of the IndexerTendermintEvent (https://github.com/dydxprotocol/v4-proto/blob/8d35c86/dydxprotocol/indexer/indexer_manager/event.proto#L25) + converted to JSON format. Conversion to JSON is expected to be done by JSON.stringify. + - event_index: The 'event_index' of the IndexerTendermintEvent. + - transaction_index: The transaction_index of the IndexerTendermintEvent after the conversion that takes into + account the block_event (https://github.com/dydxprotocol/indexer/blob/cc70982/services/ender/src/lib/helper.ts#L33) + - transaction_hash: The transaction hash corresponding to this event from the IndexerTendermintBlock 'tx_hashes'. + Returns: JSON object containing fields: + - asset: The existing asset in asset-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/asset-model.ts). + - transfer: The new transfer in transfer-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/transfer-model.ts).
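+   Note: 'sender' and 'recipient' each carry either a 'subaccountId' (subaccount-to-subaccount
+   transfers) or a wallet 'address' (e.g. deposits and withdrawals); the handler upserts the
+   recipient subaccount and inserts wallet rows as needed, per the branches below.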
+*/ +CREATE OR REPLACE FUNCTION dydx_transfer_handler( + block_height int, block_time timestamp, event_data jsonb, event_index int, transaction_index int, + transaction_hash text) RETURNS jsonb AS $$ +DECLARE + asset_record assets%ROWTYPE; + recipient_subaccount_record subaccounts%ROWTYPE; + recipient_wallet_record wallets%ROWTYPE; + sender_wallet_record wallets%ROWTYPE; + transfer_record transfers%ROWTYPE; +BEGIN + asset_record."id" = event_data->>'assetId'; + SELECT * INTO asset_record FROM assets WHERE "id" = asset_record."id"; + + IF NOT FOUND THEN + RAISE EXCEPTION 'Unable to find asset with assetId: %', asset_record."id"; + END IF; + + IF event_data->'recipient'->'subaccountId' IS NOT NULL THEN + transfer_record."recipientSubaccountId" = dydx_uuid_from_subaccount_id(event_data->'recipient'->'subaccountId'); + + recipient_subaccount_record."id" = transfer_record."recipientSubaccountId"; + recipient_subaccount_record."address" = event_data->'recipient'->'subaccountId'->>'owner'; + recipient_subaccount_record."subaccountNumber" = (event_data->'recipient'->'subaccountId'->'number')::int; + recipient_subaccount_record."updatedAtHeight" = block_height; + recipient_subaccount_record."updatedAt" = block_time; + + INSERT INTO subaccounts VALUES (recipient_subaccount_record.*) + ON CONFLICT ("id") DO + UPDATE + SET + "updatedAtHeight" = recipient_subaccount_record."updatedAtHeight", + "updatedAt" = recipient_subaccount_record."updatedAt"; + END IF; + + IF event_data->'sender'->'subaccountId' IS NOT NULL THEN + transfer_record."senderSubaccountId" = dydx_uuid_from_subaccount_id(event_data->'sender'->'subaccountId'); + END IF; + + IF event_data->'recipient'->'address' IS NOT NULL THEN + transfer_record."recipientWalletAddress" = event_data->'recipient'->>'address'; + + recipient_wallet_record."address" = transfer_record."recipientWalletAddress"; + INSERT INTO wallets VALUES (recipient_wallet_record.*) ON CONFLICT DO NOTHING; + END IF; + + IF event_data->'sender'->'address' IS NOT NULL THEN + transfer_record."senderWalletAddress" = event_data->'sender'->>'address'; + + sender_wallet_record."address" = transfer_record."senderWalletAddress"; + INSERT INTO wallets VALUES (sender_wallet_record.*) ON CONFLICT DO NOTHING; + END IF; + + transfer_record."assetId" = event_data->>'assetId'; + transfer_record."size" = dydx_trim_scale(dydx_from_jsonlib_long(event_data->'amount') * power(10, asset_record."atomicResolution")::numeric); + transfer_record."eventId" = dydx_event_id_from_parts(block_height, transaction_index, event_index); + transfer_record."transactionHash" = transaction_hash; + transfer_record."createdAt" = block_time; + transfer_record."createdAtHeight" = block_height; + transfer_record."id" = dydx_uuid_from_transfer_parts( + transfer_record."eventId", + transfer_record."assetId", + transfer_record."senderSubaccountId", + transfer_record."recipientSubaccountId", + transfer_record."senderWalletAddress", + transfer_record."recipientWalletAddress"); + + BEGIN + INSERT INTO transfers VALUES (transfer_record.*); + EXCEPTION + WHEN check_violation THEN + RAISE EXCEPTION 'Record: %, event: %', transfer_record, event_data; + END; + + RETURN jsonb_build_object( + 'asset', + dydx_to_jsonb(asset_record), + 'transfer', + dydx_to_jsonb(transfer_record) + ); +END; +$$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/indexer/services/ender/src/scripts/dydx_update_clob_pair_handler.sql b/indexer/services/ender/src/scripts/dydx_update_clob_pair_handler.sql new file mode 100644 index 
0000000000..3ed74ac0cd --- /dev/null +++ b/indexer/services/ender/src/scripts/dydx_update_clob_pair_handler.sql @@ -0,0 +1,37 @@ +/** + Parameters: + - event_data: The 'data' field of the IndexerTendermintEvent (https://github.com/dydxprotocol/v4-proto/blob/8d35c86/dydxprotocol/indexer/indexer_manager/event.proto#L25) + converted to JSON format. Conversion to JSON is expected to be done by JSON.stringify. + Returns: JSON object containing fields: + - perpetual_market: The updated perpetual market in perpetual-market-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/perpetual-market-model.ts). +*/ +CREATE OR REPLACE FUNCTION dydx_update_clob_pair_handler(event_data jsonb) RETURNS jsonb AS $$ +DECLARE + clob_pair_id bigint; + perpetual_market_record perpetual_markets%ROWTYPE; +BEGIN + clob_pair_id = (event_data->'clobPairId')::bigint; + perpetual_market_record."status" = dydx_clob_pair_status_to_market_status(event_data->'status'); + perpetual_market_record."quantumConversionExponent" = (event_data->'quantumConversionExponent')::integer; + perpetual_market_record."subticksPerTick" = (event_data->'subticksPerTick')::integer; + perpetual_market_record."stepBaseQuantums" = dydx_from_jsonlib_long(event_data->'stepBaseQuantums'); + + UPDATE perpetual_markets + SET + "status" = perpetual_market_record."status", + "quantumConversionExponent" = perpetual_market_record."quantumConversionExponent", + "subticksPerTick" = perpetual_market_record."subticksPerTick", + "stepBaseQuantums" = perpetual_market_record."stepBaseQuantums" + WHERE "clobPairId" = clob_pair_id + RETURNING * INTO perpetual_market_record; + + IF NOT FOUND THEN + RAISE EXCEPTION 'Could not find perpetual market with corresponding clobPairId %', event_data; + END IF; + + RETURN jsonb_build_object( + 'perpetual_market', + dydx_to_jsonb(perpetual_market_record) + ); +END; +$$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/indexer/services/ender/src/scripts/dydx_update_perpetual_handler.sql b/indexer/services/ender/src/scripts/dydx_update_perpetual_handler.sql new file mode 100644 index 0000000000..c1df39c358 --- /dev/null +++ b/indexer/services/ender/src/scripts/dydx_update_perpetual_handler.sql @@ -0,0 +1,37 @@ +/** + Parameters: + - event_data: The 'data' field of the IndexerTendermintEvent (https://github.com/dydxprotocol/v4-proto/blob/8d35c86/dydxprotocol/indexer/indexer_manager/event.proto#L25) + converted to JSON format. Conversion to JSON is expected to be done by JSON.stringify. + Returns: JSON object containing fields: + - perpetual_market: The updated perpetual market in perpetual-market-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/perpetual-market-model.ts). 
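+   Hypothetical invocation (field values invented for illustration):
+       SELECT dydx_update_perpetual_handler(
+           '{"id": 0, "ticker": "BTC-USD", "marketId": 0, "atomicResolution": -10, "liquidityTier": 0}'::jsonb);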
+*/ +CREATE OR REPLACE FUNCTION dydx_update_perpetual_handler(event_data jsonb) RETURNS jsonb AS $$ +DECLARE + perpetual_market_id bigint; + perpetual_market_record perpetual_markets%ROWTYPE; +BEGIN + perpetual_market_id = (event_data->'id')::bigint; + perpetual_market_record."ticker" = event_data->>'ticker'; + perpetual_market_record."marketId" = (event_data->'marketId')::integer; + perpetual_market_record."atomicResolution" = (event_data->'atomicResolution')::integer; + perpetual_market_record."liquidityTierId" = (event_data->'liquidityTier')::integer; + + UPDATE perpetual_markets + SET + "ticker" = perpetual_market_record."ticker", + "marketId" = perpetual_market_record."marketId", + "atomicResolution" = perpetual_market_record."atomicResolution", + "liquidityTierId" = perpetual_market_record."liquidityTierId" + WHERE "id" = perpetual_market_id + RETURNING * INTO perpetual_market_record; + + IF NOT FOUND THEN + RAISE EXCEPTION 'Could not find perpetual market with corresponding id %', perpetual_market_id; + END IF; + + RETURN jsonb_build_object( + 'perpetual_market', + dydx_to_jsonb(perpetual_market_record) + ); +END; +$$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/indexer/services/ender/src/scripts/dydx_uuid_from_funding_index_update_parts.sql b/indexer/services/ender/src/scripts/dydx_uuid_from_funding_index_update_parts.sql new file mode 100644 index 0000000000..dfcf4455c0 --- /dev/null +++ b/indexer/services/ender/src/scripts/dydx_uuid_from_funding_index_update_parts.sql @@ -0,0 +1,8 @@ +/** + Returns a UUID using the parts of a funding index update. +*/ +CREATE OR REPLACE FUNCTION dydx_uuid_from_funding_index_update_parts(block_height int, event_id bytea, perpetual_id bigint) RETURNS uuid AS $$ +BEGIN + return dydx_uuid(concat(block_height, '-', encode(event_id, 'hex'), '-', perpetual_id)); +END; +$$ LANGUAGE plpgsql IMMUTABLE PARALLEL SAFE; diff --git a/indexer/services/ender/src/scripts/dydx_uuid_from_oracle_price_parts.sql b/indexer/services/ender/src/scripts/dydx_uuid_from_oracle_price_parts.sql new file mode 100644 index 0000000000..52ed712276 --- /dev/null +++ b/indexer/services/ender/src/scripts/dydx_uuid_from_oracle_price_parts.sql @@ -0,0 +1,8 @@ +/** + Returns a UUID using the parts of an OraclePrice (https://github.com/dydxprotocol/v4-chain/blob/755b0b928be793072d19eb3a1608e7a2503f396a/indexer/packages/postgres/src/stores/oracle-price-table.ts#L24). +*/ +CREATE OR REPLACE FUNCTION dydx_uuid_from_oracle_price_parts(market_id int, block_height int) RETURNS uuid AS $$ +BEGIN + return dydx_uuid(concat(market_id, '-', block_height)); +END; +$$ LANGUAGE plpgsql IMMUTABLE PARALLEL SAFE; \ No newline at end of file diff --git a/indexer/services/ender/src/scripts/dydx_uuid_from_transfer_parts.sql b/indexer/services/ender/src/scripts/dydx_uuid_from_transfer_parts.sql new file mode 100644 index 0000000000..16870fed82 --- /dev/null +++ b/indexer/services/ender/src/scripts/dydx_uuid_from_transfer_parts.sql @@ -0,0 +1,34 @@ +/** + Returns a UUID using the parts of a transfer. 
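+  NULL parts are substituted with the string 'undefined' to match the existing TypeScript
+  uuid helpers (see TODO(IND-483) in the body); e.g. a wallet-to-subaccount transfer hashes
+  'undefined-<recipient subaccount uuid>-<sender wallet address>-undefined-<event id hex>-<asset id>'.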
+*/ +CREATE OR REPLACE FUNCTION dydx_uuid_from_transfer_parts(event_id bytea, asset_id text, sender_subaccount_id uuid, recipient_subaccount_id uuid, sender_wallet_address text, recipient_wallet_address text) RETURNS uuid AS $$ +DECLARE + sender_subaccount_id_or_undefined text; + recipient_subaccount_id_or_undefined text; + sender_wallet_address_or_undefined text; + recipient_wallet_address_or_undefined text; +BEGIN + /** TODO(IND-483): Fix all uuid string substitutions to use Array.join so that we can drop the 'undefined' substitutions below. */ + IF sender_subaccount_id IS NULL THEN + sender_subaccount_id_or_undefined = 'undefined'; + ELSE + sender_subaccount_id_or_undefined = sender_subaccount_id; + END IF; + IF recipient_subaccount_id IS NULL THEN + recipient_subaccount_id_or_undefined = 'undefined'; + ELSE + recipient_subaccount_id_or_undefined = recipient_subaccount_id; + END IF; + IF sender_wallet_address IS NULL THEN + sender_wallet_address_or_undefined = 'undefined'; + ELSE + sender_wallet_address_or_undefined = sender_wallet_address; + END IF; + IF recipient_wallet_address IS NULL THEN + recipient_wallet_address_or_undefined = 'undefined'; + ELSE + recipient_wallet_address_or_undefined = recipient_wallet_address; + END IF; + return dydx_uuid(concat(sender_subaccount_id_or_undefined, '-', recipient_subaccount_id_or_undefined, '-', sender_wallet_address_or_undefined, '-', recipient_wallet_address_or_undefined, '-', encode(event_id, 'hex'), '-', asset_id)); +END; +$$ LANGUAGE plpgsql IMMUTABLE PARALLEL SAFE; diff --git a/indexer/services/vulcan/__tests__/handlers/order-place-handler.test.ts b/indexer/services/vulcan/__tests__/handlers/order-place-handler.test.ts index 2010a3adb7..d05feb2abd 100644 --- a/indexer/services/vulcan/__tests__/handlers/order-place-handler.test.ts +++ b/indexer/services/vulcan/__tests__/handlers/order-place-handler.test.ts @@ -41,6 +41,7 @@ import { CanceledOrdersCache, updateOrder, StatefulOrderUpdatesCache, + CanceledOrderStatus, } from '@dydxprotocol-indexer/redis'; import { @@ -58,10 +59,10 @@ import Long from 'long'; import { convertToRedisOrder, getTriggerPrice } from '../../src/handlers/helpers'; import { redisClient, redisClient as client } from '../../src/helpers/redis/redis-controller'; import { onMessage } from '../../src/lib/on-message'; -import { expectCanceledOrdersCacheEmpty, expectOpenOrderIds, handleInitialOrderPlace } from '../helpers/helpers'; +import { expectCanceledOrderStatus, expectOpenOrderIds, handleInitialOrderPlace } from '../helpers/helpers'; import { expectOffchainUpdateMessage, expectWebsocketOrderbookMessage, expectWebsocketSubaccountMessage } from '../helpers/websocket-helpers'; import { OrderbookSide } from '../../src/lib/types'; -import { getOrderIdHash } from '@dydxprotocol-indexer/v4-proto-parser'; +import { getOrderIdHash, isStatefulOrder } from '@dydxprotocol-indexer/v4-proto-parser'; jest.mock('@dydxprotocol-indexer/base', () => ({ ...jest.requireActual('@dydxprotocol-indexer/base'), @@ -178,7 +179,7 @@ describe('order-place-handler', () => { ]); jest.spyOn(stats, 'timing'); jest.spyOn(OrderbookLevelsCache, 'updatePriceLevel'); - jest.spyOn(CanceledOrdersCache, 'removeOrderFromCache'); + jest.spyOn(CanceledOrdersCache, 'removeOrderFromCaches'); jest.spyOn(stats, 'increment'); jest.spyOn(redisPackage, 'placeOrder'); jest.spyOn(logger, 'error'); @@ -381,9 +382,9 @@ describe('order-place-handler', () => { ); expect(OrderbookLevelsCache.updatePriceLevel).not.toHaveBeenCalled(); if (hasCanceledOrderId) { - 
expect(CanceledOrdersCache.removeOrderFromCache).toHaveBeenCalled(); + expect(CanceledOrdersCache.removeOrderFromCaches).toHaveBeenCalled(); } - await expectCanceledOrdersCacheEmpty(expectedOrderUuid); + await expectCanceledOrderStatus(expectedOrderUuid, CanceledOrderStatus.NOT_CANCELED); expect(logger.error).not.toHaveBeenCalled(); expectWebsocketMessagesSent( @@ -748,21 +749,18 @@ describe('order-place-handler', () => { 'good-til-block-time', redisTestConstants.defaultOrderGoodTilBlockTime, redisTestConstants.defaultRedisOrderGoodTilBlockTime, - redisTestConstants.defaultOrderUuidGoodTilBlockTime, dbOrderGoodTilBlockTime, ], [ 'conditional', redisTestConstants.defaultConditionalOrder, redisTestConstants.defaultRedisOrderConditional, - redisTestConstants.defaultOrderUuidConditional, dbConditionalOrder, ], ])('handles order place with OPEN placement status, exists initially (with %s)', async ( _name: string, orderToPlace: IndexerOrder, expectedRedisOrder: RedisOrder, - expectedOrderUuid: string, placedOrder: OrderFromDatabase, ) => { synchronizeWrapBackgroundTask(wrapBackgroundTask); @@ -812,6 +810,39 @@ describe('order-place-handler', () => { expectStats(); }); + it('handles unplaced and unreplaced order place with BEST_EFFORT_OPENED placement status', async () => { + synchronizeWrapBackgroundTask(wrapBackgroundTask); + const producerSendSpy: jest.SpyInstance = jest.spyOn(producer, 'send').mockReturnThis(); + // Handle the order place event for the initial order with BEST_EFFORT_OPENED + await handleInitialOrderPlace(redisTestConstants.orderPlace); + expectWebsocketMessagesSent( + producerSendSpy, + redisTestConstants.defaultRedisOrder, + dbDefaultOrder, + testConstants.defaultPerpetualMarket, + APIOrderStatusEnum.BEST_EFFORT_OPENED, + true, + ); + expectStats(); + // clear mocks + jest.clearAllMocks(); + + // Handle the order place with OPEN placement status + await handleInitialOrderPlace(redisTestConstants.orderPlace); + expectWebsocketMessagesSent( + producerSendSpy, + redisTestConstants.defaultRedisOrder, + dbDefaultOrder, + testConstants.defaultPerpetualMarket, + APIOrderStatusEnum.BEST_EFFORT_OPENED, + // Subaccount messages should be sent for stateful order with OPEN status + false, + ); + + expect(logger.error).not.toHaveBeenCalled(); + expectStats(); + }); + it.each([ [ 'missing order', @@ -1047,6 +1078,7 @@ function expectWebsocketMessagesSent( const orderTIF: TimeInForce = protocolTranslations.protocolOrderTIFToTIF( redisOrder.order!.timeInForce, ); + const isStateful: boolean = isStatefulOrder(redisOrder.order!.orderId!.orderFlags); const contents: SubaccountMessageContents = { orders: [ { @@ -1073,7 +1105,9 @@ function expectWebsocketMessagesSent( ?.toString(), goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(redisOrder.order!), ticker: redisOrder.ticker, - ...(dbOrder.createdAtHeight && { createdAtHeight: dbOrder.createdAtHeight }), + ...(isStateful && { createdAtHeight: dbOrder.createdAtHeight }), + ...(isStateful && { updatedAt: dbOrder.updatedAt }), + ...(isStateful && { updatedAtHeight: dbOrder.updatedAtHeight }), clientMetadata: redisOrder.order!.clientMetadata.toString(), triggerPrice: getTriggerPrice(redisOrder.order!, perpetualMarket), }, diff --git a/indexer/services/vulcan/__tests__/handlers/order-remove-handler.test.ts b/indexer/services/vulcan/__tests__/handlers/order-remove-handler.test.ts index 095e0901e1..7d62beac72 100644 --- a/indexer/services/vulcan/__tests__/handlers/order-remove-handler.test.ts +++ 
b/indexer/services/vulcan/__tests__/handlers/order-remove-handler.test.ts @@ -37,8 +37,10 @@ import { placeOrder, redis, redisTestConstants, + StateFilledQuantumsCache, SubaccountOrderIdsCache, updateOrder, + CanceledOrderStatus, } from '@dydxprotocol-indexer/redis'; import { OffChainUpdateV1, @@ -58,8 +60,7 @@ import { OrderRemoveHandler } from '../../src/handlers/order-remove-handler'; import { OrderbookSide } from '../../src/lib/types'; import { redisClient } from '../../src/helpers/redis/redis-controller'; import { - expectCanceledOrdersCacheEmpty, - expectCanceledOrdersCacheFound, + expectCanceledOrderStatus, expectOpenOrderIds, expectOrderbookLevelCache, handleOrderUpdate, @@ -308,7 +309,7 @@ describe('OrderRemoveHandler', () => { expectSubaccountsOrderIdsCacheEmpty(redisTestConstants.defaultSubaccountUuid), // Check order is removed from open orders cache expectOpenOrderIds(testConstants.defaultPerpetualMarket.clobPairId, []), - expectCanceledOrdersCacheFound(expectedOrderUuid), + expectCanceledOrderStatus(expectedOrderUuid, CanceledOrderStatus.CANCELED), ]); // Subaccounts message is sent first followed by orderbooks message @@ -340,6 +341,9 @@ describe('OrderRemoveHandler', () => { goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(removedRedisOrder.order!), ticker: redisTestConstants.defaultRedisOrder.ticker, removalReason: OrderRemovalReason[defaultOrderRemove.reason], + createdAtHeight: removedOrder.createdAtHeight, + updatedAt: removedOrder.updatedAt, + updatedAtHeight: removedOrder.updatedAtHeight, clientMetadata: removedRedisOrder.order!.clientMetadata.toString(), triggerPrice, }, @@ -442,6 +446,7 @@ describe('OrderRemoveHandler', () => { expectOrdersCacheEmpty(expectedOrderUuid), expectOrdersDataCacheEmpty(removedOrderId), expectSubaccountsOrderIdsCacheEmpty(redisTestConstants.defaultSubaccountUuid), + expectCanceledOrderStatus(expectedOrderUuid, CanceledOrderStatus.BEST_EFFORT_CANCELED), ]); // Subaccounts message is sent first followed by orderbooks message @@ -473,6 +478,9 @@ describe('OrderRemoveHandler', () => { goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(removedRedisOrder.order!), ticker: redisTestConstants.defaultRedisOrder.ticker, removalReason: OrderRemovalReason[defaultOrderRemove.reason], + createdAtHeight: removedOrder.createdAtHeight, + updatedAt: removedOrder.updatedAt, + updatedAtHeight: removedOrder.updatedAtHeight, clientMetadata: removedRedisOrder.order!.clientMetadata.toString(), triggerPrice, }, @@ -577,6 +585,7 @@ describe('OrderRemoveHandler', () => { expectOrdersCacheEmpty(expectedOrderUuid), expectOrdersDataCacheEmpty(removedOrderId), expectSubaccountsOrderIdsCacheEmpty(redisTestConstants.defaultSubaccountUuid), + expectCanceledOrderStatus(expectedOrderUuid, CanceledOrderStatus.CANCELED), ]); // Subaccounts message is sent first followed by orderbooks message @@ -607,6 +616,9 @@ describe('OrderRemoveHandler', () => { goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(removedRedisOrder.order!), ticker: redisTestConstants.defaultRedisOrder.ticker, removalReason: OrderRemovalReason[defaultOrderRemove.reason], + createdAtHeight: removedOrder.createdAtHeight, + updatedAt: removedOrder.updatedAt, + updatedAtHeight: removedOrder.updatedAtHeight, clientMetadata: removedRedisOrder.order!.clientMetadata.toString(), triggerPrice, }], @@ -711,6 +723,7 @@ describe('OrderRemoveHandler', () => { expectOrdersCacheEmpty(expectedOrderUuid), expectOrdersDataCacheEmpty(removedOrderId), 
         expectSubaccountsOrderIdsCacheEmpty(redisTestConstants.defaultSubaccountUuid),
+        expectCanceledOrderStatus(expectedOrderUuid, CanceledOrderStatus.CANCELED),
       ]);
 
       // Subaccounts message is sent first followed by orderbooks message
@@ -742,6 +755,9 @@ describe('OrderRemoveHandler', () => {
           goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(removedRedisOrder.order!),
           ticker: redisTestConstants.defaultRedisOrder.ticker,
           removalReason: OrderRemovalReason[defaultOrderRemove.reason],
+          createdAtHeight: removedOrder.createdAtHeight,
+          updatedAt: removedOrder.updatedAt,
+          updatedAtHeight: removedOrder.updatedAtHeight,
           clientMetadata: removedRedisOrder.order!.clientMetadata.toString(),
           triggerPrice,
         }],
@@ -766,27 +782,36 @@ describe('OrderRemoveHandler', () => {
     [
       'goodTilBlock',
       redisTestConstants.defaultOrderId,
-      testConstants.defaultOrder,
+      {
+        ...testConstants.defaultOrder,
+        status: OrderStatus.FILLED,
+      },
       redisTestConstants.defaultRedisOrder,
       redisTestConstants.defaultOrderUuid,
     ],
     [
       'goodTilBlockTime',
       redisTestConstants.defaultOrderIdGoodTilBlockTime,
-      testConstants.defaultOrderGoodTilBlockTime,
+      {
+        ...testConstants.defaultOrderGoodTilBlockTime,
+        status: OrderStatus.FILLED,
+      },
       redisTestConstants.defaultRedisOrderGoodTilBlockTime,
       redisTestConstants.defaultOrderUuidGoodTilBlockTime,
     ],
     [
       'conditional',
       redisTestConstants.defaultOrderIdConditional,
-      testConstants.defaultConditionalOrder,
+      {
+        ...testConstants.defaultConditionalOrder,
+        status: OrderStatus.FILLED,
+      },
       redisTestConstants.defaultRedisOrderConditional,
       redisTestConstants.defaultOrderUuidConditional,
     ],
   ])(
-    'does not send subaccount message for fully-filled orders for best effort user cancel ' +
-    '(with %s)',
+    'does not send subaccount message for orders fully-filled in state for best effort ' +
+    'user cancel (with %s)',
     async (
       _name: string,
       removedOrderId: IndexerOrderId,
@@ -810,6 +835,11 @@ describe('OrderRemoveHandler', () => {
          sizeDeltaInQuantums: defaultQuantums.toString(),
          client: redisClient,
        }),
+        StateFilledQuantumsCache.updateStateFilledQuantums(
+          expectedOrderUuid,
+          removedRedisOrder.order!.quantums.toString(),
+          redisClient,
+        ),
       ]);
 
       const fullyFilledUpdate: redisTestConstants.OffChainUpdateOrderUpdateUpdateMessage = {
@@ -835,7 +865,7 @@ describe('OrderRemoveHandler', () => {
       await orderRemoveHandler.handleUpdate(offChainUpdate);
 
       await Promise.all([
-        expectOrderStatus(expectedOrderUuid, OrderStatus.BEST_EFFORT_CANCELED),
+        expectOrderStatus(expectedOrderUuid, removedOrder.status),
         // orderbook should not be affected, so it will be set to defaultQuantums
         expectOrderbookLevelCache(
           removedRedisOrder.ticker,
@@ -846,12 +876,13 @@ describe('OrderRemoveHandler', () => {
         expectOrdersCacheEmpty(expectedOrderUuid),
         expectOrdersDataCacheEmpty(removedOrderId),
         expectSubaccountsOrderIdsCacheEmpty(redisTestConstants.defaultSubaccountUuid),
+        expectCanceledOrderStatus(expectedOrderUuid, CanceledOrderStatus.BEST_EFFORT_CANCELED),
       ]);
 
       // no orderbook message because no change in orderbook levels
       expectNoWebsocketMessagesSent(producerSendSpy);
       expect(logger.error).not.toHaveBeenCalled();
-      expectTimingStats(true, true);
+      expectTimingStats(true, false);
     },
   );
 
@@ -928,6 +959,7 @@ describe('OrderRemoveHandler', () => {
         expectOrdersCacheEmpty(expectedOrderUuid),
         expectOrdersDataCacheEmpty(removedOrderId),
         expectSubaccountsOrderIdsCacheEmpty(redisTestConstants.defaultSubaccountUuid),
+        expectCanceledOrderStatus(expectedOrderUuid, CanceledOrderStatus.NOT_CANCELED),
       ]);
 
       // no orderbook message because no change in orderbook levels
@@ -1021,6 +1053,9 @@ describe('OrderRemoveHandler', () => {
           goodTilBlockTime: removedOrder.goodTilBlockTime,
           ticker: removedRedisOrder.ticker,
           removalReason: OrderRemovalReason[statefulCancelationOrderRemove.reason],
+          createdAtHeight: removedOrder.createdAtHeight,
+          updatedAt: removedOrder.updatedAt,
+          updatedAtHeight: removedOrder.updatedAtHeight,
           clientMetadata: removedOrder.clientMetadata.toString(),
           triggerPrice,
         }],
@@ -1107,7 +1142,7 @@ describe('OrderRemoveHandler', () => {
         expectOrdersCacheEmpty(expectedOrderUuid),
         expectOrdersDataCacheEmpty(removedOrderId),
         expectSubaccountsOrderIdsCacheEmpty(redisTestConstants.defaultSubaccountUuid),
-        expectCanceledOrdersCacheEmpty(expectedOrderUuid),
+        expectCanceledOrderStatus(expectedOrderUuid, CanceledOrderStatus.NOT_CANCELED),
       ]);
 
       // Subaccounts message is sent first followed by orderbooks message
@@ -1132,6 +1167,9 @@ describe('OrderRemoveHandler', () => {
           goodTilBlockTime: removedOrder.goodTilBlockTime,
           ticker: removedRedisOrder.ticker,
           removalReason: OrderRemovalReason[statefulCancelationOrderRemove.reason],
+          createdAtHeight: removedOrder.createdAtHeight,
+          updatedAt: removedOrder.updatedAt,
+          updatedAtHeight: removedOrder.updatedAtHeight,
           clientMetadata: removedOrder.clientMetadata.toString(),
           triggerPrice,
         }],
@@ -1226,6 +1264,7 @@ describe('OrderRemoveHandler', () => {
         expectOrdersCacheEmpty(expectedOrderUuid),
         expectOrdersDataCacheEmpty(removedOrderId),
         expectSubaccountsOrderIdsCacheEmpty(redisTestConstants.defaultSubaccountUuid),
+        expectCanceledOrderStatus(expectedOrderUuid, CanceledOrderStatus.NOT_CANCELED),
       ]);
 
       // Subaccounts message is sent first followed by orderbooks message
@@ -1250,6 +1289,9 @@ describe('OrderRemoveHandler', () => {
           goodTilBlockTime: removedOrder.goodTilBlockTime,
           ticker: removedRedisOrder.ticker,
           removalReason: OrderRemovalReason[statefulCancelationOrderRemove.reason],
+          createdAtHeight: removedOrder.createdAtHeight,
+          updatedAt: removedOrder.updatedAt,
+          updatedAtHeight: removedOrder.updatedAtHeight,
           clientMetadata: removedOrder.clientMetadata.toString(),
           triggerPrice,
         }],
@@ -1360,6 +1402,7 @@ describe('OrderRemoveHandler', () => {
         expectOrdersCacheEmpty(expectedOrderUuid),
         expectOrdersDataCacheEmpty(removedOrderId),
         expectSubaccountsOrderIdsCacheEmpty(redisTestConstants.defaultSubaccountUuid),
+        expectCanceledOrderStatus(expectedOrderUuid, CanceledOrderStatus.NOT_CANCELED),
       ]);
 
       // Subaccounts message is sent first followed by orderbooks message
@@ -1384,6 +1427,9 @@ describe('OrderRemoveHandler', () => {
           goodTilBlockTime: removedOrder.goodTilBlockTime,
           ticker: removedRedisOrder.ticker,
           removalReason: OrderRemovalReason[statefulCancelationOrderRemove.reason],
+          createdAtHeight: removedOrder.createdAtHeight,
+          updatedAt: removedOrder.updatedAt,
+          updatedAtHeight: removedOrder.updatedAtHeight,
           clientMetadata: removedOrder.clientMetadata.toString(),
           triggerPrice,
         }],
@@ -1485,6 +1531,7 @@ describe('OrderRemoveHandler', () => {
         expectOrdersCacheEmpty(expectedOrderUuid),
         expectOrdersDataCacheEmpty(removedOrderId),
         expectSubaccountsOrderIdsCacheEmpty(redisTestConstants.defaultSubaccountUuid),
+        expectCanceledOrderStatus(expectedOrderUuid, CanceledOrderStatus.CANCELED),
       ]);
 
       // Subaccounts message is sent first followed by orderbooks message
@@ -1514,6 +1561,9 @@ describe('OrderRemoveHandler', () => {
             goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(removedRedisOrder.order!),
             ticker: redisTestConstants.defaultRedisOrder.ticker,
             removalReason: OrderRemovalReason[indexerExpiredOrderRemoved.reason],
+            createdAtHeight: removedOrder.createdAtHeight,
+            updatedAt: removedOrder.updatedAt,
+            updatedAtHeight: removedOrder.updatedAtHeight,
             clientMetadata: testConstants.defaultOrderGoodTilBlockTime.clientMetadata.toString(),
           },
         ],
@@ -1545,7 +1595,10 @@ describe('OrderRemoveHandler', () => {
     it('successfully removes fully filled expired order and does not send websocket message',
       async () => {
        const removedOrderId: IndexerOrderId = redisTestConstants.defaultOrderId;
-       const removedOrder: OrderCreateObject = indexerExpiredDefaultOrder;
+       const removedOrder: OrderCreateObject = {
+         ...indexerExpiredDefaultOrder,
+         status: OrderStatus.FILLED,
+       };
        const removedRedisOrder: RedisOrder = redisTestConstants.defaultRedisOrder;
        const expectedOrderUuid: string = redisTestConstants.defaultOrderUuid;
 
@@ -1566,6 +1619,11 @@ describe('OrderRemoveHandler', () => {
          sizeDeltaInQuantums: orderbookLevel,
          client: redisClient,
        }),
+        StateFilledQuantumsCache.updateStateFilledQuantums(
+          expectedOrderUuid,
+          removedRedisOrder.order!.quantums.toString(),
+          redisClient,
+        ),
       ]);
 
       await Promise.all([
@@ -1592,7 +1650,7 @@ describe('OrderRemoveHandler', () => {
         orderbookLevel,
       ).toString();
       await Promise.all([
-        expectOrderStatus(expectedOrderUuid, OrderStatus.CANCELED),
+        expectOrderStatus(expectedOrderUuid, removedOrder.status),
         expectOrderbookLevelCache(
           removedRedisOrder.ticker,
           OrderSide.BUY,
@@ -1602,9 +1660,10 @@ describe('OrderRemoveHandler', () => {
         expectOrdersCacheEmpty(expectedOrderUuid),
         expectOrdersDataCacheEmpty(removedOrderId),
         expectSubaccountsOrderIdsCacheEmpty(redisTestConstants.defaultSubaccountUuid),
+        expectCanceledOrderStatus(expectedOrderUuid, CanceledOrderStatus.CANCELED),
       ]);
       expectNoWebsocketMessagesSent(producerSendSpy);
-      expectTimingStats(true, true);
+      expectTimingStats(true, false);
     });
 
     it('error: when latest block not found, log and exit', async () => {
@@ -1663,6 +1722,7 @@ describe('OrderRemoveHandler', () => {
         expectOrdersCacheFound(expectedOrderUuid),
         expectOrdersDataCacheFound(removedOrderId),
         expectSubaccountsOrderIdsCacheFound(redisTestConstants.defaultSubaccountUuid),
+        expectCanceledOrderStatus(expectedOrderUuid, CanceledOrderStatus.NOT_CANCELED),
       ]);
 
       expectTimingStats(false, false, false, false, true);
@@ -1725,6 +1785,7 @@ describe('OrderRemoveHandler', () => {
         expectOrdersCacheFound(expectedOrderUuid),
         expectOrdersDataCacheFound(removedOrderId),
         expectSubaccountsOrderIdsCacheFound(redisTestConstants.defaultSubaccountUuid),
+        expectCanceledOrderStatus(expectedOrderUuid, CanceledOrderStatus.NOT_CANCELED),
       ]);
 
       expectTimingStats(false, false, false, false, true, true);
@@ -1804,6 +1865,7 @@ describe('OrderRemoveHandler', () => {
         expectOrdersCacheFound(expectedOrderUuid),
         expectOrdersDataCacheFound(removedOrderId),
         expectSubaccountsOrderIdsCacheFound(redisTestConstants.defaultSubaccountUuid),
+        expectCanceledOrderStatus(expectedOrderUuid, CanceledOrderStatus.NOT_CANCELED),
       ]);
 
       expectTimingStats(false, false, false, false, true, true);
@@ -1860,6 +1922,7 @@ describe('OrderRemoveHandler', () => {
         expectOrdersCacheFound(expectedOrderUuid),
         expectOrdersDataCacheFound(removedOrderId),
         expectSubaccountsOrderIdsCacheFound(redisTestConstants.defaultSubaccountUuid),
+        expectCanceledOrderStatus(expectedOrderUuid, CanceledOrderStatus.NOT_CANCELED),
       ]);
 
       expectTimingStats(false, false, false, false, true, true);
diff --git a/indexer/services/vulcan/__tests__/helpers/helpers.ts b/indexer/services/vulcan/__tests__/helpers/helpers.ts
index f0b74080fb..370949bdf0 100644
--- a/indexer/services/vulcan/__tests__/helpers/helpers.ts
+++ b/indexer/services/vulcan/__tests__/helpers/helpers.ts
@@ -5,6 +5,7 @@ import {
   redisTestConstants,
   OrderbookLevelsCache,
   CanceledOrdersCache,
+  CanceledOrderStatus,
 } from '@dydxprotocol-indexer/redis';
 import { OffChainUpdateV1 } from '@dydxprotocol-indexer/v4-protos';
 import { KafkaMessage } from 'kafkajs';
@@ -80,16 +81,11 @@ export function setTransactionHash(
   return messageWithTxhash;
 }
 
-export async function expectCanceledOrdersCacheFound(
+export async function expectCanceledOrderStatus(
   orderId: string,
-): Promise<void> {
-  const orderExists: boolean = await CanceledOrdersCache.isOrderCanceled(orderId, redisClient);
-  expect(orderExists).toEqual(true);
-}
-
-export async function expectCanceledOrdersCacheEmpty(
-  orderId: string,
-): Promise<void> {
-  const orderExists: boolean = await CanceledOrdersCache.isOrderCanceled(orderId, redisClient);
-  expect(orderExists).toEqual(false);
+  canceledOrderStatus: CanceledOrderStatus,
+): Promise<void> {
+  expect(await CanceledOrdersCache.getOrderCanceledStatus(orderId, redisClient)).toEqual(
+    canceledOrderStatus,
+  );
 }
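
Reviewer note: the old found/empty pair of assertions collapses into a single status assertion, which can also distinguish best-effort cancels from confirmed cancels. A minimal usage sketch (a hypothetical test body, not part of this diff; assumes the helpers' redisClient and stand-in order UUIDs):

    // Record a confirmed cancelation, then assert the exact status rather than a boolean.
    await CanceledOrdersCache.addCanceledOrderId('some-order-uuid', Date.now(), redisClient);
    await expectCanceledOrderStatus('some-order-uuid', CanceledOrderStatus.CANCELED);
    // A UUID never added to either cancelation set reports NOT_CANCELED.
    await expectCanceledOrderStatus('untracked-order-uuid', CanceledOrderStatus.NOT_CANCELED);
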
diff --git a/indexer/services/vulcan/src/handlers/helpers.ts b/indexer/services/vulcan/src/handlers/helpers.ts
index 13639535fb..156b79119a 100644
--- a/indexer/services/vulcan/src/handlers/helpers.ts
+++ b/indexer/services/vulcan/src/handlers/helpers.ts
@@ -4,6 +4,7 @@ import {
   protocolTranslations,
 } from '@dydxprotocol-indexer/postgres';
 import { subticksToPrice } from '@dydxprotocol-indexer/postgres/build/src/lib/protocol-translations';
+import { StateFilledQuantumsCache } from '@dydxprotocol-indexer/redis';
 import {
   IndexerOrder,
   IndexerOrder_ConditionType,
@@ -11,7 +12,9 @@ import {
   RedisOrder,
   RedisOrder_TickerType,
 } from '@dydxprotocol-indexer/v4-protos';
+import Big from 'big.js';
 
+import { redisClient } from '../helpers/redis/redis-controller';
 import { OrderbookSide } from '../lib/types';
 
 /**
@@ -65,3 +68,26 @@ export function orderSideToOrderbookSide(
 ): OrderbookSide {
   return orderSide === IndexerOrder_Side.SIDE_BUY ? OrderbookSide.BIDS : OrderbookSide.ASKS;
 }
+
+/**
+ * Gets the remaining quantums for an order, based on the amount of the order filled in state.
+ * @param order
+ * @returns the order's total quantums minus its state-filled quantums
+ */
+export async function getStateRemainingQuantums(
+  order: RedisOrder,
+): Promise<Big> {
+  const orderQuantums: Big = Big(order.order!.quantums.toString());
+  const stateFilledQuantums: Big = convertToBig(
+    await StateFilledQuantumsCache.getStateFilledQuantums(order.id, redisClient),
+  );
+  return orderQuantums.minus(stateFilledQuantums);
+}
+
+function convertToBig(value: string | undefined): Big {
+  if (value === undefined) {
+    return Big(0);
+  } else {
+    return Big(value);
+  }
+}
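
The new helper's arithmetic is simple: remaining = the order's total quantums minus whatever the StateFilledQuantumsCache reports, with a cache miss treated as zero filled. A hedged behavior sketch (the values and the `order` variable are made up for illustration; `order` stands in for a RedisOrder with 1000 quantums):

    // Hypothetical: a 1000-quantum order with 400 quantums already filled in state.
    await StateFilledQuantumsCache.updateStateFilledQuantums(order.id, '400', redisClient);
    const remaining: Big = await getStateRemainingQuantums(order); // Big('600')
    // With no cache entry, getStateFilledQuantums resolves to undefined,
    // convertToBig maps that to Big(0), and remaining equals the full 1000.
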
diff --git a/indexer/services/vulcan/src/handlers/order-place-handler.ts b/indexer/services/vulcan/src/handlers/order-place-handler.ts
index 6f9ba098a2..699bf933fb 100644
--- a/indexer/services/vulcan/src/handlers/order-place-handler.ts
+++ b/indexer/services/vulcan/src/handlers/order-place-handler.ts
@@ -16,6 +16,7 @@ import {
   perpetualMarketRefresher,
   protocolTranslations,
   OrderFromDatabase,
+  IsoString,
 } from '@dydxprotocol-indexer/postgres';
 import {
   OpenOrdersCache,
@@ -64,9 +65,10 @@ export class OrderPlaceHandler extends Handler {
         update,
         txHash: this.txHash,
       });
+      const orderPlace: OrderPlaceV1 = update.orderPlace!;
       this.validateOrderPlace(update.orderPlace!);
-      const order: IndexerOrder = update.orderPlace!.order!;
-      const placementStatus: OrderPlaceV1_OrderPlacementStatus = update.orderPlace!.placementStatus;
+      const order: IndexerOrder = orderPlace.order!;
+      const placementStatus: OrderPlaceV1_OrderPlacementStatus = orderPlace.placementStatus;
       const perpetualMarket: PerpetualMarketFromDatabase | undefined = perpetualMarketRefresher
         .getPerpetualMarketFromClobPairId(order.orderId!.clobPairId.toString());
@@ -125,7 +127,7 @@ export class OrderPlaceHandler extends Handler {
       // TODO(CLOB-597): Remove this logic and log errors once best-effort-open is not sent for
       // stateful orders in the protocol
-      if (this.shouldSendSubaccountMessage(update.orderPlace!)) {
+      if (this.shouldSendSubaccountMessage(orderPlace, placeOrderResult, placementStatus)) {
         // TODO(IND-171): Determine whether we should always be sending a message, even when the cache
         // isn't updated.
         // For stateful and conditional orders, look the order up in the db for the createdAtHeight
@@ -279,6 +281,8 @@
         : APIOrderStatusEnum.BEST_EFFORT_OPENED
     );
     const createdAtHeight: string | undefined = order?.createdAtHeight;
+    const updatedAt: IsoString | undefined = order?.updatedAt;
+    const updatedAtHeight: string | undefined = order?.updatedAtHeight;
     const contents: SubaccountMessageContents = {
       orders: [
         {
@@ -304,6 +308,8 @@
           goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(redisOrder.order!),
           ticker: redisOrder.ticker,
           ...(createdAtHeight && { createdAtHeight }),
+          ...(updatedAt && { updatedAt }),
+          ...(updatedAtHeight && { updatedAtHeight }),
           clientMetadata: redisOrder.order!.clientMetadata.toString(),
           triggerPrice: getTriggerPrice(redisOrder.order!, perpetualMarket),
         },
@@ -325,7 +331,11 @@
    * @returns TODO(CLOB-597): Remove once best-effort-opened messages are not sent for stateful
    * orders.
    */
-  protected shouldSendSubaccountMessage(orderPlace: OrderPlaceV1): boolean {
+  protected shouldSendSubaccountMessage(
+    orderPlace: OrderPlaceV1,
+    placeOrderResult: PlaceOrderResult,
+    placementStatus: OrderPlaceV1_OrderPlacementStatus,
+  ): boolean {
     const orderFlags: number = orderPlace.order!.orderId!.orderFlags;
     const status: OrderPlaceV1_OrderPlacementStatus = orderPlace.placementStatus;
     // Best-effort-opened status should only be sent for short-term orders
@@ -335,6 +345,16 @@
     ) {
       return false;
     }
+
+    // If a stateful order is placed with a more recent expiry than an existing order on the
+    // indexer, the order will not have been placed or replaced, and no subaccount message
+    // should be sent.
+    if (placeOrderResult.placed === false &&
+      placeOrderResult.replaced === false &&
+      placementStatus ===
+      OrderPlaceV1_OrderPlacementStatus.ORDER_PLACEMENT_STATUS_BEST_EFFORT_OPENED) {
+      return false;
+    }
     return true;
   }
 
@@ -349,7 +369,7 @@
     orderId: string,
   ): Promise<void> {
     await runFuncWithTimingStat(
-      CanceledOrdersCache.removeOrderFromCache(orderId, redisClient),
+      CanceledOrdersCache.removeOrderFromCaches(orderId, redisClient),
       this.generateTimingStatsOptions('remove_order_from_cancel_cache'),
     );
   }
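
The extra guard covers a replay edge case: when a stateful order is re-sent and Redis keeps the existing entry, the place-order result is neither placed nor replaced, so no subaccount update should go out. A sketch of the new rule in isolation (inputs are illustrative; the earlier short-term check still runs first):

    // With placementStatus === ORDER_PLACEMENT_STATUS_BEST_EFFORT_OPENED:
    //   placed === false && replaced === false -> returns false (new guard; stay silent)
    //   placed === true  || replaced === true  -> guard does not apply; later logic decides
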
diff --git a/indexer/services/vulcan/src/handlers/order-remove-handler.ts b/indexer/services/vulcan/src/handlers/order-remove-handler.ts
index 9bcd27dd53..0672b865e5 100644
--- a/indexer/services/vulcan/src/handlers/order-remove-handler.ts
+++ b/indexer/services/vulcan/src/handlers/order-remove-handler.ts
@@ -13,6 +13,7 @@ import {
   SubaccountTable,
   apiTranslations,
   TimeInForce,
+  IsoString,
 } from '@dydxprotocol-indexer/postgres';
 import {
   OpenOrdersCache,
@@ -39,7 +40,7 @@ import config from '../config';
 import { redisClient } from '../helpers/redis/redis-controller';
 import { sendMessageWrapper } from '../lib/send-message-helper';
 import { Handler } from './handler';
-import { getTriggerPrice } from './helpers';
+import { getStateRemainingQuantums, getTriggerPrice } from './helpers';
 
 /**
  * Handler for OrderRemove messages.
@@ -260,6 +261,9 @@ export class OrderRemoveHandler extends Handler {
       return;
     }
 
+    const stateRemainingQuantums: Big = await getStateRemainingQuantums(
+      removeOrderResult.removedOrder!,
+    );
     const perpetualMarket: PerpetualMarketFromDatabase | undefined = perpetualMarketRefresher
       .getPerpetualMarketFromTicker(removeOrderResult.removedOrder!.ticker);
     if (perpetualMarket === undefined) {
@@ -271,21 +275,31 @@
       return;
     }
 
-    await runFuncWithTimingStat(
-      this.cancelOrderInPostgres(orderRemove),
-      this.generateTimingStatsOptions('cancel_order_in_postgres'),
-    );
+    // If the remaining amount of the order in state is <= 0, the order is filled and
+    // does not need to have its status updated
+    let canceledOrder: OrderFromDatabase | undefined;
+    if (stateRemainingQuantums.gt(0)) {
+      canceledOrder = await runFuncWithTimingStat(
+        this.cancelOrderInPostgres(orderRemove),
+        this.generateTimingStatsOptions('cancel_order_in_postgres'),
+      );
+    } else {
+      canceledOrder = await runFuncWithTimingStat(
+        OrderTable.findById(OrderTable.orderIdToUuid(orderRemove.removedOrderId!)),
+        this.generateTimingStatsOptions('find_order'),
+      );
+    }
 
     const subaccountMessage: Message = {
       value: this.createSubaccountWebsocketMessageFromRemoveOrderResult(
         removeOrderResult,
+        canceledOrder,
         orderRemove,
         perpetualMarket,
       ),
     };
 
-    // TODO(IND-147): Remove this check once fully-filled orders are removed by ender
-    if (this.shouldSendSubaccountMessage(orderRemove, removeOrderResult)) {
+    if (this.shouldSendSubaccountMessage(orderRemove, removeOrderResult, stateRemainingQuantums)) {
       sendMessageWrapper(subaccountMessage, KafkaTopics.TO_WEBSOCKETS_SUBACCOUNTS);
     }
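
Because an order fully filled in state must keep its FILLED status, the handler now cancels in Postgres only when state still reports remaining size; otherwise it just reads the row so the websocket payload can carry createdAtHeight/updatedAt/updatedAtHeight. A condensed restatement of the branch above (no new behavior; the runFuncWithTimingStat wrappers are elided for brevity):

    const canceledOrder: OrderFromDatabase | undefined = stateRemainingQuantums.gt(0)
      // order still resting in state: mark it canceled / best-effort canceled in Postgres
      ? await this.cancelOrderInPostgres(orderRemove)
      // nothing left to cancel: fetch the (already FILLED) row for the message payload
      : await OrderTable.findById(OrderTable.orderIdToUuid(orderRemove.removedOrderId!));
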
@@ -300,7 +314,7 @@
     }
     // TODO: consolidate remove handler logic into a single lua script.
     await this.addOrderToCanceledOrdersCache(
-      OrderTable.orderIdToUuid(orderRemove.removedOrderId!),
+      orderRemove,
       Date.now(),
     );
   }
@@ -338,13 +352,28 @@
    * @protected
    */
   protected async addOrderToCanceledOrdersCache(
-    orderId: string,
+    orderRemove: OrderRemoveV1,
     timestampMs: number,
   ): Promise<void> {
-    await runFuncWithTimingStat(
-      CanceledOrdersCache.addCanceledOrderId(orderId, timestampMs, redisClient),
-      this.generateTimingStatsOptions('add_order_to_canceled_order_cache'),
-    );
+    const orderId: string = OrderTable.orderIdToUuid(orderRemove.removedOrderId!);
+
+    if (
+      orderRemove.removalStatus ===
+      OrderRemoveV1_OrderRemovalStatus.ORDER_REMOVAL_STATUS_BEST_EFFORT_CANCELED
+    ) {
+      await runFuncWithTimingStat(
+        CanceledOrdersCache.addBestEffortCanceledOrderId(orderId, timestampMs, redisClient),
+        this.generateTimingStatsOptions('add_order_to_canceled_order_cache'),
+      );
+    } else if (
+      orderRemove.removalStatus ===
+      OrderRemoveV1_OrderRemovalStatus.ORDER_REMOVAL_STATUS_CANCELED
+    ) {
+      await runFuncWithTimingStat(
+        CanceledOrdersCache.addCanceledOrderId(orderId, timestampMs, redisClient),
+        this.generateTimingStatsOptions('add_order_to_canceled_order_cache'),
+      );
+    }
   }
 
   /**
@@ -484,6 +513,7 @@
   protected createSubaccountWebsocketMessageFromRemoveOrderResult(
     removeOrderResult: RemoveOrderResult,
+    canceledOrder: OrderFromDatabase | undefined,
     orderRemove: OrderRemoveV1,
     perpetualMarket: PerpetualMarketFromDatabase,
   ): Buffer {
@@ -491,6 +521,9 @@
     const orderTIF: TimeInForce = protocolTranslations.protocolOrderTIFToTIF(
       redisOrder.order!.timeInForce,
     );
+    const createdAtHeight: string | undefined = canceledOrder?.createdAtHeight;
+    const updatedAt: IsoString | undefined = canceledOrder?.updatedAt;
+    const updatedAtHeight: string | undefined = canceledOrder?.updatedAtHeight;
     const contents: SubaccountMessageContents = {
       orders: [
         {
@@ -520,6 +553,9 @@
           goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(redisOrder.order!),
           ticker: redisOrder.ticker,
           removalReason: OrderRemovalReason[orderRemove.reason],
+          ...(createdAtHeight && { createdAtHeight }),
+          ...(updatedAt && { updatedAt }),
+          ...(updatedAtHeight && { updatedAtHeight }),
           clientMetadata: redisOrder.order!.clientMetadata.toString(),
           triggerPrice: getTriggerPrice(redisOrder.order!, perpetualMarket),
         },
@@ -563,6 +599,9 @@
           goodTilBlockTime: order.goodTilBlockTime ?? undefined,
           ticker: orderTicker,
           removalReason: OrderRemovalReason[orderRemove.reason],
+          createdAtHeight: order.createdAtHeight,
+          updatedAt: order.updatedAt,
+          updatedAtHeight: order.updatedAtHeight,
           clientMetadata: order.clientMetadata,
           triggerPrice: order.triggerPrice ?? undefined,
         },
@@ -601,21 +640,26 @@
   protected shouldSendSubaccountMessage(
     orderRemove: OrderRemoveV1,
     removeOrderResult: RemoveOrderResult,
+    stateRemainingQuantums: Big,
   ): boolean {
-    const remainingQuantums: Big = Big(this.getSizeDeltaInQuantums(
-      removeOrderResult,
-      removeOrderResult.removedOrder!,
-    ));
     const status: OrderRemoveV1_OrderRemovalStatus = orderRemove.removalStatus;
     const reason: OrderRemovalReason = orderRemove.reason;
+
+    logger.info({
+      at: 'orderRemoveHandler#shouldSendSubaccountMessage',
+      message: 'Compared state filled quantums and size',
+      stateRemainingQuantums: stateRemainingQuantums.toFixed(),
+      removeOrderResult,
+    });
+
     if (
-      remainingQuantums.eq(0) &&
+      stateRemainingQuantums.lte(0) &&
       status === OrderRemoveV1_OrderRemovalStatus.ORDER_REMOVAL_STATUS_BEST_EFFORT_CANCELED &&
       reason === OrderRemovalReason.ORDER_REMOVAL_REASON_USER_CANCELED
     ) {
       return false;
     } else if (
-      remainingQuantums.eq(0) &&
+      stateRemainingQuantums.lte(0) &&
      status === OrderRemoveV1_OrderRemovalStatus.ORDER_REMOVAL_STATUS_CANCELED &&
       reason === OrderRemovalReason.ORDER_REMOVAL_REASON_INDEXER_EXPIRED
     ) {
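
Reviewer note on the cache model these changes rely on: an order UUID now resolves to one of three CanceledOrderStatus values, depending on which cancelation set it was last added to. A hedged lifecycle sketch using only the helpers named in this diff (uuid is a stand-in):

    // Nothing recorded yet: getOrderCanceledStatus(uuid, redisClient) -> NOT_CANCELED
    await CanceledOrdersCache.addBestEffortCanceledOrderId(uuid, Date.now(), redisClient);
    // -> CanceledOrderStatus.BEST_EFFORT_CANCELED
    await CanceledOrdersCache.addCanceledOrderId(uuid, Date.now(), redisClient);
    // -> CanceledOrderStatus.CANCELED
    await CanceledOrdersCache.removeOrderFromCaches(uuid, redisClient);
    // -> back to CanceledOrderStatus.NOT_CANCELED
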