diff --git a/indexer/packages/postgres/src/models/fill-model.ts b/indexer/packages/postgres/src/models/fill-model.ts index c4fa9d2815..30927a6d57 100644 --- a/indexer/packages/postgres/src/models/fill-model.ts +++ b/indexer/packages/postgres/src/models/fill-model.ts @@ -84,7 +84,7 @@ export default class FillModel extends Model { transactionHash: { type: 'string' }, createdAt: { type: 'string', format: 'date-time' }, createdAtHeight: { type: 'string', pattern: IntegerPattern }, - clientMetadata: { type: 'string', pattern: IntegerPattern }, + clientMetadata: { type: ['string', 'null'], pattern: IntegerPattern }, fee: { type: 'string', pattern: NumericPattern }, }, }; diff --git a/indexer/services/ender/__tests__/handlers/order-fills/liquidation-handler.test.ts b/indexer/services/ender/__tests__/handlers/order-fills/liquidation-handler.test.ts index a88f11efdc..ca746bcac0 100644 --- a/indexer/services/ender/__tests__/handlers/order-fills/liquidation-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/order-fills/liquidation-handler.test.ts @@ -73,6 +73,7 @@ import { LiquidationHandler } from '../../../src/handlers/order-fills/liquidatio import { clearCandlesMap } from '../../../src/caches/candle-cache'; import Long from 'long'; import { createPostgresFunctions } from '../../../src/helpers/postgres/postgres-functions'; +import config from '../../../src/config'; const defaultClobPairId: string = testConstants.defaultPerpetualMarket.clobPairId; const defaultMakerFeeQuantum: number = 1_000_000; @@ -203,18 +204,32 @@ describe('LiquidationHandler', () => { it.each([ [ - 'goodTilBlock', + 'goodTilBlock via knex', { goodTilBlock: 10, - goodTilBlockTime: undefined, }, + false, ], [ - 'goodTilBlockTime', + 'goodTilBlock via SQL function', + { + goodTilBlock: 10, + }, + true, + ], + [ + 'goodTilBlockTime via knex', + { + goodTilBlockTime: 1_000_000_000, + }, + false, + ], + [ + 'goodTilBlockTime via SQL function', { - goodTilBlock: undefined, goodTilBlockTime: 
1_000_000_000, }, + true, ], ])( 'creates fills and orders (with %s), sends vulcan message for maker order update and updates ' + @@ -222,7 +237,9 @@ describe('LiquidationHandler', () => { async ( _name: string, goodTilOneof: Partial, + useSqlFunction: boolean, ) => { + config.USE_LIQUIDATION_HANDLER_SQL_FUNCTION = useSqlFunction; const transactionIndex: number = 0; const eventIndex: number = 0; const makerQuantums: number = 10_000_000; @@ -410,23 +427,45 @@ describe('LiquidationHandler', () => { expectCandlesUpdated(), ]); - expectTimingStats(); + if (!useSqlFunction) { + expectTimingStats(); + } }); it.each([ [ - 'goodTilBlock', + 'goodTilBlock via knex', { goodTilBlock: 10, }, + false, '5', undefined, ], [ - 'goodTilBlockTime', + 'goodTilBlock via SQL function', + { + goodTilBlock: 10, + }, + true, + '5', + undefined, + ], + [ + 'goodTilBlockTime via knex', { goodTilBlockTime: 1_000_000, }, + false, + undefined, + '1970-01-11T13:46:40.000Z', + ], + [ + 'goodTilBlockTime via SQL function', + { + goodTilBlockTime: 1_000_000, + }, + true, undefined, '1970-01-11T13:46:40.000Z', ], @@ -436,10 +475,13 @@ describe('LiquidationHandler', () => { async ( _name: string, goodTilOneof: Partial, + useSqlFunction: boolean, existingGoodTilBlock?: string, existingGoodTilBlockTime?: string, ) => { - // create initial orders + config.USE_LIQUIDATION_HANDLER_SQL_FUNCTION = useSqlFunction; + + // create initial orders const existingMakerOrder: OrderCreateObject = { subaccountId: testConstants.defaultSubaccountId, clientId: '0', @@ -456,7 +498,7 @@ describe('LiquidationHandler', () => { goodTilBlock: existingGoodTilBlock, goodTilBlockTime: existingGoodTilBlockTime, clientMetadata: '0', - updatedAt: defaultDateTime.toISO(), + updatedAt: DateTime.fromMillis(0).toISO(), updatedAtHeight: '0', }; @@ -626,150 +668,167 @@ describe('LiquidationHandler', () => { expectCandlesUpdated(), ]); - expectTimingStats(); + if (!useSqlFunction) { + expectTimingStats(); + } }); - it('creates fills and 
orders with fixed-point notation quoteAmount', async () => { - const transactionIndex: number = 0; - const eventIndex: number = 0; - const makerQuantums: number = 100; - const makerSubticks: number = 1_000_000; + it.each([ + [ + 'via knex', + false, + ], + [ + 'via SQL function', + true, + ], + ])( + 'creates fills and orders (%s) with fixed-point notation quoteAmount', + async ( + _name: string, + useSqlFunction: boolean, + ) => { + config.USE_LIQUIDATION_HANDLER_SQL_FUNCTION = useSqlFunction; + const transactionIndex: number = 0; + const eventIndex: number = 0; + const makerQuantums: number = 100; + const makerSubticks: number = 1_000_000; - const makerOrderProto: IndexerOrder = createOrder({ - subaccountId: defaultSubaccountId, - clientId: 0, - side: IndexerOrder_Side.SIDE_BUY, - quantums: makerQuantums, - subticks: makerSubticks, - goodTilOneof: { goodTilBlock: 10 }, - clobPairId: defaultClobPairId, - orderFlags: ORDER_FLAG_SHORT_TERM.toString(), - timeInForce: IndexerOrder_TimeInForce.TIME_IN_FORCE_UNSPECIFIED, - reduceOnly: false, - clientMetadata: 0, - }); + const makerOrderProto: IndexerOrder = createOrder({ + subaccountId: defaultSubaccountId, + clientId: 0, + side: IndexerOrder_Side.SIDE_BUY, + quantums: makerQuantums, + subticks: makerSubticks, + goodTilOneof: { goodTilBlock: 10 }, + clobPairId: defaultClobPairId, + orderFlags: ORDER_FLAG_SHORT_TERM.toString(), + timeInForce: IndexerOrder_TimeInForce.TIME_IN_FORCE_UNSPECIFIED, + reduceOnly: false, + clientMetadata: 0, + }); - const takerSubticks: number = 150_000; - const takerQuantums: number = 10; - const liquidationOrder: LiquidationOrderV1 = createLiquidationOrder({ - subaccountId: defaultSubaccountId2, - clobPairId: defaultClobPairId, - perpetualId: defaultPerpetualPosition.perpetualId, - quantums: takerQuantums, - isBuy: false, - subticks: takerSubticks, - }); + const takerSubticks: number = 150_000; + const takerQuantums: number = 10; + const liquidationOrder: LiquidationOrderV1 = 
createLiquidationOrder({ + subaccountId: defaultSubaccountId2, + clobPairId: defaultClobPairId, + perpetualId: defaultPerpetualPosition.perpetualId, + quantums: takerQuantums, + isBuy: false, + subticks: takerSubticks, + }); - const fillAmount: number = 10; - const orderFillEvent: OrderFillEventV1 = createLiquidationOrderFillEvent( - makerOrderProto, - liquidationOrder, - fillAmount, - fillAmount, - ); - const kafkaMessage: KafkaMessage = createKafkaMessageFromOrderFillEvent({ - orderFillEvent, - transactionIndex, - eventIndex, - height: parseInt(defaultHeight, 10), - time: defaultTime, - txHash: defaultTxHash, - }); + const fillAmount: number = 10; + const orderFillEvent: OrderFillEventV1 = createLiquidationOrderFillEvent( + makerOrderProto, + liquidationOrder, + fillAmount, + fillAmount, + ); + const kafkaMessage: KafkaMessage = createKafkaMessageFromOrderFillEvent({ + orderFillEvent, + transactionIndex, + eventIndex, + height: parseInt(defaultHeight, 10), + time: defaultTime, + txHash: defaultTxHash, + }); - // create initial PerpetualPositions - await Promise.all([ - PerpetualPositionTable.create(defaultPerpetualPosition), - PerpetualPositionTable.create({ - ...defaultPerpetualPosition, - subaccountId: testConstants.defaultSubaccountId2, - }), - ]); + // create initial PerpetualPositions + await Promise.all([ + PerpetualPositionTable.create(defaultPerpetualPosition), + PerpetualPositionTable.create({ + ...defaultPerpetualPosition, + subaccountId: testConstants.defaultSubaccountId2, + }), + ]); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); - - // This size should be in fixed-point notation rather than exponential notation (1e-8) - const makerOrderSize: string = '0.00000001'; // quantums in human = 1e2 * 1e-10 = 1e-8 - const makerPrice: string = '100'; // quote currency / base currency = 1e6 * 1e-8 * 1e-6 / 1e-10 = 1e2 - const totalFilled: string = '0.000000001'; // fillAmount in human = 1e1 * 1e-10 = 
1e-9 - await expectOrderInDatabase({ - subaccountId: testConstants.defaultSubaccountId, - clientId: '0', - size: makerOrderSize, - totalFilled, - price: makerPrice, - status: OrderStatus.OPEN, // orderSize > totalFilled so status is open - clobPairId: defaultClobPairId, - side: makerOrderProto.side === IndexerOrder_Side.SIDE_BUY ? OrderSide.BUY : OrderSide.SELL, - orderFlags: makerOrderProto.orderId!.orderFlags.toString(), - timeInForce: TimeInForce.GTT, - reduceOnly: false, - goodTilBlock: protocolTranslations.getGoodTilBlock(makerOrderProto)?.toString(), - goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(makerOrderProto), - clientMetadata: makerOrderProto.clientMetadata.toString(), - updatedAt: defaultDateTime.toISO(), - updatedAtHeight: defaultHeight.toString(), - }); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); - const eventId: Buffer = TendermintEventTable.createEventId( - defaultHeight, - transactionIndex, - eventIndex, - ); + // This size should be in fixed-point notation rather than exponential notation (1e-8) + const makerOrderSize: string = '0.00000001'; // quantums in human = 1e2 * 1e-10 = 1e-8 + const makerPrice: string = '100'; // quote currency / base currency = 1e6 * 1e-8 * 1e-6 / 1e-10 = 1e2 + const totalFilled: string = '0.000000001'; // fillAmount in human = 1e1 * 1e-10 = 1e-9 + await expectOrderInDatabase({ + subaccountId: testConstants.defaultSubaccountId, + clientId: '0', + size: makerOrderSize, + totalFilled, + price: makerPrice, + status: OrderStatus.OPEN, // orderSize > totalFilled so status is open + clobPairId: defaultClobPairId, + side: makerOrderProto.side === IndexerOrder_Side.SIDE_BUY ? 
OrderSide.BUY : OrderSide.SELL, + orderFlags: makerOrderProto.orderId!.orderFlags.toString(), + timeInForce: TimeInForce.GTT, + reduceOnly: false, + goodTilBlock: protocolTranslations.getGoodTilBlock(makerOrderProto)?.toString(), + goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(makerOrderProto), + clientMetadata: makerOrderProto.clientMetadata.toString(), + updatedAt: defaultDateTime.toISO(), + updatedAtHeight: defaultHeight.toString(), + }); - // This size should be in fixed-point notation rather than exponential notation (1e-5) - const quoteAmount: string = '0.0000001'; // quote amount is price * fillAmount = 1e2 * 1e-9 = 1e-7 - await expectFillInDatabase({ - subaccountId: testConstants.defaultSubaccountId, - clientId: '0', - liquidity: Liquidity.MAKER, - size: totalFilled, - price: makerPrice, - quoteAmount, - eventId, - transactionHash: defaultTxHash, - createdAt: defaultDateTime.toISO(), - createdAtHeight: defaultHeight, - type: FillType.LIQUIDATION, - clobPairId: defaultClobPairId, - side: protocolTranslations.protocolOrderSideToOrderSide(makerOrderProto.side), - orderFlags: ORDER_FLAG_SHORT_TERM.toString(), - clientMetadata: makerOrderProto.clientMetadata.toString(), - fee: defaultMakerFee, - }); - await expectFillInDatabase({ - subaccountId: testConstants.defaultSubaccountId2, - clientId: '0', - liquidity: Liquidity.TAKER, - size: totalFilled, - price: makerPrice, - quoteAmount, - eventId, - transactionHash: defaultTxHash, - createdAt: defaultDateTime.toISO(), - createdAtHeight: defaultHeight, - type: FillType.LIQUIDATED, - clobPairId: defaultClobPairId, - side: liquidationOrderToOrderSide(liquidationOrder), - orderFlags: ORDER_FLAG_SHORT_TERM.toString(), - clientMetadata: null, - fee: defaultTakerFee, - hasOrderId: false, - }); + const eventId: Buffer = TendermintEventTable.createEventId( + defaultHeight, + transactionIndex, + eventIndex, + ); - await Promise.all([ - expectDefaultOrderFillAndPositionSubaccountKafkaMessages( - producerSendMock, 
+ // This size should be in fixed-point notation rather than exponential notation (1e-5) + const quoteAmount: string = '0.0000001'; // quote amount is price * fillAmount = 1e2 * 1e-9 = 1e-7 + await expectFillInDatabase({ + subaccountId: testConstants.defaultSubaccountId, + clientId: '0', + liquidity: Liquidity.MAKER, + size: totalFilled, + price: makerPrice, + quoteAmount, eventId, - ORDER_FLAG_SHORT_TERM, - ), - expectDefaultTradeKafkaMessageFromTakerFillId( - producerSendMock, + transactionHash: defaultTxHash, + createdAt: defaultDateTime.toISO(), + createdAtHeight: defaultHeight, + type: FillType.LIQUIDATION, + clobPairId: defaultClobPairId, + side: protocolTranslations.protocolOrderSideToOrderSide(makerOrderProto.side), + orderFlags: ORDER_FLAG_SHORT_TERM.toString(), + clientMetadata: makerOrderProto.clientMetadata.toString(), + fee: defaultMakerFee, + }); + await expectFillInDatabase({ + subaccountId: testConstants.defaultSubaccountId2, + clientId: '0', + liquidity: Liquidity.TAKER, + size: totalFilled, + price: makerPrice, + quoteAmount, eventId, - ), - expectCandlesUpdated(), - ]); - }); + transactionHash: defaultTxHash, + createdAt: defaultDateTime.toISO(), + createdAtHeight: defaultHeight, + type: FillType.LIQUIDATED, + clobPairId: defaultClobPairId, + side: liquidationOrderToOrderSide(liquidationOrder), + orderFlags: ORDER_FLAG_SHORT_TERM.toString(), + clientMetadata: null, + fee: defaultTakerFee, + hasOrderId: false, + }); + + await Promise.all([ + expectDefaultOrderFillAndPositionSubaccountKafkaMessages( + producerSendMock, + eventId, + ORDER_FLAG_SHORT_TERM, + ), + expectDefaultTradeKafkaMessageFromTakerFillId( + producerSendMock, + eventId, + ), + expectCandlesUpdated(), + ]); + }); it('LiquidationOrderFillEvent fails liquidationOrder validation', async () => { const makerQuantums: number = 10_000_000; diff --git a/indexer/services/ender/src/config.ts b/indexer/services/ender/src/config.ts index 3f6909c192..850515af37 100644 --- 
a/indexer/services/ender/src/config.ts +++ b/indexer/services/ender/src/config.ts @@ -26,6 +26,9 @@ export const configSchema = { USE_ORDER_HANDLER_SQL_FUNCTION: parseBoolean({ default: true, }), + USE_LIQUIDATION_HANDLER_SQL_FUNCTION: parseBoolean({ + default: true, + }), USE_SUBACCOUNT_UPDATE_SQL_FUNCTION: parseBoolean({ default: true, }), diff --git a/indexer/services/ender/src/handlers/order-fills/liquidation-handler.ts b/indexer/services/ender/src/handlers/order-fills/liquidation-handler.ts index 8ee0073793..5c6c4a2354 100644 --- a/indexer/services/ender/src/handlers/order-fills/liquidation-handler.ts +++ b/indexer/services/ender/src/handlers/order-fills/liquidation-handler.ts @@ -1,24 +1,35 @@ import { logger } from '@dydxprotocol-indexer/base'; import { FillFromDatabase, + FillModel, Liquidity, OrderFromDatabase, + OrderModel, OrderTable, PerpetualMarketFromDatabase, + PerpetualMarketModel, perpetualMarketRefresher, PerpetualPositionFromDatabase, + PerpetualPositionModel, + storeHelpers, SubaccountTable, - OrderStatus, + USDC_ASSET_ID, + OrderStatus, FillType, } from '@dydxprotocol-indexer/postgres'; import { isStatefulOrder } from '@dydxprotocol-indexer/v4-proto-parser'; import { - LiquidationOrderV1, IndexerOrderId, + LiquidationOrderV1, IndexerOrderId, OrderFillEventV1, } from '@dydxprotocol-indexer/v4-protos'; import Long from 'long'; +import * as pg from 'pg'; +import config from '../../config'; import { STATEFUL_ORDER_ORDER_FILL_EVENT_TYPE, SUBACCOUNT_ORDER_FILL_EVENT_TYPE } from '../../constants'; import { convertPerpetualPosition } from '../../helpers/kafka-helper'; -import { orderFillWithLiquidityToOrderFillEventWithLiquidation } from '../../helpers/translation-helper'; +import { + orderFillWithLiquidityToOrderFillEventWithLiquidation, +} from '../../helpers/translation-helper'; +import { indexerTendermintEventToTransactionIndex } from '../../lib/helper'; import { OrderFillWithLiquidity } from '../../lib/translated-types'; import { 
ConsolidatedKafkaEvent, @@ -73,8 +84,99 @@ export class LiquidationHandler extends AbstractOrderFillHandler { + const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes; + const transactionIndex: number = indexerTendermintEventToTransactionIndex( + this.indexerTendermintEvent, + ); + + const castedLiquidationFillEventMessage: + OrderFillEventWithLiquidation = orderFillWithLiquidityToOrderFillEventWithLiquidation( + this.event, + ); + const field: string = this.event.liquidity === Liquidity.MAKER + ? 'makerOrder' : 'liquidationOrder'; + const fillType: string = this.event.liquidity === Liquidity.MAKER + ? FillType.LIQUIDATION : FillType.LIQUIDATED; + + const result: pg.QueryResult = await storeHelpers.rawQuery( + `SELECT dydx_liquidation_fill_handler_per_order( + '${field}', + ${this.block.height}, + '${this.block.time?.toISOString()}', + '${JSON.stringify(OrderFillEventV1.decode(eventDataBinary))}', + ${this.indexerTendermintEvent.eventIndex}, + ${transactionIndex}, + '${this.block.txHashes[transactionIndex]}', + '${this.event.liquidity}', + '${fillType}', + '${USDC_ASSET_ID}' + ) AS result;`, + { txId: this.txId }, + ).catch((error) => { + logger.error({ + at: 'liquidationHandler#handleViaSqlFunction', + message: 'Failed to handle OrderFillEventV1', + error, + }); + throw error; + }); + + const fill: FillFromDatabase = FillModel.fromJson( + result.rows[0].result.fill) as FillFromDatabase; + const perpetualMarket: PerpetualMarketFromDatabase = PerpetualMarketModel.fromJson( + result.rows[0].result.perpetual_market) as PerpetualMarketFromDatabase; + const position: PerpetualPositionFromDatabase = PerpetualPositionModel.fromJson( + result.rows[0].result.perpetual_position) as PerpetualPositionFromDatabase; + + if (this.event.liquidity === Liquidity.MAKER) { + // Must be done in this order, because fills refer to an order + // We do not create a taker order for liquidations. 
+ const makerOrder: OrderFromDatabase = OrderModel.fromJson( + result.rows[0].result.order) as OrderFromDatabase; + + const kafkaEvents: ConsolidatedKafkaEvent[] = [ + this.generateConsolidatedKafkaEvent( + castedLiquidationFillEventMessage.makerOrder.orderId!.subaccountId!, + makerOrder, + convertPerpetualPosition(position), + fill, + perpetualMarket, + ), + // Update vulcan with the total filled amount of the maker order. + this.getOrderUpdateKafkaEvent( + castedLiquidationFillEventMessage.makerOrder!.orderId!, + castedLiquidationFillEventMessage.totalFilledMaker, + ), + ]; + + // If the order is stateful and fully-filled, send an order removal to vulcan. We only do this + // for stateful orders as we are guaranteed a stateful order cannot be replaced until the next + // block. + if (makerOrder?.status === OrderStatus.FILLED && isStatefulOrder(makerOrder?.orderFlags)) { + kafkaEvents.push( + this.getOrderRemoveKafkaEvent(castedLiquidationFillEventMessage.makerOrder!.orderId!), + ); + } + return kafkaEvents; + } else { + return [ + this.generateConsolidatedKafkaEvent( + castedLiquidationFillEventMessage.liquidationOrder.liquidated!, + undefined, + convertPerpetualPosition(position), + fill, + perpetualMarket, + ), + this.generateTradeKafkaEventFromTakerOrderFill( + fill, + ), + ]; + } + } + // eslint-disable-next-line @typescript-eslint/require-await - public async internalHandle(): Promise { + public async handleViaKnexQueries(): Promise { const castedLiquidationFillEventMessage: OrderFillEventWithLiquidation = orderFillWithLiquidityToOrderFillEventWithLiquidation( this.event, @@ -161,4 +263,11 @@ export class LiquidationHandler extends AbstractOrderFillHandler { + if (config.USE_LIQUIDATION_HANDLER_SQL_FUNCTION) { + return this.handleViaSqlFunction(); + } + return this.handleViaKnexQueries(); + } } diff --git a/indexer/services/ender/src/helpers/postgres/postgres-functions.ts b/indexer/services/ender/src/helpers/postgres/postgres-functions.ts index 
913af3a438..b8e4b43b54 100644 --- a/indexer/services/ender/src/helpers/postgres/postgres-functions.ts +++ b/indexer/services/ender/src/helpers/postgres/postgres-functions.ts @@ -39,6 +39,7 @@ const scripts: string[] = [ 'dydx_get_order_status.sql', 'dydx_get_total_filled_from_liquidity.sql', 'dydx_get_weighted_average.sql', + 'dydx_liquidation_fill_handler_per_order.sql', 'dydx_order_fill_handler_per_order.sql', 'dydx_perpetual_position_and_order_side_matching.sql', 'dydx_subaccount_update_handler.sql', diff --git a/indexer/services/ender/src/scripts/dydx_liquidation_fill_handler_per_order.sql b/indexer/services/ender/src/scripts/dydx_liquidation_fill_handler_per_order.sql new file mode 100644 index 0000000000..2eb5e52957 --- /dev/null +++ b/indexer/services/ender/src/scripts/dydx_liquidation_fill_handler_per_order.sql @@ -0,0 +1,242 @@ +/** + Parameters: + - field: the field storing the order to process. + - block_height: the height of the block being processed. + - block_time: the time of the block being processed. + - event_data: The 'data' field of the IndexerTendermintEvent (https://github.com/dydxprotocol/v4-proto/blob/8d35c86/dydxprotocol/indexer/indexer_manager/event.proto#L25) + converted to JSON format. Conversion to JSON is expected to be done by JSON.stringify. + - event_index: The 'event_index' of the IndexerTendermintEvent. + - transaction_index: The transaction_index of the IndexerTendermintEvent after the conversion that takes into + account the block_event (https://github.com/dydxprotocol/indexer/blob/cc70982/services/ender/src/lib/helper.ts#L33) + - transaction_hash: The transaction hash corresponding to this event from the IndexerTendermintBlock 'tx_hashes'. + - fill_liquidity: The liquidity for the fill record. + - fill_type: The type for the fill record. + - usdc_asset_id: The USDC asset id. 
+ Returns: JSON object containing fields: + - order: The updated order in order-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/order-model.ts). + Only returned if field == 'makerOrder'. + - fill: The updated fill in fill-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/fill-model.ts). + - perpetual_market: The perpetual market for the order in perpetual-market-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/perpetual-market-model.ts). + - perpetual_position: The updated perpetual position in perpetual-position-model format (https://github.com/dydxprotocol/indexer/blob/cc70982/packages/postgres/src/models/perpetual-position-model.ts). +*/ +CREATE OR REPLACE FUNCTION dydx_liquidation_fill_handler_per_order( + field text, block_height int, block_time timestamp, event_data jsonb, event_index int, transaction_index int, + transaction_hash text, fill_liquidity text, fill_type text, usdc_asset_id text) RETURNS jsonb AS $$ +DECLARE + order_ jsonb; + maker_order jsonb; + clob_pair_id bigint; + subaccount_uuid uuid; + perpetual_market_record perpetual_markets%ROWTYPE; + order_record orders%ROWTYPE; + fill_record fills%ROWTYPE; + perpetual_position_record perpetual_positions%ROWTYPE; + asset_record assets%ROWTYPE; + order_uuid uuid; + order_side text; + order_size numeric; + order_price numeric; + order_client_metadata bigint; + fee numeric; + fill_amount numeric; + total_filled numeric; + maker_price numeric; + event_id bytea; +BEGIN + order_ = event_data->field; + maker_order = event_data->'makerOrder'; + + IF field = 'makerOrder' THEN + clob_pair_id = jsonb_extract_path(order_, 'orderId', 'clobPairId')::bigint; + ELSE + clob_pair_id = jsonb_extract_path(order_, 'clobPairId')::bigint; + END IF; + + BEGIN + SELECT * INTO STRICT perpetual_market_record FROM perpetual_markets WHERE "clobPairId" = clob_pair_id; + EXCEPTION + WHEN 
NO_DATA_FOUND THEN + RAISE EXCEPTION 'Unable to find perpetual market with clobPairId %', clob_pair_id; + WHEN TOO_MANY_ROWS THEN + /** This should never happen and if it ever were to would indicate that the table has malformed data. */ + RAISE EXCEPTION 'Found multiple perpetual markets with clobPairId %', clob_pair_id; + END; + + BEGIN + SELECT * INTO STRICT asset_record FROM assets WHERE "id" = usdc_asset_id; + EXCEPTION + WHEN NO_DATA_FOUND THEN + RAISE EXCEPTION 'Unable to find asset with id %', usdc_asset_id; + END; + + /** + Calculate sizes, prices, and fill amounts. + + TODO(IND-238): Extract out calculation of quantums and subticks to their own SQL functions. + */ + fill_amount = dydx_trim_scale(dydx_from_jsonlib_long(event_data->'fillAmount') * + power(10, perpetual_market_record."atomicResolution")::numeric); + maker_price = dydx_trim_scale(dydx_from_jsonlib_long(maker_order->'subticks') * + power(10, perpetual_market_record."quantumConversionExponent" + + asset_record."atomicResolution" - + perpetual_market_record."atomicResolution")::numeric); + total_filled = dydx_trim_scale(dydx_get_total_filled(fill_liquidity, event_data) * + power(10, perpetual_market_record."atomicResolution")::numeric); + fee = dydx_trim_scale(dydx_get_fee(fill_liquidity, event_data) * + power(10, asset_record."atomicResolution")::numeric); + order_price = dydx_trim_scale(dydx_from_jsonlib_long(order_->'subticks') * + power(10, perpetual_market_record."quantumConversionExponent" + + asset_record."atomicResolution" - + perpetual_market_record."atomicResolution")::numeric); + order_side = dydx_from_protocol_order_side(order_->'side'); + + IF field = 'makerOrder' THEN + order_uuid = dydx_uuid_from_order_id(order_->'orderId'); + subaccount_uuid = dydx_uuid_from_subaccount_id(jsonb_extract_path(order_, 'orderId', 'subaccountId')); + order_client_metadata = (order_->'clientMetadata')::bigint; + ELSE + order_uuid = NULL; + subaccount_uuid = 
dydx_uuid_from_subaccount_id(jsonb_extract_path(order_, 'liquidated')); + order_client_metadata = NULL; + END IF; + + IF field = 'makerOrder' THEN + order_size = dydx_trim_scale(dydx_from_jsonlib_long(order_->'quantums') * + power(10, perpetual_market_record."atomicResolution")::numeric); + + /** Upsert the order, populating the order_record fields with what will be in the database. */ + SELECT * INTO order_record FROM orders WHERE "id" = order_uuid; + order_record."size" = order_size; + order_record."price" = order_price; + order_record."timeInForce" = dydx_from_protocol_time_in_force(order_->'timeInForce'); + order_record."reduceOnly" = (order_->>'reduceOnly')::boolean; + order_record."orderFlags" = jsonb_extract_path(order_, 'orderId', 'orderFlags')::bigint; + order_record."goodTilBlock" = (order_->'goodTilBlock')::bigint; + order_record."goodTilBlockTime" = to_timestamp((order_->'goodTilBlockTime')::double precision); + order_record."clientMetadata" = order_client_metadata; + order_record."updatedAt" = block_time; + order_record."updatedAtHeight" = block_height; + + IF FOUND THEN + order_record."totalFilled" = total_filled; + order_record."status" = dydx_get_order_status(total_filled, order_record.size, false, order_record."orderFlags", order_record."timeInForce"); + + UPDATE orders + SET + "size" = order_record."size", + "totalFilled" = order_record."totalFilled", + "price" = order_record."price", + "status" = order_record."status", + "orderFlags" = order_record."orderFlags", + "goodTilBlock" = order_record."goodTilBlock", + "goodTilBlockTime" = order_record."goodTilBlockTime", + "timeInForce" = order_record."timeInForce", + "reduceOnly" = order_record."reduceOnly", + "clientMetadata" = order_record."clientMetadata", + "updatedAt" = order_record."updatedAt", + "updatedAtHeight" = order_record."updatedAtHeight" + WHERE id = order_uuid; + ELSE + order_record."id" = order_uuid; + order_record."subaccountId" = subaccount_uuid; + order_record."clientId" = 
jsonb_extract_path_text(order_, 'orderId', 'clientId')::bigint; + order_record."clobPairId" = clob_pair_id; + order_record."side" = order_side; + order_record."type" = 'LIMIT'; + + order_record."totalFilled" = fill_amount; + order_record."status" = dydx_get_order_status(fill_amount, order_size, false, order_record."orderFlags", order_record."timeInForce"); + order_record."createdAtHeight" = block_height; + INSERT INTO orders + ("id", "subaccountId", "clientId", "clobPairId", "side", "size", "totalFilled", "price", "type", + "status", "timeInForce", "reduceOnly", "orderFlags", "goodTilBlock", "goodTilBlockTime", "createdAtHeight", + "clientMetadata", "triggerPrice", "updatedAt", "updatedAtHeight") + VALUES (order_record.*); + END IF; + END IF; + + /* Insert the associated fill record for this order_fill event. */ + event_id = dydx_event_id_from_parts( + block_height, transaction_index, event_index); + INSERT INTO fills + ("id", "subaccountId", "side", "liquidity", "type", "clobPairId", "orderId", "size", "price", "quoteAmount", + "eventId", "transactionHash", "createdAt", "createdAtHeight", "clientMetadata", "fee") + VALUES (dydx_uuid_from_fill_event_parts(event_id, fill_liquidity), + subaccount_uuid, + order_side, + fill_liquidity, + fill_type, + clob_pair_id, + order_uuid, + fill_amount, + maker_price, + dydx_trim_scale(fill_amount * maker_price), + event_id, + transaction_hash, + block_time, + block_height, + order_client_metadata, + fee) + RETURNING * INTO fill_record; + + /* Upsert the perpetual_position record for this order_fill event. 
*/ + SELECT * INTO perpetual_position_record FROM perpetual_positions WHERE "subaccountId" = subaccount_uuid + AND "perpetualId" = perpetual_market_record."id" + ORDER BY "createdAtHeight" DESC; + IF NOT FOUND THEN + RAISE EXCEPTION 'Unable to find existing perpetual position, subaccountId: %, perpetualId: %', subaccount_uuid, perpetual_market_record."id"; + END IF; + DECLARE + sum_open numeric = perpetual_position_record."sumOpen"; + entry_price numeric = perpetual_position_record."entryPrice"; + sum_close numeric = perpetual_position_record."sumClose"; + exit_price numeric = perpetual_position_record."exitPrice"; + BEGIN + IF dydx_perpetual_position_and_order_side_matching( + perpetual_position_record."side", order_side) THEN + sum_open = dydx_trim_scale(perpetual_position_record."sumOpen" + fill_amount); + entry_price = dydx_get_weighted_average( + perpetual_position_record."entryPrice", perpetual_position_record."sumOpen", + maker_price, fill_amount); + perpetual_position_record."sumOpen" = sum_open; + perpetual_position_record."entryPrice" = entry_price; + ELSE + sum_close = dydx_trim_scale(perpetual_position_record."sumClose" + fill_amount); + exit_price = dydx_get_weighted_average( + perpetual_position_record."exitPrice", perpetual_position_record."sumClose", + maker_price, fill_amount); + perpetual_position_record."sumClose" = sum_close; + perpetual_position_record."exitPrice" = exit_price; + END IF; + UPDATE perpetual_positions + SET + "sumOpen" = sum_open, + "entryPrice" = entry_price, + "sumClose" = sum_close, + "exitPrice" = exit_price + WHERE "id" = perpetual_position_record.id; + END; + + IF field = 'makerOrder' THEN + RETURN jsonb_build_object( + 'order', + dydx_to_jsonb(order_record), + 'fill', + dydx_to_jsonb(fill_record), + 'perpetual_market', + dydx_to_jsonb(perpetual_market_record), + 'perpetual_position', + dydx_to_jsonb(perpetual_position_record) + ); + ELSE + RETURN jsonb_build_object( + 'fill', + dydx_to_jsonb(fill_record), + 
'perpetual_market', + dydx_to_jsonb(perpetual_market_record), + 'perpetual_position', + dydx_to_jsonb(perpetual_position_record) + ); + END IF; +END; +$$ LANGUAGE plpgsql;