Skip to content

Commit

Permalink
[ECO-2728] Fix parsing issues with new column names and types, add a …
Browse files Browse the repository at this point in the history
…fallback parsing function; bump processor submodule to `4.0.0` (#536)
  • Loading branch information
xbtmatt authored Jan 31, 2025
1 parent 47e4e36 commit ad47a88
Show file tree
Hide file tree
Showing 7 changed files with 141 additions and 11 deletions.
32 changes: 29 additions & 3 deletions src/typescript/sdk/src/indexer-v2/json-bigint.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,19 @@ const JSON_BIGINT = parse({
constructorAction: "ignore",
});

/**
 * In case a field doesn't match up with its proper parsing function, fall back to not using a
 * reviver parse function at all, and simply return the data as is.
 *
 * @param parser the column-specific parsing function to wrap.
 * @returns a function that applies `parser` to a value, returning the value unchanged if
 * `parser` throws.
 */
const tryWithFallbackParse = (parser: (v: any) => any) => (v: any) => {
  try {
    return parser(v);
  } catch (e) {
    // Log an error on the server. Pass `v` and the error as separate console arguments so
    // object values aren't collapsed to "[object Object]" and the failure cause isn't lost.
    console.error("Failed to parse value:", v, e);
    return v;
  }
};
// NOTE(review): intentionally shadows the global `parseFloat`. Round-tripping through `Big`
// (presumably big.js — confirm import) preserves full decimal precision instead of losing it
// to a native `number`. Order matters: these must be declared before the converter map below.
const parseFloat = (v: any) => Big(v).toString();
// Postgres uint64 columns arrive as strings too large for `number`; keep them as bigints.
const parseBigInt = (v: any) => BigInt(v);
// Small integer columns are safe to represent as plain numbers.
const parseInteger = (v: any) => Number(v);
Expand All @@ -30,12 +43,25 @@ const converter = new Map<AnyColumnName, (value: any) => any>([
* Parses a JSON string that uses bigints- i.e., numbers too large for a normal number, but not used
* as strings. Without this parsing method, the parsed value loses precision or results in an error.
*
* Eventually, this could be more fully fleshed out to utilize more precise deserialization.
 * The parsing function for each value is selected by its column field name, which means there
 * must never be two columns with the same name but different types in the database; otherwise,
 * the wrong parsing function may be applied and fail.
*
* In case this does happen though, there is a fallback function to try to parse the value without
* any assumptions about how to parse it.
*
* @see {@link tryWithFallbackParse}
*/
export const parseJSONWithBigInts = <T>(msg: string): T => {
  return JSON_BIGINT.parse(msg, (key, value) => {
    // Look up the column-specific parser, falling back to the default for unknown keys.
    const fn = converter.get(key as AnyColumnName) ?? parseDefault;
    // Curry the retrieved parsing function to add a fallback parsing function. The wrapper
    // catches everything and returns the raw value on failure, so no extra try/catch is
    // needed here. Crucially, errors must never escape this reviver unhandled: a reviver
    // that returns `undefined` causes JSON.parse to DELETE the property entirely.
    return tryWithFallbackParse(fn)(value);
  });
};

Expand Down
4 changes: 2 additions & 2 deletions src/typescript/sdk/src/indexer-v2/types/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ const toArenaMeleeFromDatabase = (data: DatabaseStructType["ArenaMelee"]): Types
meleeId: BigInt(data.melee_id),
emojicoin0MarketAddress: data.emojicoin_0_market_address,
emojicoin1MarketAddress: data.emojicoin_1_market_address,
startTime: BigInt(data.start_time),
startTime: postgresTimestampToMicroseconds(data.start_time),
duration: BigInt(data.duration),
maxMatchPercentage: BigInt(data.max_match_percentage),
maxMatchAmount: BigInt(data.max_match_amount),
Expand Down Expand Up @@ -196,7 +196,7 @@ const toArenaInfoFromDatabase = (data: DatabaseStructType["ArenaInfo"]): Types["
aptLocked: BigInt(data.apt_locked),
emojicoin0MarketAddress: data.emojicoin_0_market_address,
emojicoin1MarketAddress: data.emojicoin_1_market_address,
startTime: BigInt(data.start_time),
startTime: postgresTimestampToMicroseconds(data.start_time),
duration: BigInt(data.duration),
maxMatchPercentage: BigInt(data.max_match_percentage),
maxMatchAmount: BigInt(data.max_match_amount),
Expand Down
4 changes: 2 additions & 2 deletions src/typescript/sdk/src/indexer-v2/types/json-types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -255,7 +255,7 @@ type ArenaMeleeEventData = {
melee_id: Uint64String;
emojicoin_0_market_address: string;
emojicoin_1_market_address: string;
start_time: Uint64String;
start_time: PostgresTimestamp;
duration: Uint64String;
max_match_percentage: Uint64String;
max_match_amount: Uint64String;
Expand Down Expand Up @@ -327,7 +327,7 @@ type ArenaInfoData = {
apt_locked: Uint64String;
emojicoin_0_market_address: string;
emojicoin_1_market_address: string;
start_time: Uint64String;
start_time: PostgresTimestamp;
duration: Uint64String;
max_match_percentage: Uint64String;
max_match_amount: Uint64String;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,6 @@ export const floatColumns: Set<AnyColumnName> = new Set([

// Arena
"melee_id",
"start_time",
"duration",
"max_match_percentage",
"max_match_amount",
Expand Down
84 changes: 84 additions & 0 deletions src/typescript/sdk/tests/e2e/queries/candlesticks.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
// cspell:word timespan

import { AccountAddress } from "@aptos-labs/ts-sdk";
import {
getMarketResourceFromWriteSet,
getPeriodBoundaryAsDate,
Period,
periodEnumToRawDuration,
sleep,
type SymbolEmoji,
} from "../../../src";
import { EmojicoinClient } from "../../../src/client/emojicoin-client";
import { fetchPeriodicEventsSince, waitForEmojicoinIndexer } from "../../../src/indexer-v2";
import { getFundedAccount } from "../../utils/test-accounts";

// This test must have a really long timeout because it essentially sleeps for 60 seconds after
// registering a market to ensure that at least one periodic state event is emitted.
jest.setTimeout(90000);

// Millisecond durations used for the period-boundary math and indexer/clock-skew padding below.
const ONE_MINUTE = 60 * 1000;
const TWO_SECONDS = 2 * 1000;

// E2E flow: register a market, wait until its 1-minute period boundary has elapsed, trigger a
// periodic state event via a chat transaction, then verify the indexer returns the event with
// a start time equal to the on-chain tracker's start time.
describe("verifies parsing of periodic state event data", () => {
  const registrant = getFundedAccount("084");
  const emojicoin = new EmojicoinClient();

  it("properly parses periodic state event data", async () => {
    const emojis: SymbolEmoji[] = ["🫐"];

    // Register the market, get the market ID and address.
    const { response, marketID, marketAddress } = await emojicoin
      .register(registrant, emojis)
      .then(({ response, registration }) => {
        expect(response.success).toBe(true);
        return {
          marketID: BigInt(registration.event.marketID),
          marketAddress: AccountAddress.from(registration.event.marketMetadata.marketAddress),
          response,
        };
      });

    // Get the period start time to know exactly how long to wait to ensure a periodic event emits.
    // This could just wait 60+ seconds, but to shorten the test time on average, just wait the
    // exact amount of time.
    const marketResource = getMarketResourceFromWriteSet(response, marketAddress)!;
    expect(marketResource).toBeDefined();
    const oneMinutePeriodicStateTracker = marketResource.periodicStateTrackers.find(
      (p) => Number(p.period) === periodEnumToRawDuration(Period.Period1M)
    )!;
    expect(oneMinutePeriodicStateTracker).toBeDefined();
    // Find the period boundary (where the period starts).
    const periodBoundaryStart = getPeriodBoundaryAsDate(
      oneMinutePeriodicStateTracker.startTime,
      Period.Period1M
    );

    // Add a minute to it to get the end of the period.
    const periodBoundaryEnd = periodBoundaryStart.getTime() + ONE_MINUTE;

    // Wait until the end of the period.
    // NOTE(review): `waitTime` can be negative if the boundary already passed; presumably
    // `sleep` treats that as a no-op — confirm against its implementation.
    const waitTime = periodBoundaryEnd - Date.now();
    await sleep(waitTime);

    // Wait another 2 seconds to account for any skew between Date() and on-chain time.
    await sleep(TWO_SECONDS);

    // Chat, to trigger a periodic state event.
    const { events, response: chatResponse } = await emojicoin.chat(registrant, emojis, ["⏱️"]);
    expect(events.periodicStateEvents.length).toBeGreaterThanOrEqual(1);

    // Only query once the indexer has processed up through the chat transaction's version.
    await waitForEmojicoinIndexer(chatResponse.version);
    const resPeriodicEvents = await fetchPeriodicEventsSince({
      marketID,
      start: new Date(Date.now() - 10 * ONE_MINUTE), // Ensure the event shows up by making the
      end: new Date(Date.now() + 10 * ONE_MINUTE), // timespan range much larger than necessary.
      period: Period.Period1M,
    });

    // The indexed event's start time must round-trip to exactly (`toBe`) the on-chain value.
    const pEvent = resPeriodicEvents.at(0)!;
    expect(pEvent).toBeDefined();
    expect(pEvent.periodicMetadata.period).toEqual(Period.Period1M);
    expect(pEvent.periodicMetadata.startTime).toBe(oneMinutePeriodicStateTracker.startTime);
  });
});
25 changes: 23 additions & 2 deletions src/typescript/sdk/tests/e2e/schema.test.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
// cspell:word nspname
// cspell:word proname
// cspell:word pronamespace

import { EMOJICOIN_INDEXER_URL } from "../../src/server/env";
import { type AnyColumnName, TableName } from "../../src/indexer-v2/types/json-types";
import {
Expand All @@ -8,7 +12,6 @@ import {
} from "../../src/indexer-v2/types/postgres-numeric-types";

// This is not the full response type; it's just what we use in this test.
// NOTE: This does *not* cover the RPC function calls/schemas. Only tables and views.
interface DatabaseSchema {
definitions: {
[Table in TableName]: {
Expand All @@ -29,6 +32,24 @@ interface DatabaseSchema {
};
}

// Note that these tests are *not* exhaustive. There is a more robust and comprehensive way to test
// that the types in the schema match our conversion types; however, it's not worth the effort since
// it's unlikely that many more types will be added.
// The most ideal way to do this would be to reflect on the schema with something like:
//
// SELECT
// n.nspname AS schema_name,
// p.proname AS function_name,
// pg_get_function_result(p.oid) AS result_type,
// pg_get_function_arguments(p.oid) AS arguments
// FROM pg_proc p
// JOIN pg_namespace n ON p.pronamespace = n.oid
// WHERE p.proname = 'aggregate_market_state';
//
// and then using the values/types returned there to parse (and validate in schema e2e tests).
//
// For now, what's here is fine.

describe("verifies the schema is what's expected", () => {
it("pulls the schema from `postgrest` and compares it against the types in the SDK", async () => {
const response = await fetch(EMOJICOIN_INDEXER_URL);
Expand Down Expand Up @@ -76,7 +97,7 @@ describe("verifies the schema is what's expected", () => {
expect(marketStateColumnNames.has("daily_tvl_per_lp_coin_growth_q64")).toBe(false);
});

it("ensures that there are no duplicate column names with different types", () => {
it("ensures that there are no duplicate column names with different types in the SDK", () => {
const mergedSetSize = new Set([...floatColumns, ...bigintColumns, ...integerColumns]).size;
const sumOfIndividualSetSize =
new Set(floatColumns).size + new Set(bigintColumns).size + new Set(integerColumns).size;
Expand Down

0 comments on commit ad47a88

Please sign in to comment.