diff --git a/src/clickhouse/makeQuery.ts b/src/clickhouse/makeQuery.ts
index fbfe929..af35945 100644
--- a/src/clickhouse/makeQuery.ts
+++ b/src/clickhouse/makeQuery.ts
@@ -10,6 +10,7 @@ export interface Query {
     meta: Meta[],
     data: T[],
     rows: number,
+    rows_before_limit_at_least: number,
     statistics: {
         elapsed: number,
         rows_read: number,
@@ -21,7 +22,7 @@ export async function makeQuery(query: string) {
     try {
         const response = await client.query({ query })
         const data: Query = await response.json();
-        
+
         prometheus.query.inc();
         prometheus.bytes_read.inc(data.statistics.bytes_read);
         prometheus.rows_read.inc(data.statistics.rows_read);
@@ -32,7 +33,16 @@ export async function makeQuery(query: string) {
     } catch (e: any) {
         logger.error(e.message)
-        return { data: [] }
+        return {
+            meta: [],
+            data: [],
+            rows: 0,
+            rows_before_limit_at_least: 0,
+            statistics: {
+                elapsed: 0,
+                rows_read: 0,
+                bytes_read: 0,
+            }
+        };
     }
-}
\ No newline at end of file
diff --git a/src/fetch/balance.ts b/src/fetch/balance.ts
index 6f2f1d9..d470c97 100644
--- a/src/fetch/balance.ts
+++ b/src/fetch/balance.ts
@@ -2,7 +2,8 @@ import { makeQuery } from "../clickhouse/makeQuery.js";
 import { logger } from "../logger.js";
 import { getBalanceChanges } from "../queries.js";
 import * as prometheus from "../prometheus.js";
-import { toJSON } from "./utils.js";
+import { addMetadata, toJSON } from "./utils.js";
+import { parseLimit, parsePage } from "../utils.js";
 
 function verifyParams(searchParams: URLSearchParams) {
     const account = searchParams.get("account");
@@ -20,7 +21,14 @@ export default async function (req: Request) {
 
         const query = getBalanceChanges(searchParams);
         const response = await makeQuery(query)
-        return toJSON(response.data);
+        return toJSON(
+            addMetadata(
+                response.data,
+                response.rows_before_limit_at_least,
+                parseLimit(searchParams.get("limit")),
+                parsePage(searchParams.get("page"))
+            )
+        );
     } catch (e: any) {
         logger.error(e);
         prometheus.request_error.inc({ pathname: "/balance", status: 400 });
diff --git a/src/fetch/openapi.ts b/src/fetch/openapi.ts
index d46d6bf..abf9521 100644
--- a/src/fetch/openapi.ts
+++ b/src/fetch/openapi.ts
@@ -49,11 +49,10 @@ const parameterLimit: ParameterObject = {
     schema: { type: "number", maximum: config.maxLimit, minimum: 1 },
 }
 
-// TODO: Determine offset from `limit` and replace this with page numbers
 const parameterOffset: ParameterObject = {
-    name: "offset",
+    name: "page",
     in: "query",
-    description: "Index offset for results pagination.",
+    description: "Page index for results pagination.",
     required: false,
     schema: { type: "number", minimum: 1 },
 }
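Note: the `offset` -> `page` parameter rename above pairs with the query-builder changes to src/queries.ts further down, where `page` is converted into a SQL OFFSET derived from `limit`. A minimal sketch of the intended mapping, with illustrative values that are not part of the diff:

    // Illustrative only: how ?limit=10&page=3 is expected to translate to SQL
    const limit = 10;                   // from parseLimit(searchParams.get("limit"), config.maxLimit)
    const page = 3;                     // from parsePage(searchParams.get("page"))
    const offset = limit * (page - 1);  // 20
    console.log(` LIMIT ${limit} OFFSET ${offset} `); // " LIMIT 10 OFFSET 20 "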
diff --git a/src/fetch/supply.ts b/src/fetch/supply.ts
index 082ea4f..280ac50 100644
--- a/src/fetch/supply.ts
+++ b/src/fetch/supply.ts
@@ -2,7 +2,8 @@ import { makeQuery } from "../clickhouse/makeQuery.js";
 import { logger } from "../logger.js";
 import { getTotalSupply } from "../queries.js";
 import * as prometheus from "../prometheus.js";
-import { toJSON } from "./utils.js";
+import { addMetadata, toJSON } from "./utils.js";
+import { parseLimit, parsePage } from "../utils.js";
 
 function verifyParams(searchParams: URLSearchParams) {
     const contract = searchParams.get("contract");
@@ -20,7 +21,14 @@ export default async function (req: Request) {
 
         const query = getTotalSupply(searchParams);
         const response = await makeQuery(query)
-        return toJSON(response.data);
+        return toJSON(
+            addMetadata(
+                response.data,
+                response.rows_before_limit_at_least,
+                parseLimit(searchParams.get("limit")),
+                parsePage(searchParams.get("page"))
+            )
+        );
     } catch (e: any) {
         logger.error(e);
         prometheus.request_error.inc({ pathname: "/supply", status: 400 });
diff --git a/src/fetch/transfers.ts b/src/fetch/transfers.ts
index 77fb92b..5ec0b8d 100644
--- a/src/fetch/transfers.ts
+++ b/src/fetch/transfers.ts
@@ -2,7 +2,8 @@ import { makeQuery } from "../clickhouse/makeQuery.js";
 import { logger } from "../logger.js";
 import { getTransfers } from "../queries.js";
 import * as prometheus from "../prometheus.js";
-import { toJSON } from "./utils.js";
+import { addMetadata, toJSON } from "./utils.js";
+import { parseLimit, parsePage } from "../utils.js";
 
 export default async function (req: Request) {
     try {
@@ -12,7 +13,14 @@ export default async function (req: Request) {
 
         const query = getTransfers(searchParams);
         const response = await makeQuery(query)
-        return toJSON(response.data);
+        return toJSON(
+            addMetadata(
+                response.data,
+                response.rows_before_limit_at_least,
+                parseLimit(searchParams.get("limit")),
+                parsePage(searchParams.get("page"))
+            )
+        );
     } catch (e: any) {
         logger.error(e);
         prometheus.request_error.inc({ pathname: "/transfers", status: 400 });
diff --git a/src/fetch/utils.spec.ts b/src/fetch/utils.spec.ts
new file mode 100644
index 0000000..06144e9
--- /dev/null
+++ b/src/fetch/utils.spec.ts
@@ -0,0 +1,38 @@
+import { expect, test } from "bun:test";
+import { addMetadata } from "./utils.js";
+
+test("addMetadata pagination", () => {
+    const limit = 5;
+    const mock_query_reponse = {
+        data: Array(limit),
+        rows: limit,
+        rows_before_limit_at_least: 5*limit, // Simulate query with more total results than the query limit making pagination relevant
+    };
+
+    const first_page = addMetadata(mock_query_reponse.data, mock_query_reponse.rows_before_limit_at_least, limit, 1);
+    expect(first_page.meta.next_page).toBe(2);
+    expect(first_page.meta.previous_page).toBe(1); // Previous page should be set to 1 on first page
+    expect(first_page.meta.total_pages).toBe(5);
+    expect(first_page.meta.total_results).toBe(5*limit);
+
+    const odd_page = addMetadata(mock_query_reponse.data, mock_query_reponse.rows_before_limit_at_least, limit, 3);
+    expect(odd_page.meta.next_page).toBe(4);
+    expect(odd_page.meta.previous_page).toBe(2);
+    expect(odd_page.meta.total_pages).toBe(5);
+    expect(odd_page.meta.total_results).toBe(5*limit);
+
+    const even_page = addMetadata(mock_query_reponse.data, mock_query_reponse.rows_before_limit_at_least, limit, 4);
+    expect(even_page.meta.next_page).toBe(5);
+    expect(even_page.meta.previous_page).toBe(3);
+    expect(even_page.meta.total_pages).toBe(5);
+    expect(even_page.meta.total_results).toBe(5*limit);
+
+    const last_page = addMetadata(mock_query_reponse.data, mock_query_reponse.rows_before_limit_at_least, limit, 5);
+    expect(last_page.meta.next_page).toBe(last_page.meta.total_pages); // Next page should be capped to total_pages on last page
+    expect(last_page.meta.previous_page).toBe(4);
+    expect(last_page.meta.total_pages).toBe(5);
+    expect(last_page.meta.total_results).toBe(5*limit);
+
+    // TODO: Expect error message on beyond last page
+    // const beyond_last_page = addMetadata(mock_query_reponse.data, mock_query_reponse.rows_before_limit_at_least, limit, 6);
+});
\ No newline at end of file
diff --git a/src/fetch/utils.ts b/src/fetch/utils.ts
index be854ec..2601853 100644
--- a/src/fetch/utils.ts
+++ b/src/fetch/utils.ts
@@ -1,3 +1,16 @@
 export function toJSON(data: any, status: number = 200) {
     return new Response(JSON.stringify(data), { status, headers: { "Content-Type": "application/json" } });
+}
+
+export function addMetadata(data: any[], total_before_limit: number, limit: number, page: number) {
+    // TODO: Catch page number greater than total_pages and return error
+    return {
+        data,
+        meta: {
+            "next_page": (page * limit >= total_before_limit) ? page : page + 1,
+            "previous_page": (page <= 1) ? page : page - 1,
+            "total_pages": Math.ceil(total_before_limit / limit),
+            "total_results": total_before_limit
+        }
+    }
 }
\ No newline at end of file
diff --git a/src/queries.spec.ts b/src/queries.spec.ts
index 8887815..42e9210 100644
--- a/src/queries.spec.ts
+++ b/src/queries.spec.ts
@@ -6,6 +6,7 @@ import {
     getTransfers,
     addAmountFilter,
 } from "./queries.js";
+import { config } from "./config.js";
 
 const contract = "eosio.token";
 const account = "push.sx";
@@ -64,7 +65,7 @@ test("getTotalSupply", () => {
         )
     );
     expect(query).toContain(formatSQL(`ORDER BY block_number DESC`));
-    expect(query).toContain(formatSQL(`LIMIT 1`));
+    expect(query).toContain(formatSQL(`LIMIT ${config.maxLimit}`));
 });
 
 test("getTotalSupply with options", () => {
@@ -98,7 +99,7 @@ test("getBalanceChange", () => {
         )
     );
     expect(query).toContain(formatSQL(`ORDER BY timestamp DESC`));
-    expect(query).toContain(formatSQL(`LIMIT 1`));
+    expect(query).toContain(formatSQL(`LIMIT ${config.maxLimit}`));
 });
 
 test("getBalanceChanges with options", () => {
@@ -133,5 +134,5 @@ test("getTransfers", () => {
        )
    );
    expect(query).toContain(formatSQL(`ORDER BY timestamp DESC`));
-    expect(query).toContain(formatSQL(`LIMIT 100`));
+    expect(query).toContain(formatSQL(`LIMIT ${config.maxLimit}`));
 });
\ No newline at end of file
diff --git a/src/queries.ts b/src/queries.ts
index 86d79c3..5ff3b65 100644
--- a/src/queries.ts
+++ b/src/queries.ts
@@ -1,5 +1,5 @@
-import { DEFAULT_SORT_BY } from "./config.js";
-import { parseLimit, parseTimestamp } from "./utils.js";
+import { DEFAULT_SORT_BY, config } from "./config.js";
+import { parseLimit, parsePage, parseTimestamp } from "./utils.js";
 
 // For reference on Clickhouse Database tables:
 // https://raw.githubusercontent.com/pinax-network/substreams-antelope-tokens/main/schema.sql
@@ -82,11 +82,11 @@ export function getTotalSupply(searchParams: URLSearchParams, example?: boolean)
         query += ` ORDER BY block_number ${sort_by ?? DEFAULT_SORT_BY} `;
     }
 
-    const limit = parseLimit(searchParams.get("limit"));
-    query += ` LIMIT ${limit} `;
-
-    const offset = searchParams.get("offset");
-    if (offset) query += ` OFFSET ${offset} `;
+    const limit = parseLimit(searchParams.get("limit"), config.maxLimit);
+    if (limit) query += ` LIMIT ${limit}`;
+
+    const page = parsePage(searchParams.get("page"));
+    if (page) query += ` OFFSET ${limit * (page - 1)} `;
 
     return query;
 }
@@ -115,11 +115,11 @@ export function getBalanceChanges(searchParams: URLSearchParams, example?: boole
         //if (contract && !account) query += `GROUP BY (contract, account) ORDER BY timestamp DESC`;
     }
 
-    const limit = parseLimit(searchParams.get("limit"));
-    query += ` LIMIT ${limit} `;
+    const limit = parseLimit(searchParams.get("limit"), config.maxLimit);
+    if (limit) query += ` LIMIT ${limit}`;
 
-    const offset = searchParams.get("offset");
-    if (offset) query += ` OFFSET ${offset} `;
+    const page = parsePage(searchParams.get("page"));
+    if (page) query += ` OFFSET ${limit * (page - 1)} `;
 
     return query;
 }
@@ -154,11 +154,11 @@ export function getTransfers(searchParams: URLSearchParams, example?: boolean) {
         query += ` ORDER BY timestamp DESC`;
     }
 
-    const limit = parseLimit(searchParams.get("limit"), 100);
-    query += ` LIMIT ${limit} `;
+    const limit = parseLimit(searchParams.get("limit"), config.maxLimit);
+    if (limit) query += ` LIMIT ${limit}`;
 
-    const offset = searchParams.get("offset");
-    if (offset) query += ` OFFSET ${offset} `;
+    const page = parsePage(searchParams.get("page"));
+    if (page) query += ` OFFSET ${limit * (page - 1)} `;
 
     return query;
 }
diff --git a/src/utils.spec.ts b/src/utils.spec.ts
index ded6da8..9d2f6e4 100644
--- a/src/utils.spec.ts
+++ b/src/utils.spec.ts
@@ -1,13 +1,27 @@
 import { expect, test } from "bun:test";
-import { parseBlockId, parseTimestamp } from "./utils.js";
+import { parseBlockId, parseLimit, parsePage, parseTimestamp } from "./utils.js";
+import { config } from "./config.js";
 
 test("parseBlockId", () => {
     expect(parseBlockId("0x123") as string).toBe("123");
 });
 
+test("parseLimit", () => {
+    expect(parseLimit("1")).toBe(1);
+    expect(parseLimit("0")).toBe(0);
+    expect(parseLimit(10)).toBe(10);
+    expect(parseLimit(config.maxLimit + 1)).toBe(config.maxLimit);
+});
+
+test("parsePage", () => {
+    expect(parsePage("1")).toBe(1);
+    expect(parsePage("0")).toBe(1);
+    expect(parsePage(10)).toBe(10);
+});
+
 test("parseTimestamp", () => {
     expect(parseTimestamp("1697587100")).toBe(1697587100);
     expect(parseTimestamp("1697587100000")).toBe(1697587100);
     expect(parseTimestamp("awdawd")).toBeNaN();
     expect(parseTimestamp(null)).toBeUndefined();
-});
+});
\ No newline at end of file
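Note: the new assertions above encode the clamping rules implemented in src/utils.ts below: parseLimit clamps its input to the [0, config.maxLimit] range, with 0 meaning "no limit" (the builders above then skip the LIMIT clause entirely), while parsePage floors anything below 1 to the first page. A small usage sketch; the expected values in the comments assume an illustrative config.maxLimit that is not taken from the diff:

    import { parseLimit, parsePage } from "./utils.js";

    // Assuming config.maxLimit === 500 for illustration
    parseLimit("10");   // 10
    parseLimit(9999);   // 500 -> clamped to config.maxLimit
    parseLimit("0");    // 0   -> no LIMIT clause will be emitted
    parsePage("3");     // 3
    parsePage("0");     // 1   -> pages are 1-based
    parsePage(null);    // 1   -> defaults to the first page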
diff --git a/src/utils.ts b/src/utils.ts
index e399ef9..667df3d 100644
--- a/src/utils.ts
+++ b/src/utils.ts
@@ -1,20 +1,35 @@
 import { config } from "./config.js";
 
+export function parseBlockId(block_id?: string | null) {
+    return block_id ? block_id.replace("0x", "") : undefined;
+}
+
 export function parseLimit(limit?: string | null | number, defaultLimit?: number) {
-    let value = 1 // default 1
+    let value = 0; // default 0 (no limit)
     if (defaultLimit) value = defaultLimit;
     if (limit) {
         if (typeof limit === "string") value = parseInt(limit);
         if (typeof limit === "number") value = limit;
     }
-    // limit must be between 1 and maxLimit
+    // limit must be between 0 (no limit) and maxLimit
+    if (value < 0) value = 0;
     if (value > config.maxLimit) value = config.maxLimit;
     return value;
 }
 
-export function parseBlockId(block_id?: string | null) {
-    return block_id ? block_id.replace("0x", "") : undefined;
+export function parsePage(page?: string | null | number) {
+    let value = 1;
+
+    if (page) {
+        if (typeof page === "string") value = parseInt(page);
+        if (typeof page === "number") value = page;
+    }
+
+    if (value <= 0)
+        value = 1;
+
+    return value;
 }
 
 export function parseTimestamp(timestamp?: string | null | number) {
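Note: taken together, these changes mean a paginated request is answered with the addMetadata envelope rather than a bare array. A rough sketch of the response body for a request like GET /transfers?limit=5&page=2 against a result set of 25 matching rows, with values derived from the addMetadata and query-builder logic in this diff (illustrative, not an actual recorded response):

    // Illustrative response body for ?limit=5&page=2 when 25 rows match in total
    const body = {
        data: [/* the 5 rows for page 2, i.e. LIMIT 5 OFFSET 5 */],
        meta: {
            next_page: 3,      // page + 1 while page * limit < total_results
            previous_page: 1,  // page - 1, floored at 1
            total_pages: 5,    // Math.ceil(25 / 5)
            total_results: 25  // rows_before_limit_at_least reported by ClickHouse
        }
    };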